Diffstat (limited to 'src/tools/rust-analyzer/crates/ide')
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/Cargo.toml | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs | 157
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs | 18
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs | 23
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs | 822
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs | 294
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs | 154
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs | 44
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs | 12
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs | 61
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs | 22
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs | 52
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs | 18
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs | 39
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs | 375
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs | 19
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs | 277
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs | 325
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs | 15
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs | 23
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs | 101
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs | 248
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_format_string_arg.rs | 296
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs | 7
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs | 364
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs | 162
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs | 293
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs | 159
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/lib.rs | 14
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/tests.rs | 14
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs | 202
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/utils.rs | 94
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/Cargo.toml | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs | 150
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs | 136
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs | 14
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs | 54
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs | 70
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs | 20
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs | 247
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs | 69
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/config.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/context.rs | 49
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs | 1936
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/lib.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render.rs | 42
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs | 24
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs | 26
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs | 28
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/tests.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs | 77
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs | 29
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs | 54
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/Cargo.toml | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/defs.rs | 80
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs | 31
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/lib.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/line_index.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/rename.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/search.rs | 74
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/source_change.rs | 147
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs | 267
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs | 120
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs | 38
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs | 312
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs | 46
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs | 18
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs | 33
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs | 35
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs | 12
-rw-r--r--  src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs | 12
-rw-r--r--  src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs | 7
-rw-r--r--  src/tools/rust-analyzer/crates/ide/Cargo.toml | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/annotations.rs | 307
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs | 103
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/doc_links.rs | 11
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/expand_macro.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/fn_references.rs | 94
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/goto_definition.rs | 221
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/highlight_related.rs | 30
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover/render.rs | 34
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover/tests.rs | 270
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs | 267
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/join_lines.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/lib.rs | 48
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/matching_brace.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/moniker.rs | 143
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/move_item.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/parent_module.rs | 20
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/prime_caches.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/references.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/runnables.rs | 267
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/static_index.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/status.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs | 74
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs | 11
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html | 6
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html | 12
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs | 30
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs | 7
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/view_hir.rs | 16
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs | 2
156 files changed, 8704 insertions, 2672 deletions
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
index fca09d384..57a41f3d9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.3"
+itertools = "0.10.5"
either = "1.7.0"
stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
index d4d148c77..60d1588a4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -13,4 +13,5 @@ pub struct AssistConfig {
pub snippet_cap: Option<SnippetCap>,
pub allowed: Option<Vec<AssistKind>>,
pub insert_use: InsertUseConfig,
+ pub prefer_no_std: bool,
}
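
Note (not part of the patch): the new `prefer_no_std` flag is threaded from `AssistConfig` into the import-path lookups, as the later hunks show. A minimal sketch of the resulting call shape, assuming a `module: hir::Module` and an `item: ModuleDef` are in scope:

    // Sketch only: find_use_path now takes the prefer_no_std flag from the assist config.
    let path = module.find_use_path(ctx.db(), item, ctx.config.prefer_no_std)?;
    let ast_path = mod_path_to_ast(&path);
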
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
index f9b426614..8c7670e0c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
@@ -1,28 +1,20 @@
//! See [`AssistContext`].
-use std::mem;
-
use hir::Semantics;
-use ide_db::{
- base_db::{AnchoredPathBuf, FileId, FileRange},
- SnippetCap,
-};
-use ide_db::{
- label::Label,
- source_change::{FileSystemEdit, SourceChange},
- RootDatabase,
-};
+use ide_db::base_db::{FileId, FileRange};
+use ide_db::{label::Label, RootDatabase};
use syntax::{
algo::{self, find_node_at_offset, find_node_at_range},
- AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
- SyntaxToken, TextRange, TextSize, TokenAtOffset,
+ AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange,
+ TextSize, TokenAtOffset,
};
-use text_edit::{TextEdit, TextEditBuilder};
use crate::{
assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel,
};
+pub(crate) use ide_db::source_change::{SourceChangeBuilder, TreeMutator};
+
/// `AssistContext` allows to apply an assist or check if it could be applied.
///
/// Assists use a somewhat over-engineered approach, given the current needs.
@@ -163,7 +155,7 @@ impl Assists {
id: AssistId,
label: impl Into<String>,
target: TextRange,
- f: impl FnOnce(&mut AssistBuilder),
+ f: impl FnOnce(&mut SourceChangeBuilder),
) -> Option<()> {
let mut f = Some(f);
self.add_impl(None, id, label.into(), target, &mut |it| f.take().unwrap()(it))
@@ -175,7 +167,7 @@ impl Assists {
id: AssistId,
label: impl Into<String>,
target: TextRange,
- f: impl FnOnce(&mut AssistBuilder),
+ f: impl FnOnce(&mut SourceChangeBuilder),
) -> Option<()> {
let mut f = Some(f);
self.add_impl(Some(group), id, label.into(), target, &mut |it| f.take().unwrap()(it))
@@ -187,7 +179,7 @@ impl Assists {
id: AssistId,
label: String,
target: TextRange,
- f: &mut dyn FnMut(&mut AssistBuilder),
+ f: &mut dyn FnMut(&mut SourceChangeBuilder),
) -> Option<()> {
if !self.is_allowed(&id) {
return None;
@@ -195,7 +187,7 @@ impl Assists {
let mut trigger_signature_help = false;
let source_change = if self.resolve.should_resolve(&id) {
- let mut builder = AssistBuilder::new(self.file);
+ let mut builder = SourceChangeBuilder::new(self.file);
f(&mut builder);
trigger_signature_help = builder.trigger_signature_help;
Some(builder.finish())
@@ -216,132 +208,3 @@ impl Assists {
}
}
}
-
-pub(crate) struct AssistBuilder {
- edit: TextEditBuilder,
- file_id: FileId,
- source_change: SourceChange,
- trigger_signature_help: bool,
-
- /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
- mutated_tree: Option<TreeMutator>,
-}
-
-pub(crate) struct TreeMutator {
- immutable: SyntaxNode,
- mutable_clone: SyntaxNode,
-}
-
-impl TreeMutator {
- pub(crate) fn new(immutable: &SyntaxNode) -> TreeMutator {
- let immutable = immutable.ancestors().last().unwrap();
- let mutable_clone = immutable.clone_for_update();
- TreeMutator { immutable, mutable_clone }
- }
-
- pub(crate) fn make_mut<N: AstNode>(&self, node: &N) -> N {
- N::cast(self.make_syntax_mut(node.syntax())).unwrap()
- }
-
- pub(crate) fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
- let ptr = SyntaxNodePtr::new(node);
- ptr.to_node(&self.mutable_clone)
- }
-}
-
-impl AssistBuilder {
- pub(crate) fn new(file_id: FileId) -> AssistBuilder {
- AssistBuilder {
- edit: TextEdit::builder(),
- file_id,
- source_change: SourceChange::default(),
- trigger_signature_help: false,
- mutated_tree: None,
- }
- }
-
- pub(crate) fn edit_file(&mut self, file_id: FileId) {
- self.commit();
- self.file_id = file_id;
- }
-
- fn commit(&mut self) {
- if let Some(tm) = self.mutated_tree.take() {
- algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
- }
-
- let edit = mem::take(&mut self.edit).finish();
- if !edit.is_empty() {
- self.source_change.insert_source_edit(self.file_id, edit);
- }
- }
-
- pub(crate) fn make_mut<N: AstNode>(&mut self, node: N) -> N {
- self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
- }
- /// Returns a copy of the `node`, suitable for mutation.
- ///
- /// Syntax trees in rust-analyzer are typically immutable, and mutating
- /// operations panic at runtime. However, it is possible to make a copy of
- /// the tree and mutate the copy freely. Mutation is based on interior
- /// mutability, and different nodes in the same tree see the same mutations.
- ///
- /// The typical pattern for an assist is to find specific nodes in the read
- /// phase, and then get their mutable couterparts using `make_mut` in the
- /// mutable state.
- pub(crate) fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
- self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
- }
-
- /// Remove specified `range` of text.
- pub(crate) fn delete(&mut self, range: TextRange) {
- self.edit.delete(range)
- }
- /// Append specified `text` at the given `offset`
- pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
- self.edit.insert(offset, text.into())
- }
- /// Append specified `snippet` at the given `offset`
- pub(crate) fn insert_snippet(
- &mut self,
- _cap: SnippetCap,
- offset: TextSize,
- snippet: impl Into<String>,
- ) {
- self.source_change.is_snippet = true;
- self.insert(offset, snippet);
- }
- /// Replaces specified `range` of text with a given string.
- pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
- self.edit.replace(range, replace_with.into())
- }
- /// Replaces specified `range` of text with a given `snippet`.
- pub(crate) fn replace_snippet(
- &mut self,
- _cap: SnippetCap,
- range: TextRange,
- snippet: impl Into<String>,
- ) {
- self.source_change.is_snippet = true;
- self.replace(range, snippet);
- }
- pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
- algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
- }
- pub(crate) fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
- let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
- self.source_change.push_file_system_edit(file_system_edit);
- }
- pub(crate) fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
- let file_system_edit = FileSystemEdit::MoveFile { src, dst };
- self.source_change.push_file_system_edit(file_system_edit);
- }
- pub(crate) fn trigger_signature_help(&mut self) {
- self.trigger_signature_help = true;
- }
-
- fn finish(mut self) -> SourceChange {
- self.commit();
- mem::take(&mut self.source_change)
- }
-}
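
Note (not part of the patch): `AssistBuilder` moves into `ide_db::source_change` as `SourceChangeBuilder`, and the re-export above keeps the same builder API available to assist handlers. A minimal sketch of a handler body under that assumption, with a hypothetical assist id and `target_range`:

    // Sketch only: the closure passed to Assists::add now receives &mut SourceChangeBuilder.
    acc.add(
        AssistId("example_assist", AssistKind::RefactorRewrite),
        "Example label",
        target_range,
        |builder: &mut SourceChangeBuilder| {
            builder.replace(target_range, "replacement text");
        },
    )
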
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index c808c010c..62cf5ab4f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -944,7 +944,7 @@ foo!();
struct Foo(usize);
impl FooB for Foo {
- $0fn foo< 'lt>(& 'lt self){}
+ $0fn foo<'lt>(&'lt self){}
}
"#,
)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index b16f6fe03..73f4db4e5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -5,6 +5,7 @@ use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics};
use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
+use syntax::ast::edit_in_place::Removable;
use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
use crate::{
@@ -86,7 +87,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter()
.filter_map(|variant| {
Some((
- build_pat(ctx.db(), module, variant)?,
+ build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)?,
variant.should_be_hidden(ctx.db(), module.krate()),
))
})
@@ -131,8 +132,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
- let patterns =
- variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant));
+ let patterns = variants.into_iter().filter_map(|variant| {
+ build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)
+ });
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
})
@@ -348,10 +350,16 @@ fn resolve_tuple_of_enum_def(
.collect()
}
-fn build_pat(db: &RootDatabase, module: hir::Module, var: ExtendedVariant) -> Option<ast::Pat> {
+fn build_pat(
+ db: &RootDatabase,
+ module: hir::Module,
+ var: ExtendedVariant,
+ prefer_no_std: bool,
+) -> Option<ast::Pat> {
match var {
ExtendedVariant::Variant(var) => {
- let path = mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var))?);
+ let path =
+ mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var), prefer_no_std)?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
let pat: ast::Pat = match var.source(db)?.value.kind() {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
index 949cf3167..678dc877d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
@@ -89,8 +89,11 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
// ```
pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
- let mut proposed_imports =
- import_assets.search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind);
+ let mut proposed_imports = import_assets.search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ );
if proposed_imports.is_empty() {
return None;
}
@@ -153,6 +156,8 @@ pub(super) fn find_importable_node(
{
ImportAssets::for_method_call(&method_under_caret, &ctx.sema)
.zip(Some(method_under_caret.syntax().clone().into()))
+ } else if let Some(_) = ctx.find_node_at_offset_with_descend::<ast::Param>() {
+ None
} else if let Some(pat) = ctx
.find_node_at_offset_with_descend::<ast::IdentPat>()
.filter(ast::IdentPat::is_simple_ident)
@@ -266,6 +271,20 @@ mod tests {
}
#[test]
+ fn ignore_parameter_name() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod foo {
+ pub mod bar {}
+ }
+
+ fn foo(bar$0: &str) {}
+ ",
+ );
+ }
+
+ #[test]
fn prefer_shorter_paths() {
let before = r"
//- /main.rs crate:main deps:foo,bar
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
index 30f6dd41a..95d11abe8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
@@ -50,7 +50,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
_ => return None,
};
- mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def)?)
+ mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def, ctx.config.prefer_no_std)?)
};
let dest_type = match &ast_trait {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
new file mode 100644
index 000000000..8d11e0bac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -0,0 +1,822 @@
+use either::Either;
+use ide_db::defs::Definition;
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, HasGenericParams, HasVisibility},
+ match_ast, SyntaxKind, SyntaxNode,
+};
+
+use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_named_struct_to_tuple_struct
+//
+// Converts struct with named fields to tuple struct, and analogously for enum variants with named
+// fields.
+//
+// ```
+// struct Point$0 { x: f32, y: f32 }
+//
+// impl Point {
+// pub fn new(x: f32, y: f32) -> Self {
+// Point { x, y }
+// }
+//
+// pub fn x(&self) -> f32 {
+// self.x
+// }
+//
+// pub fn y(&self) -> f32 {
+// self.y
+// }
+// }
+// ```
+// ->
+// ```
+// struct Point(f32, f32);
+//
+// impl Point {
+// pub fn new(x: f32, y: f32) -> Self {
+// Point(x, y)
+// }
+//
+// pub fn x(&self) -> f32 {
+// self.0
+// }
+//
+// pub fn y(&self) -> f32 {
+// self.1
+// }
+// }
+// ```
+pub(crate) fn convert_named_struct_to_tuple_struct(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let strukt = ctx
+ .find_node_at_offset::<ast::Struct>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+ let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+ let record_fields = match field_list {
+ ast::FieldList::RecordFieldList(it) => it,
+ ast::FieldList::TupleFieldList(_) => return None,
+ };
+ let strukt_def = match &strukt {
+ Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
+ Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
+ };
+ let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
+
+ acc.add(
+ AssistId("convert_named_struct_to_tuple_struct", AssistKind::RefactorRewrite),
+ "Convert to tuple struct",
+ target,
+ |edit| {
+ edit_field_references(ctx, edit, record_fields.fields());
+ edit_struct_references(ctx, edit, strukt_def);
+ edit_struct_def(ctx, edit, &strukt, record_fields);
+ },
+ )
+}
+
+fn edit_struct_def(
+ ctx: &AssistContext<'_>,
+ edit: &mut SourceChangeBuilder,
+ strukt: &Either<ast::Struct, ast::Variant>,
+ record_fields: ast::RecordFieldList,
+) {
+ let tuple_fields = record_fields
+ .fields()
+ .filter_map(|f| Some(ast::make::tuple_field(f.visibility(), f.ty()?)));
+ let tuple_fields = ast::make::tuple_field_list(tuple_fields);
+ let record_fields_text_range = record_fields.syntax().text_range();
+
+ edit.edit_file(ctx.file_id());
+ edit.replace(record_fields_text_range, tuple_fields.syntax().text());
+
+ if let Either::Left(strukt) = strukt {
+ if let Some(w) = strukt.where_clause() {
+ let mut where_clause = w.to_string();
+ if where_clause.ends_with(',') {
+ where_clause.pop();
+ }
+ where_clause.push(';');
+
+ edit.delete(w.syntax().text_range());
+ edit.insert(record_fields_text_range.end(), ast::make::tokens::single_newline().text());
+ edit.insert(record_fields_text_range.end(), where_clause);
+ edit.insert(record_fields_text_range.end(), ast::make::tokens::single_newline().text());
+
+ if let Some(tok) = strukt
+ .generic_param_list()
+ .and_then(|l| l.r_angle_token())
+ .and_then(|tok| tok.next_token())
+ .filter(|tok| tok.kind() == SyntaxKind::WHITESPACE)
+ {
+ edit.delete(tok.text_range());
+ }
+ } else {
+ edit.insert(record_fields_text_range.end(), ";");
+ }
+ }
+
+ if let Some(tok) = record_fields
+ .l_curly_token()
+ .and_then(|tok| tok.prev_token())
+ .filter(|tok| tok.kind() == SyntaxKind::WHITESPACE)
+ {
+ edit.delete(tok.text_range())
+ }
+}
+
+fn edit_struct_references(
+ ctx: &AssistContext<'_>,
+ edit: &mut SourceChangeBuilder,
+ strukt: Either<hir::Struct, hir::Variant>,
+) {
+ let strukt_def = match strukt {
+ Either::Left(s) => Definition::Adt(hir::Adt::Struct(s)),
+ Either::Right(v) => Definition::Variant(v),
+ };
+ let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
+
+ let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> {
+ match_ast! {
+ match node {
+ ast::RecordPat(record_struct_pat) => {
+ edit.replace(
+ record_struct_pat.syntax().text_range(),
+ ast::make::tuple_struct_pat(
+ record_struct_pat.path()?,
+ record_struct_pat
+ .record_pat_field_list()?
+ .fields()
+ .filter_map(|pat| pat.pat())
+ )
+ .to_string()
+ );
+ },
+ ast::RecordExpr(record_expr) => {
+ let path = record_expr.path()?;
+ let args = record_expr
+ .record_expr_field_list()?
+ .fields()
+ .filter_map(|f| f.expr())
+ .join(", ");
+
+ edit.replace(record_expr.syntax().text_range(), format!("{path}({args})"));
+ },
+ _ => return None,
+ }
+ }
+ Some(())
+ };
+
+ for (file_id, refs) in usages {
+ edit.edit_file(file_id);
+ for r in refs {
+ for node in r.name.syntax().ancestors() {
+ if edit_node(edit, node).is_some() {
+ break;
+ }
+ }
+ }
+ }
+}
+
+fn edit_field_references(
+ ctx: &AssistContext<'_>,
+ edit: &mut SourceChangeBuilder,
+ fields: impl Iterator<Item = ast::RecordField>,
+) {
+ for (index, field) in fields.enumerate() {
+ let field = match ctx.sema.to_def(&field) {
+ Some(it) => it,
+ None => continue,
+ };
+ let def = Definition::Field(field);
+ let usages = def.usages(&ctx.sema).all();
+ for (file_id, refs) in usages {
+ edit.edit_file(file_id);
+ for r in refs {
+ if let Some(name_ref) = r.name.as_name_ref() {
+ // Only edit the field reference if it's part of a `.field` access
+ if name_ref.syntax().parent().and_then(ast::FieldExpr::cast).is_some() {
+ edit.replace(name_ref.syntax().text_range(), index.to_string());
+ }
+ }
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_other_than_record_struct() {
+ check_assist_not_applicable(convert_named_struct_to_tuple_struct, r#"struct Foo$0(u32)"#);
+ check_assist_not_applicable(convert_named_struct_to_tuple_struct, r#"struct Foo$0;"#);
+ }
+
+ #[test]
+ fn convert_simple_struct() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner;
+struct A$0 { inner: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.inner
+ }
+}"#,
+ r#"
+struct Inner;
+struct A(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_referenced_via_self_kw() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner;
+struct A$0 { inner: Inner }
+
+impl A {
+ fn new(inner: Inner) -> Self {
+ Self { inner }
+ }
+
+ fn new_with_default() -> Self {
+ Self::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.inner
+ }
+}"#,
+ r#"
+struct Inner;
+struct A(Inner);
+
+impl A {
+ fn new(inner: Inner) -> Self {
+ Self(inner)
+ }
+
+ fn new_with_default() -> Self {
+ Self::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_destructured_struct() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner;
+struct A$0 { inner: Inner }
+
+impl A {
+ fn into_inner(self) -> Inner {
+ let A { inner: a } = self;
+ a
+ }
+
+ fn into_inner_via_self(self) -> Inner {
+ let Self { inner } = self;
+ inner
+ }
+}"#,
+ r#"
+struct Inner;
+struct A(Inner);
+
+impl A {
+ fn into_inner(self) -> Inner {
+ let A(a) = self;
+ a
+ }
+
+ fn into_inner_via_self(self) -> Inner {
+ let Self(inner) = self;
+ inner
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_visibility() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct A$0 {
+ pub first: u32,
+ pub(crate) second: u64
+}
+
+impl A {
+ fn new() -> A {
+ A { first: 42, second: 42 }
+ }
+
+ fn into_first(self) -> u32 {
+ self.first
+ }
+
+ fn into_second(self) -> u64 {
+ self.second
+ }
+}"#,
+ r#"
+struct A(pub u32, pub(crate) u64);
+
+impl A {
+ fn new() -> A {
+ A(42, 42)
+ }
+
+ fn into_first(self) -> u32 {
+ self.0
+ }
+
+ fn into_second(self) -> u64 {
+ self.1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_wrapped_references() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner$0 { uint: u32 }
+struct Outer { inner: Inner }
+
+impl Outer {
+ fn new() -> Self {
+ Self { inner: Inner { uint: 42 } }
+ }
+
+ fn into_inner(self) -> u32 {
+ self.inner.uint
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer { inner: Inner { uint: x } } = self;
+ x
+ }
+}"#,
+ r#"
+struct Inner(u32);
+struct Outer { inner: Inner }
+
+impl Outer {
+ fn new() -> Self {
+ Self { inner: Inner(42) }
+ }
+
+ fn into_inner(self) -> u32 {
+ self.inner.0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer { inner: Inner(x) } = self;
+ x
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner { uint: u32 }
+struct Outer$0 { inner: Inner }
+
+impl Outer {
+ fn new() -> Self {
+ Self { inner: Inner { uint: 42 } }
+ }
+
+ fn into_inner(self) -> u32 {
+ self.inner.uint
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer { inner: Inner { uint: x } } = self;
+ x
+ }
+}"#,
+ r#"
+struct Inner { uint: u32 }
+struct Outer(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner { uint: 42 })
+ }
+
+ fn into_inner(self) -> u32 {
+ self.0.uint
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner { uint: x }) = self;
+ x
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_multi_file_references() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+//- /main.rs
+struct Inner;
+struct A$0 { inner: Inner }
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A { inner: Inner };
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+struct A(Inner);
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A(Inner);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_where_clause() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Wrap$0<T>
+where
+ T: Display,
+{ field1: T }
+"#,
+ r#"
+struct Wrap<T>(T)
+where
+ T: Display;
+
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_other_than_record_variant() {
+ check_assist_not_applicable(
+ convert_named_struct_to_tuple_struct,
+ r#"enum Enum { Variant$0(usize) };"#,
+ );
+ check_assist_not_applicable(
+ convert_named_struct_to_tuple_struct,
+ r#"enum Enum { Variant$0 }"#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_variant() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+enum A {
+ $0Variant { field1: usize },
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ A::Variant { field1: value }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ A::Variant { field1: value } => value,
+ }
+ }
+}"#,
+ r#"
+enum A {
+ Variant(usize),
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ A::Variant(value)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ A::Variant(value) => value,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_referenced_via_self_kw() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+enum A {
+ $0Variant { field1: usize },
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ Self::Variant { field1: value }
+ }
+
+ fn new_with_default() -> A {
+ Self::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ Self::Variant { field1: value } => value,
+ }
+ }
+}"#,
+ r#"
+enum A {
+ Variant(usize),
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ Self::Variant(value)
+ }
+
+ fn new_with_default() -> A {
+ Self::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ Self::Variant(value) => value,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_destructured_variant() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+enum A {
+ $0Variant { field1: usize },
+}
+
+impl A {
+ fn into_inner(self) -> usize {
+ let A::Variant { field1: first } = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> usize {
+ let Self::Variant { field1: first } = self;
+ first
+ }
+}"#,
+ r#"
+enum A {
+ Variant(usize),
+}
+
+impl A {
+ fn into_inner(self) -> usize {
+ let A::Variant(first) = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> usize {
+ let Self::Variant(first) = self;
+ first
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_with_wrapped_references() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+enum Inner {
+ $0Variant { field1: usize },
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant { field1: 42 })
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant { field1: x }) = self;
+ x
+ }
+}"#,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant(42))
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant(x)) = self;
+ x
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ $0Variant { field1: Inner },
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant { field1: Inner::Variant(42) }
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant { field1: Inner::Variant(x) } = self;
+ x
+ }
+}"#,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant(42))
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant(x)) = self;
+ x
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_with_multi_file_references() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ $0Variant { field1: Inner },
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A::Variant { field1: Inner };
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ Variant(Inner),
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A::Variant(Inner);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_directly_used_variant() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ $0Variant { field1: Inner },
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A::Variant, Inner};
+fn f() {
+ let a = Variant { field1: Inner };
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ Variant(Inner),
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A::Variant, Inner};
+fn f() {
+ let a = Variant(Inner);
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 4ab8e93a2..d8f522708 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -5,7 +5,7 @@ use syntax::{
match_ast, SyntaxNode,
};
-use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
// Assist: convert_tuple_struct_to_named_struct
//
@@ -80,7 +80,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct(
fn edit_struct_def(
ctx: &AssistContext<'_>,
- edit: &mut AssistBuilder,
+ edit: &mut SourceChangeBuilder,
strukt: &Either<ast::Struct, ast::Variant>,
tuple_fields: ast::TupleFieldList,
names: Vec<ast::Name>,
@@ -122,7 +122,7 @@ fn edit_struct_def(
fn edit_struct_references(
ctx: &AssistContext<'_>,
- edit: &mut AssistBuilder,
+ edit: &mut SourceChangeBuilder,
strukt: Either<hir::Struct, hir::Variant>,
names: &[ast::Name],
) {
@@ -132,7 +132,7 @@ fn edit_struct_references(
};
let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
- let edit_node = |edit: &mut AssistBuilder, node: SyntaxNode| -> Option<()> {
+ let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> {
match_ast! {
match node {
ast::TupleStructPat(tuple_struct_pat) => {
@@ -203,7 +203,7 @@ fn edit_struct_references(
fn edit_field_references(
ctx: &AssistContext<'_>,
- edit: &mut AssistBuilder,
+ edit: &mut SourceChangeBuilder,
fields: impl Iterator<Item = ast::TupleField>,
names: &[ast::Name],
) {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
new file mode 100644
index 000000000..54a7f480a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
@@ -0,0 +1,294 @@
+use syntax::ast::{self, AstNode};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_two_arm_bool_match_to_matches_macro
+//
+// Convert 2-arm match that evaluates to a boolean into the equivalent matches! invocation.
+//
+// ```
+// fn main() {
+// match scrutinee$0 {
+// Some(val) if val.cond() => true,
+// _ => false,
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// matches!(scrutinee, Some(val) if val.cond())
+// }
+// ```
+pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?;
+ let match_arm_list = match_expr.match_arm_list()?;
+ let mut arms = match_arm_list.arms();
+ let first_arm = arms.next()?;
+ let second_arm = arms.next()?;
+ if arms.next().is_some() {
+ cov_mark::hit!(non_two_arm_match);
+ return None;
+ }
+ let first_arm_expr = first_arm.expr();
+ let second_arm_expr = second_arm.expr();
+
+ let invert_matches = if is_bool_literal_expr(&first_arm_expr, true)
+ && is_bool_literal_expr(&second_arm_expr, false)
+ {
+ false
+ } else if is_bool_literal_expr(&first_arm_expr, false)
+ && is_bool_literal_expr(&second_arm_expr, true)
+ {
+ true
+ } else {
+ cov_mark::hit!(non_invert_bool_literal_arms);
+ return None;
+ };
+
+ let target_range = ctx.sema.original_range(match_expr.syntax()).range;
+ let expr = match_expr.expr()?;
+
+ acc.add(
+ AssistId("convert_two_arm_bool_match_to_matches_macro", AssistKind::RefactorRewrite),
+ "Convert to matches!",
+ target_range,
+ |builder| {
+ let mut arm_str = String::new();
+ if let Some(ref pat) = first_arm.pat() {
+ arm_str += &pat.to_string();
+ }
+ if let Some(ref guard) = first_arm.guard() {
+ arm_str += &format!(" {}", &guard.to_string());
+ }
+ if invert_matches {
+ builder.replace(target_range, format!("!matches!({}, {})", expr, arm_str));
+ } else {
+ builder.replace(target_range, format!("matches!({}, {})", expr, arm_str));
+ }
+ },
+ )
+}
+
+fn is_bool_literal_expr(expr: &Option<ast::Expr>, expect_bool: bool) -> bool {
+ if let Some(ast::Expr::Literal(lit)) = expr {
+ if let ast::LiteralKind::Bool(b) = lit.kind() {
+ return b == expect_bool;
+ }
+ }
+
+ return false;
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::convert_two_arm_bool_match_to_matches_macro;
+
+ #[test]
+ fn not_applicable_outside_of_range_left() {
+ check_assist_not_applicable(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ $0 match a {
+ Some(_val) => true,
+ _ => false
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_two_arm_match() {
+ cov_mark::check!(non_two_arm_match);
+ check_assist_not_applicable(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(3) => true,
+ Some(4) => true,
+ _ => false
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_bool_literal_arms() {
+ cov_mark::check!(non_invert_bool_literal_arms);
+ check_assist_not_applicable(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(val) => val == 3,
+ _ => false
+ }
+}
+ "#,
+ );
+ }
+ #[test]
+ fn not_applicable_both_false_arms() {
+ cov_mark::check!(non_invert_bool_literal_arms);
+ check_assist_not_applicable(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(val) => false,
+ _ => false
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_both_true_arms() {
+ cov_mark::check!(non_invert_bool_literal_arms);
+ check_assist_not_applicable(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(val) => true,
+ _ => true
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_case() {
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(_val) => true,
+ _ => false
+ }
+}
+"#,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ matches!(a, Some(_val))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_invert_case() {
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(_val) => false,
+ _ => true
+ }
+}
+"#,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ !matches!(a, Some(_val))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_with_guard_case() {
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(val) if val > 3 => true,
+ _ => false
+ }
+}
+"#,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ matches!(a, Some(val) if val > 3)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_enum_match_cases() {
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+enum X { A, B }
+
+fn foo(a: X) -> bool {
+ match a$0 {
+ X::A => true,
+ _ => false
+ }
+}
+"#,
+ r#"
+enum X { A, B }
+
+fn foo(a: X) -> bool {
+ matches!(a, X::A)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_target_simple() {
+ check_assist_target(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn foo(a: Option<u32>) -> bool {
+ match a$0 {
+ Some(val) => true,
+ _ => false
+ }
+}
+"#,
+ r#"match a {
+ Some(val) => true,
+ _ => false
+ }"#,
+ );
+ }
+
+ #[test]
+ fn convert_target_complex() {
+ check_assist_target(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X$0 {
+ E::X => true,
+ _ => false,
+ }
+}
+"#,
+ "match E::X {
+ E::X => true,
+ _ => false,
+ }",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index c1f57532b..dc581ff3b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -8,7 +8,7 @@ use syntax::{
TextRange,
};
-use crate::assist_context::{AssistBuilder, AssistContext, Assists};
+use crate::assist_context::{AssistContext, Assists, SourceChangeBuilder};
// Assist: destructure_tuple_binding
//
@@ -151,7 +151,7 @@ struct TupleData {
}
fn edit_tuple_assignment(
ctx: &AssistContext<'_>,
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
data: &TupleData,
in_sub_pattern: bool,
) {
@@ -195,7 +195,7 @@ fn edit_tuple_assignment(
fn edit_tuple_usages(
data: &TupleData,
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
in_sub_pattern: bool,
) {
@@ -211,7 +211,7 @@ fn edit_tuple_usages(
}
fn edit_tuple_usage(
ctx: &AssistContext<'_>,
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
usage: &FileReference,
data: &TupleData,
in_sub_pattern: bool,
@@ -239,7 +239,7 @@ fn edit_tuple_usage(
fn edit_tuple_field_usage(
ctx: &AssistContext<'_>,
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
data: &TupleData,
index: TupleIndex,
) {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index 52a55ead3..d6c8ea785 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -152,6 +152,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
ctx.sema.db,
ModuleDef::from(control_flow_enum),
ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
);
if let Some(mod_path) = mod_path {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index b3c4d306a..897980c66 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -29,7 +29,7 @@ use super::remove_unused_param::range_to_remove;
// Assist: extract_module
//
-// Extracts a selected region as seperate module. All the references, visibility and imports are
+// Extracts a selected region as separate module. All the references, visibility and imports are
// resolved.
//
// ```
@@ -105,7 +105,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
//
//- Thirdly, resolving all the imports this includes removing paths from imports
// outside the module, shifting/cloning them inside new module, or shifting the imports, or making
- // new import statemnts
+ // new import statements
//We are getting item usages and record_fields together, record_fields
//for change_visibility and usages for first point mentioned above in the process
@@ -661,7 +661,7 @@ fn check_intersection_and_push(
import_path: TextRange,
) {
if import_paths_to_be_removed.len() > 0 {
- // Text ranges recieved here for imports are extended to the
+ // Text ranges received here for imports are extended to the
// next/previous comma which can cause intersections among them
// and later deletion of these can cause panics similar
// to reported in #11766. So to mitigate it, we
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index a93648f2d..970e948df 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -9,7 +9,7 @@ use ide_db::{
search::FileReference,
FxHashSet, RootDatabase,
};
-use itertools::{Itertools, Position};
+use itertools::Itertools;
use syntax::{
ast::{
self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams,
@@ -20,7 +20,7 @@ use syntax::{
SyntaxNode, T,
};
-use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
// Assist: extract_struct_from_enum_variant
//
@@ -101,21 +101,22 @@ pub(crate) fn extract_struct_from_enum_variant(
});
}
- let indent = enum_ast.indent_level();
let generic_params = enum_ast
.generic_param_list()
.and_then(|known_generics| extract_generic_params(&known_generics, &field_list));
let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
let def =
create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
+
+ let enum_ast = variant.parent_enum();
+ let indent = enum_ast.indent_level();
def.reindent_to(indent);
- let start_offset = &variant.parent_enum().syntax().clone();
- ted::insert_all_raw(
- ted::Position::before(start_offset),
+ ted::insert_all(
+ ted::Position::before(enum_ast.syntax()),
vec![
def.syntax().clone().into(),
- make::tokens::whitespace(&format!("\n\n{}", indent)).into(),
+ make::tokens::whitespace(&format!("\n\n{indent}")).into(),
],
);
@@ -227,7 +228,7 @@ fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, b
}
fn create_struct_def(
- variant_name: ast::Name,
+ name: ast::Name,
variant: &ast::Variant,
field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
generics: Option<ast::GenericParamList>,
@@ -269,43 +270,27 @@ fn create_struct_def(
field_list.into()
}
};
-
field_list.reindent_to(IndentLevel::single());
- let strukt = make::struct_(enum_vis, variant_name, generics, field_list).clone_for_update();
-
- // FIXME: Consider making this an actual function somewhere (like in `AttrsOwnerEdit`) after some deliberation
- let attrs_and_docs = |node: &SyntaxNode| {
- let mut select_next_ws = false;
- node.children_with_tokens().filter(move |child| {
- let accept = match child.kind() {
- ATTR | COMMENT => {
- select_next_ws = true;
- return true;
- }
- WHITESPACE if select_next_ws => true,
- _ => false,
- };
- select_next_ws = false;
+ let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update();
- accept
- })
- };
-
- // copy attributes & comments from variant
- let variant_attrs = attrs_and_docs(variant.syntax())
- .map(|tok| match tok.kind() {
- WHITESPACE => make::tokens::single_newline().into(),
- _ => tok,
- })
- .collect();
- ted::insert_all(ted::Position::first_child_of(strukt.syntax()), variant_attrs);
+ // take comments from variant
+ ted::insert_all(
+ ted::Position::first_child_of(strukt.syntax()),
+ take_all_comments(variant.syntax()),
+ );
// copy attributes from enum
ted::insert_all(
ted::Position::first_child_of(strukt.syntax()),
- enum_.attrs().map(|it| it.syntax().clone_for_update().into()).collect(),
+ enum_
+ .attrs()
+ .flat_map(|it| {
+ vec![it.syntax().clone_for_update().into(), make::tokens::single_newline().into()]
+ })
+ .collect(),
);
+
strukt
}
@@ -313,49 +298,51 @@ fn update_variant(variant: &ast::Variant, generics: Option<ast::GenericParamList
let name = variant.name()?;
let ty = generics
.filter(|generics| generics.generic_params().count() > 0)
- .map(|generics| {
- let mut generic_str = String::with_capacity(8);
-
- for (p, more) in generics.generic_params().with_position().map(|p| match p {
- Position::First(p) | Position::Middle(p) => (p, true),
- Position::Last(p) | Position::Only(p) => (p, false),
- }) {
- match p {
- ast::GenericParam::ConstParam(konst) => {
- if let Some(name) = konst.name() {
- generic_str.push_str(name.text().as_str());
- }
- }
- ast::GenericParam::LifetimeParam(lt) => {
- if let Some(lt) = lt.lifetime() {
- generic_str.push_str(lt.text().as_str());
- }
- }
- ast::GenericParam::TypeParam(ty) => {
- if let Some(name) = ty.name() {
- generic_str.push_str(name.text().as_str());
- }
- }
- }
- if more {
- generic_str.push_str(", ");
- }
- }
-
- make::ty(&format!("{}<{}>", &name.text(), &generic_str))
- })
+ .map(|generics| make::ty(&format!("{}{}", &name.text(), generics.to_generic_args())))
.unwrap_or_else(|| make::ty(&name.text()));
+ // change from a record to a tuple field list
let tuple_field = make::tuple_field(None, ty);
- let replacement = make::variant(
- name,
- Some(ast::FieldList::TupleFieldList(make::tuple_field_list(iter::once(tuple_field)))),
- )
- .clone_for_update();
- ted::replace(variant.syntax(), replacement.syntax());
+ let field_list = make::tuple_field_list(iter::once(tuple_field)).clone_for_update();
+ ted::replace(variant.field_list()?.syntax(), field_list.syntax());
+
+ // remove any ws after the name
+ if let Some(ws) = name
+ .syntax()
+ .siblings_with_tokens(syntax::Direction::Next)
+ .find_map(|tok| tok.into_token().filter(|tok| tok.kind() == WHITESPACE))
+ {
+ ted::remove(SyntaxElement::Token(ws));
+ }
+
Some(())
}
+// Note: this also detaches whitespace after comments,
+// since `SyntaxNode::splice_children` (and by extension `ted::insert_all_raw`)
+// detaches nodes. If we only took the comments, we'd leave behind the old whitespace.
+fn take_all_comments(node: &SyntaxNode) -> Vec<SyntaxElement> {
+ let mut remove_next_ws = false;
+ node.children_with_tokens()
+ .filter_map(move |child| match child.kind() {
+ COMMENT => {
+ remove_next_ws = true;
+ child.detach();
+ Some(child)
+ }
+ WHITESPACE if remove_next_ws => {
+ remove_next_ws = false;
+ child.detach();
+ Some(make::tokens::single_newline().into())
+ }
+ _ => {
+ remove_next_ws = false;
+ None
+ }
+ })
+ .collect()
+}
+
fn apply_references(
insert_use_cfg: InsertUseConfig,
segment: ast::PathSegment,
@@ -374,7 +361,7 @@ fn apply_references(
fn process_references(
ctx: &AssistContext<'_>,
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
visited_modules: &mut FxHashSet<Module>,
enum_module_def: &ModuleDef,
variant_hir_name: &Name,
@@ -392,6 +379,7 @@ fn process_references(
ctx.sema.db,
*enum_module_def,
ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
);
if let Some(mut mod_path) = mod_path {
mod_path.pop_segment();
@@ -480,10 +468,14 @@ enum En<T> { Var(Var<T>) }"#,
fn test_extract_struct_carries_over_attributes() {
check_assist(
extract_struct_from_enum_variant,
- r#"#[derive(Debug)]
+ r#"
+#[derive(Debug)]
#[derive(Clone)]
enum Enum { Variant{ field: u32$0 } }"#,
- r#"#[derive(Debug)]#[derive(Clone)] struct Variant{ field: u32 }
+ r#"
+#[derive(Debug)]
+#[derive(Clone)]
+struct Variant{ field: u32 }
#[derive(Debug)]
#[derive(Clone)]
@@ -614,7 +606,7 @@ enum A { One(One) }"#,
}
#[test]
- fn test_extract_struct_keep_comments_and_attrs_on_variant_struct() {
+ fn test_extract_struct_move_struct_variant_comments() {
check_assist(
extract_struct_from_enum_variant,
r#"
@@ -631,19 +623,19 @@ enum A {
/* comment */
// other
/// comment
-#[attr]
struct One{
a: u32
}
enum A {
+ #[attr]
One(One)
}"#,
);
}
#[test]
- fn test_extract_struct_keep_comments_and_attrs_on_variant_tuple() {
+ fn test_extract_struct_move_tuple_variant_comments() {
check_assist(
extract_struct_from_enum_variant,
r#"
@@ -658,10 +650,10 @@ enum A {
/* comment */
// other
/// comment
-#[attr]
struct One(u32, u32);
enum A {
+ #[attr]
One(One)
}"#,
);
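
The new `take_all_comments` helper above works by carrying a mutable flag inside a `filter_map` closure, so the whitespace token that follows each comment is consumed along with it. Below is a minimal standalone sketch of that pattern using plain enums instead of rust-analyzer's `SyntaxElement`/`ted` types; the names `Tok` and `take_comments` are illustrative only.

    #[derive(Debug, PartialEq)]
    enum Tok {
        Comment(&'static str),
        Ws,
        Other,
    }

    // Keep every comment and swallow the single whitespace token that follows it,
    // mirroring the flag carried by the closure in `take_all_comments`.
    fn take_comments(tokens: Vec<Tok>) -> Vec<Tok> {
        let mut take_next_ws = false;
        tokens
            .into_iter()
            .filter_map(|tok| match tok {
                Tok::Comment(text) => {
                    take_next_ws = true;
                    Some(Tok::Comment(text))
                }
                Tok::Ws if take_next_ws => {
                    take_next_ws = false;
                    None
                }
                _ => {
                    take_next_ws = false;
                    None
                }
            })
            .collect()
    }

    fn main() {
        let toks = vec![Tok::Comment("// other"), Tok::Ws, Tok::Other, Tok::Comment("/// doc")];
        assert_eq!(take_comments(toks), vec![Tok::Comment("// other"), Tok::Comment("/// doc")]);
    }
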
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
index af584cdb4..03aa8601d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -171,6 +171,25 @@ fn collect_used_generics<'gp>(
ast::Type::RefType(ref_) => generics.extend(
ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
),
+ ast::Type::ArrayType(ar) => {
+ if let Some(expr) = ar.expr() {
+ if let ast::Expr::PathExpr(p) = expr {
+ if let Some(path) = p.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ if let Some(param) = known_generics.iter().find(|gp| {
+ if let ast::GenericParam::ConstParam(cp) = gp {
+ cp.name().map_or(false, |n| n.text() == name_ref.text())
+ } else {
+ false
+ }
+ }) {
+ generics.push(param);
+ }
+ }
+ }
+ }
+ }
+ }
_ => (),
});
// stable resort to lifetime, type, const
@@ -357,4 +376,29 @@ impl<'outer, Outer, const OUTER: usize> () {
"#,
);
}
+
+ #[test]
+ fn issue_11197() {
+ check_assist(
+ extract_type_alias,
+ r#"
+struct Foo<T, const N: usize>
+where
+ [T; N]: Sized,
+{
+ arr: $0[T; N]$0,
+}
+ "#,
+ r#"
+type $0Type<T, const N: usize> = [T; N];
+
+struct Foo<T, const N: usize>
+where
+ [T; N]: Sized,
+{
+ arr: Type<T, N>,
+}
+ "#,
+ );
+ }
}
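
The new `ArrayType` arm inspects `[T; N]` and, when the array length is a bare path, searches the known generic parameters for a const parameter with that name; this is what lets the `issue_11197` test carry `const N: usize` into the alias. A standalone sketch of that lookup, with a toy `GenericParam` enum standing in for the real AST types:

    // Toy stand-in for ast::GenericParam; only the const case matters here.
    enum GenericParam {
        Type(String),
        Const(String),
    }

    // Find the const parameter whose name matches the array-length expression,
    // mirroring the `known_generics.iter().find(...)` call in the new arm.
    fn find_const_param<'a>(known: &'a [GenericParam], len_expr: &str) -> Option<&'a GenericParam> {
        known
            .iter()
            .find(|gp| matches!(gp, GenericParam::Const(name) if name.as_str() == len_expr))
    }

    fn main() {
        let known = vec![GenericParam::Type("T".into()), GenericParam::Const("N".into())];
        assert!(find_const_param(&known, "N").is_some());
        assert!(find_const_param(&known, "M").is_none());
    }
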
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
index eaa6de73e..ccdfcb0d9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
@@ -77,7 +77,7 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
target_data_for_generate_constant(ctx, current_module, constant_module).unwrap_or_else(
|| {
let indent = IndentLevel::from_node(statement.syntax());
- (statement.syntax().text_range().start(), indent, None, format!("\n{}", indent))
+ (statement.syntax().text_range().start(), indent, None, format!("\n{indent}"))
},
);
@@ -90,7 +90,7 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
if let Some(file_id) = file_id {
builder.edit_file(file_id);
}
- builder.insert(offset, format!("{}{}", text, post_string));
+ builder.insert(offset, format!("{text}{post_string}"));
},
)
}
@@ -103,13 +103,13 @@ fn get_text_for_generate_constant(
) -> Option<String> {
let constant_token = not_exist_name_ref.pop()?;
let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
- let mut text = format!("{}const {}: {} = $0;", vis, constant_token, type_name);
+ let mut text = format!("{vis}const {constant_token}: {type_name} = $0;");
while let Some(name_ref) = not_exist_name_ref.pop() {
let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
text = text.replace("\n", "\n ");
- text = format!("{}mod {} {{{}\n}}", vis, name_ref.to_string(), text);
+ text = format!("{vis}mod {name_ref} {{{text}\n}}");
}
- Some(text.replace("\n", &format!("\n{}", indent)))
+ Some(text.replace("\n", &format!("\n{indent}")))
}
fn target_data_for_generate_constant(
@@ -134,7 +134,7 @@ fn target_data_for_generate_constant(
.find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains("\n"))
.is_some();
let post_string =
- if siblings_has_newline { format!("{}", indent) } else { format!("\n{}", indent) };
+ if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
Some((offset, indent + 1, Some(file_id), post_string))
}
_ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())),
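
Most of the changes in this file migrate positional `format!` arguments to Rust 2021 captured identifiers. A minimal standalone check that the two spellings produce identical output:

    fn main() {
        let indent = "    ";
        let text = "const N: usize = $0;";
        assert_eq!(
            format!("\n{}{}", indent, text), // old positional style
            format!("\n{indent}{text}"),     // new captured-identifier style
        );
    }
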
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
index 5e9995a98..a6e3d49e0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
@@ -55,12 +55,11 @@ pub(crate) fn generate_default_from_enum_variant(
let buf = format!(
r#"
-impl Default for {0} {{
+impl Default for {enum_name} {{
fn default() -> Self {{
- Self::{1}
+ Self::{variant_name}
}}
}}"#,
- enum_name, variant_name
);
edit.insert(start_offset, buf);
},
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
index cbd33de19..49d9fd707 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -1,8 +1,7 @@
use ide_db::famous_defs::FamousDefs;
-use itertools::Itertools;
use stdx::format_to;
use syntax::{
- ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl},
+ ast::{self, make, HasGenericParams, HasName, Impl},
AstNode,
};
@@ -77,45 +76,47 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
)
}
+// FIXME: based on utils::generate_impl_text_inner
fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
- let generic_params = impl_.generic_param_list();
- let mut buf = String::with_capacity(code.len());
- buf.push_str("\n\n");
- buf.push_str("impl");
-
- if let Some(generic_params) = &generic_params {
- let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
- let toc_params = generic_params.type_or_const_params().map(|toc_param| match toc_param {
- ast::TypeOrConstParam::Type(type_param) => {
- let mut buf = String::new();
- if let Some(it) = type_param.name() {
- format_to!(buf, "{}", it.syntax());
- }
- if let Some(it) = type_param.colon_token() {
- format_to!(buf, "{} ", it);
+ let impl_ty = impl_.self_ty().unwrap();
+ let generic_params = impl_.generic_param_list().map(|generic_params| {
+ let lifetime_params =
+ generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ // remove defaults since they can't be specified in impls
+ match param {
+ ast::TypeOrConstParam::Type(param) => {
+ let param = param.clone_for_update();
+ param.remove_default();
+ Some(ast::GenericParam::TypeParam(param))
}
- if let Some(it) = type_param.type_bound_list() {
- format_to!(buf, "{}", it.syntax());
+ ast::TypeOrConstParam::Const(param) => {
+ let param = param.clone_for_update();
+ param.remove_default();
+ Some(ast::GenericParam::ConstParam(param))
}
- buf
}
- ast::TypeOrConstParam::Const(const_param) => const_param.syntax().to_string(),
});
- let generics = lifetimes.chain(toc_params).format(", ");
- format_to!(buf, "<{}>", generics);
- }
- buf.push(' ');
- buf.push_str(trait_text);
- buf.push_str(" for ");
- buf.push_str(&impl_.self_ty().unwrap().syntax().text().to_string());
+ make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
+ });
+
+ let mut buf = String::with_capacity(code.len());
+ buf.push_str("\n\n");
+
+ // `impl{generic_params} {trait_text} for {impl_.self_ty()}`
+ buf.push_str("impl");
+ if let Some(generic_params) = &generic_params {
+ format_to!(buf, "{generic_params}")
+ }
+ format_to!(buf, " {trait_text} for {impl_ty}");
match impl_.where_clause() {
Some(where_clause) => {
- format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
+ format_to!(buf, "\n{where_clause}\n{{\n{code}\n}}");
}
None => {
- format_to!(buf, " {{\n{}\n}}", code);
+ format_to!(buf, " {{\n{code}\n}}");
}
}
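
The rewritten `generate_trait_impl_text_from_impl` now assembles the header as `impl{generic_params} {trait_text} for {self_ty}` and only branches on the presence of a where-clause. A string-level sketch of that assembly; `impl_header` is a hypothetical helper, not an API of this crate, and the body is a fixed placeholder:

    // Hypothetical helper mirroring the header layout built above; all inputs are
    // plain strings rather than syntax nodes.
    fn impl_header(
        generics: Option<&str>,
        trait_text: &str,
        self_ty: &str,
        where_clause: Option<&str>,
    ) -> String {
        let mut buf = String::from("\n\nimpl");
        if let Some(generics) = generics {
            buf.push_str(generics);
        }
        buf.push_str(&format!(" {trait_text} for {self_ty}"));
        match where_clause {
            Some(where_clause) => buf.push_str(&format!("\n{where_clause}\n{{\n    todo!()\n}}")),
            None => buf.push_str(" {\n    todo!()\n}"),
        }
        buf
    }

    fn main() {
        let header = impl_header(Some("<T>"), "Default", "Foo<T>", None);
        assert!(header.contains("impl<T> Default for Foo<T>"));
    }
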
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index 85b193663..ceae80755 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -51,14 +51,14 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
Some(field) => {
let field_name = field.name()?;
let field_ty = field.ty()?;
- (format!("{}", field_name), field_ty, field.syntax().text_range())
+ (field_name.to_string(), field_ty, field.syntax().text_range())
}
None => {
let field = ctx.find_node_at_offset::<ast::TupleField>()?;
let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
let field_list_index = field_list.fields().position(|it| it == field)?;
let field_ty = field.ty()?;
- (format!("{}", field_list_index), field_ty, field.syntax().text_range())
+ (field_list_index.to_string(), field_ty, field.syntax().text_range())
}
};
@@ -77,7 +77,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
for method in methods {
let adt = ast::Adt::Struct(strukt.clone());
let name = method.name(ctx.db()).to_string();
- let impl_def = find_struct_impl(ctx, &adt, &name).flatten();
+ let impl_def = find_struct_impl(ctx, &adt, &[name]).flatten();
acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate),
@@ -151,7 +151,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
Some(cap) => {
let offset = strukt.syntax().text_range().end();
let snippet = render_snippet(cap, impl_def.syntax(), cursor);
- let snippet = format!("\n\n{}", snippet);
+ let snippet = format!("\n\n{snippet}");
builder.insert_snippet(cap, offset, snippet);
}
None => {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
index b9637ee8d..55b7afb3d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
@@ -8,7 +8,7 @@ use syntax::{
};
use crate::{
- assist_context::{AssistBuilder, AssistContext, Assists},
+ assist_context::{AssistContext, Assists, SourceChangeBuilder},
utils::generate_trait_impl_text,
AssistId, AssistKind,
};
@@ -58,14 +58,15 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
- let trait_path = module.find_use_path(ctx.db(), ModuleDef::Trait(trait_))?;
+ let trait_path =
+ module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?;
let field_type = field.ty()?;
let field_name = field.name()?;
let target = field.syntax().text_range();
acc.add(
AssistId("generate_deref", AssistKind::Generate),
- format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field_name),
+ format!("Generate `{deref_type_to_generate:?}` impl using `{field_name}`"),
target,
|edit| {
generate_edit(
@@ -98,13 +99,14 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
- let trait_path = module.find_use_path(ctx.db(), ModuleDef::Trait(trait_))?;
+ let trait_path =
+ module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?;
let field_type = field.ty()?;
let target = field.syntax().text_range();
acc.add(
AssistId("generate_deref", AssistKind::Generate),
- format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field.syntax()),
+ format!("Generate `{deref_type_to_generate:?}` impl using `{field}`"),
target,
|edit| {
generate_edit(
@@ -120,7 +122,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
}
fn generate_edit(
- edit: &mut AssistBuilder,
+ edit: &mut SourceChangeBuilder,
strukt: ast::Struct,
field_type_syntax: &SyntaxNode,
field_name: impl Display,
@@ -130,18 +132,16 @@ fn generate_edit(
let start_offset = strukt.syntax().text_range().end();
let impl_code = match deref_type {
DerefType::Deref => format!(
- r#" type Target = {0};
+ r#" type Target = {field_type_syntax};
fn deref(&self) -> &Self::Target {{
- &self.{1}
+ &self.{field_name}
}}"#,
- field_type_syntax, field_name
),
DerefType::DerefMut => format!(
r#" fn deref_mut(&mut self) -> &mut Self::Target {{
- &mut self.{}
+ &mut self.{field_name}
}}"#,
- field_name
),
};
let strukt_adt = ast::Adt::Struct(strukt);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
index c91141f8e..b8415c72a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
@@ -139,40 +139,44 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
let mut example = String::new();
+ let use_path = build_path(ast_func, ctx)?;
let is_unsafe = ast_func.unsafe_token().is_some();
let param_list = ast_func.param_list()?;
let ref_mut_params = ref_mut_params(&param_list);
let self_name = self_name(ast_func);
- format_to!(example, "use {};\n\n", build_path(ast_func, ctx)?);
+ format_to!(example, "use {use_path};\n\n");
if let Some(self_name) = &self_name {
- if let Some(mtbl) = is_ref_mut_self(ast_func) {
- let mtbl = if mtbl == true { " mut" } else { "" };
- format_to!(example, "let{} {} = ;\n", mtbl, self_name);
+ if let Some(mut_) = is_ref_mut_self(ast_func) {
+ let mut_ = if mut_ == true { "mut " } else { "" };
+ format_to!(example, "let {mut_}{self_name} = ;\n");
}
}
for param_name in &ref_mut_params {
- format_to!(example, "let mut {} = ;\n", param_name);
+ format_to!(example, "let mut {param_name} = ;\n");
}
// Call the function, check result
let function_call = function_call(ast_func, &param_list, self_name.as_deref(), is_unsafe)?;
if returns_a_value(ast_func, ctx) {
if count_parameters(&param_list) < 3 {
- format_to!(example, "assert_eq!({}, );\n", function_call);
+ format_to!(example, "assert_eq!({function_call}, );\n");
} else {
- format_to!(example, "let result = {};\n", function_call);
+ format_to!(example, "let result = {function_call};\n");
example.push_str("assert_eq!(result, );\n");
}
} else {
- format_to!(example, "{};\n", function_call);
+ format_to!(example, "{function_call};\n");
}
// Check the mutated values
- if is_ref_mut_self(ast_func) == Some(true) {
- format_to!(example, "assert_eq!({}, );", self_name?);
+ if let Some(self_name) = &self_name {
+ if is_ref_mut_self(ast_func) == Some(true) {
+ format_to!(example, "assert_eq!({self_name}, );");
+ }
}
for param_name in &ref_mut_params {
- format_to!(example, "assert_eq!({}, );", param_name);
+ format_to!(example, "assert_eq!({param_name}, );");
}
+
Some(example)
}
@@ -189,7 +193,8 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<S
let intro_for_new = || {
let is_new = name == "new";
if is_new && ret_ty == self_ty {
- Some(format!("Creates a new [`{}`].", linkable_self_ty?))
+ let self_ty = linkable_self_ty?;
+ Some(format!("Creates a new [`{self_ty}`]."))
} else {
None
}
@@ -214,7 +219,9 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<S
} else {
""
};
- Some(format!("Returns{reference} the {what} of this [`{}`].", linkable_self_ty?))
+
+ let self_ty = linkable_self_ty?;
+ Some(format!("Returns{reference} the {what} of this [`{self_ty}`]."))
}
_ => None,
};
@@ -228,7 +235,9 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<S
if what == "len" {
what = "length".into()
};
- Some(format!("Sets the {what} of this [`{}`].", linkable_self_ty?))
+
+ let self_ty = linkable_self_ty?;
+ Some(format!("Sets the {what} of this [`{self_ty}`]."))
};
if let Some(intro) = intro_for_new() {
@@ -404,7 +413,7 @@ fn arguments_from_params(param_list: &ast::ParamList) -> String {
// instance `TuplePat`) could be managed later.
Some(ast::Pat::IdentPat(ident_pat)) => match ident_pat.name() {
Some(name) => match is_a_ref_mut_param(&param) {
- true => format!("&mut {}", name),
+ true => format!("&mut {name}"),
false => name.to_string(),
},
None => "_".to_string(),
@@ -424,14 +433,15 @@ fn function_call(
let name = ast_func.name()?;
let arguments = arguments_from_params(param_list);
let function_call = if param_list.self_param().is_some() {
- format!("{}.{}({})", self_name?, name, arguments)
+ let self_ = self_name?;
+ format!("{self_}.{name}({arguments})")
} else if let Some(implementation) = self_partial_type(ast_func) {
- format!("{}::{}({})", implementation, name, arguments)
+ format!("{implementation}::{name}({arguments})")
} else {
- format!("{}({})", name, arguments)
+ format!("{name}({arguments})")
};
match is_unsafe {
- true => Some(format!("unsafe {{ {} }}", function_call)),
+ true => Some(format!("unsafe {{ {function_call} }}")),
false => Some(function_call),
}
}
@@ -469,8 +479,8 @@ fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
.unwrap_or_else(|| "*".into());
let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
match module_def.canonical_path(ctx.db()) {
- Some(path) => Some(format!("{}::{}::{}", crate_name, path, leaf)),
- None => Some(format!("{}::{}", crate_name, leaf)),
+ Some(path) => Some(format!("{crate_name}::{path}::{leaf}")),
+ None => Some(format!("{crate_name}::{leaf}")),
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
index 52d27d8a7..63e91b835 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
@@ -52,7 +52,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
+ let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
let target = variant.syntax().text_range();
acc.add_group(
@@ -61,21 +61,15 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate an `is_` method for this enum variant",
target,
|builder| {
- let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let method = format!(
- " /// Returns `true` if the {} is [`{variant}`].
+ " /// Returns `true` if the {enum_lowercase_name} is [`{variant_name}`].
///
- /// [`{variant}`]: {}::{variant}
+ /// [`{variant_name}`]: {enum_name}::{variant_name}
#[must_use]
- {}fn {}(&self) -> bool {{
- matches!(self, Self::{variant}{})
+ {vis}fn {fn_name}(&self) -> bool {{
+ matches!(self, Self::{variant_name}{pattern_suffix})
}}",
- enum_lowercase_name,
- enum_name,
- vis,
- fn_name,
- pattern_suffix,
- variant = variant_name
);
add_method_to_adt(builder, &parent_enum, impl_def, &method);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
index b19aa0f65..bdd3cf4f0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
@@ -116,6 +116,14 @@ fn generate_enum_projection_method(
assist_description: &str,
props: ProjectionProps,
) -> Option<()> {
+ let ProjectionProps {
+ fn_name_prefix,
+ self_param,
+ return_prefix,
+ return_suffix,
+ happy_case,
+ sad_case,
+ } = props;
let variant = ctx.find_node_at_offset::<ast::Variant>()?;
let variant_name = variant.name()?;
let parent_enum = ast::Adt::Enum(variant.parent_enum());
@@ -125,7 +133,7 @@ fn generate_enum_projection_method(
let (field,) = record.fields().collect_tuple()?;
let name = field.name()?.to_string();
let ty = field.ty()?;
- let pattern_suffix = format!(" {{ {} }}", name);
+ let pattern_suffix = format!(" {{ {name} }}");
(pattern_suffix, ty, name)
}
ast::StructKind::Tuple(tuple) => {
@@ -136,11 +144,10 @@ fn generate_enum_projection_method(
ast::StructKind::Unit => return None,
};
- let fn_name =
- format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
+ let fn_name = format!("{}_{}", fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
+ let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
let target = variant.syntax().text_range();
acc.add_group(
@@ -149,27 +156,15 @@ fn generate_enum_projection_method(
assist_description,
target,
|builder| {
- let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let method = format!(
- " {0}fn {1}({2}) -> {3}{4}{5} {{
- if let Self::{6}{7} = self {{
- {8}({9})
+ " {vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type}{return_suffix} {{
+ if let Self::{variant_name}{pattern_suffix} = self {{
+ {happy_case}({bound_name})
}} else {{
- {10}
+ {sad_case}
}}
- }}",
- vis,
- fn_name,
- props.self_param,
- props.return_prefix,
- field_type.syntax(),
- props.return_suffix,
- variant_name,
- pattern_suffix,
- props.happy_case,
- bound_name,
- props.sad_case,
- );
+ }}");
add_method_to_adt(builder, &parent_enum, impl_def, &method);
},
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
index 4461fbd5a..35cd42908 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -1,8 +1,8 @@
-use hir::{HasSource, InFile};
+use hir::{HasSource, HirDisplay, InFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
- ast::{self, edit::IndentLevel},
- AstNode, TextSize,
+ ast::{self, make, HasArgList},
+ match_ast, AstNode, SyntaxNode,
};
use crate::assist_context::{AssistContext, Assists};
@@ -32,8 +32,8 @@ use crate::assist_context::{AssistContext, Assists};
// }
// ```
pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
- let path = path_expr.path()?;
+ let path: ast::Path = ctx.find_node_at_offset()?;
+ let parent = path_parent(&path)?;
if ctx.sema.resolve_path(&path).is_some() {
// No need to generate anything if the path resolves
@@ -50,26 +50,71 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>)
ctx.sema.resolve_path(&path.qualifier()?)
{
let target = path.syntax().text_range();
- return add_variant_to_accumulator(acc, ctx, target, e, &name_ref);
+ return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, parent);
}
None
}
+#[derive(Debug)]
+enum PathParent {
+ PathExpr(ast::PathExpr),
+ RecordExpr(ast::RecordExpr),
+ PathPat(ast::PathPat),
+ UseTree(ast::UseTree),
+}
+
+impl PathParent {
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ PathParent::PathExpr(it) => it.syntax(),
+ PathParent::RecordExpr(it) => it.syntax(),
+ PathParent::PathPat(it) => it.syntax(),
+ PathParent::UseTree(it) => it.syntax(),
+ }
+ }
+
+ fn make_field_list(&self, ctx: &AssistContext<'_>) -> Option<ast::FieldList> {
+ let scope = ctx.sema.scope(self.syntax())?;
+
+ match self {
+ PathParent::PathExpr(it) => {
+ if let Some(call_expr) = it.syntax().parent().and_then(ast::CallExpr::cast) {
+ make_tuple_field_list(call_expr, ctx, &scope)
+ } else {
+ None
+ }
+ }
+ PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope),
+ PathParent::UseTree(_) | PathParent::PathPat(_) => None,
+ }
+ }
+}
+
+fn path_parent(path: &ast::Path) -> Option<PathParent> {
+ let parent = path.syntax().parent()?;
+
+ match_ast! {
+ match parent {
+ ast::PathExpr(it) => Some(PathParent::PathExpr(it)),
+ ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)),
+ ast::PathPat(it) => Some(PathParent::PathPat(it)),
+ ast::UseTree(it) => Some(PathParent::UseTree(it)),
+ _ => None
+ }
+ }
+}
+
fn add_variant_to_accumulator(
acc: &mut Assists,
ctx: &AssistContext<'_>,
target: syntax::TextRange,
adt: hir::Enum,
name_ref: &ast::NameRef,
+ parent: PathParent,
) -> Option<()> {
let db = ctx.db();
let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
- let enum_indent = IndentLevel::from_node(&enum_node.syntax());
-
- let variant_list = enum_node.variant_list()?;
- let offset = variant_list.syntax().text_range().end() - TextSize::of('}');
- let empty_enum = variant_list.variants().next().is_none();
acc.add(
AssistId("generate_enum_variant", AssistKind::Generate),
@@ -77,18 +122,80 @@ fn add_variant_to_accumulator(
target,
|builder| {
builder.edit_file(file_id.original_file(db));
- let text = format!(
- "{maybe_newline}{indent_1}{name},\n{enum_indent}",
- maybe_newline = if empty_enum { "\n" } else { "" },
- indent_1 = IndentLevel(1),
- name = name_ref,
- enum_indent = enum_indent
- );
- builder.insert(offset, text)
+ let node = builder.make_mut(enum_node);
+ let variant = make_variant(ctx, name_ref, parent);
+ node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
},
)
}
+fn make_variant(
+ ctx: &AssistContext<'_>,
+ name_ref: &ast::NameRef,
+ parent: PathParent,
+) -> ast::Variant {
+ let field_list = parent.make_field_list(ctx);
+ make::variant(make::name(&name_ref.text()), field_list)
+}
+
+fn make_record_field_list(
+ record: &ast::RecordExpr,
+ ctx: &AssistContext<'_>,
+ scope: &hir::SemanticsScope<'_>,
+) -> Option<ast::FieldList> {
+ let fields = record.record_expr_field_list()?.fields();
+ let record_fields = fields.map(|field| {
+ let name = name_from_field(&field);
+
+ let ty = field
+ .expr()
+ .and_then(|it| expr_ty(ctx, it, scope))
+ .unwrap_or_else(make::ty_placeholder);
+
+ make::record_field(None, name, ty)
+ });
+ Some(make::record_field_list(record_fields).into())
+}
+
+fn name_from_field(field: &ast::RecordExprField) -> ast::Name {
+ let text = match field.name_ref() {
+ Some(it) => it.to_string(),
+ None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()),
+ };
+ make::name(&text)
+}
+
+fn name_from_field_shorthand(field: &ast::RecordExprField) -> Option<String> {
+ let path = match field.expr()? {
+ ast::Expr::PathExpr(path_expr) => path_expr.path(),
+ _ => None,
+ }?;
+ Some(path.as_single_name_ref()?.to_string())
+}
+
+fn make_tuple_field_list(
+ call_expr: ast::CallExpr,
+ ctx: &AssistContext<'_>,
+ scope: &hir::SemanticsScope<'_>,
+) -> Option<ast::FieldList> {
+ let args = call_expr.arg_list()?.args();
+ let tuple_fields = args.map(|arg| {
+ let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder);
+ make::tuple_field(None, ty)
+ });
+ Some(make::tuple_field_list(tuple_fields).into())
+}
+
+fn expr_ty(
+ ctx: &AssistContext<'_>,
+ arg: ast::Expr,
+ scope: &hir::SemanticsScope<'_>,
+) -> Option<ast::Type> {
+ let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted())?;
+ let text = ty.display_source_code(ctx.db(), scope.module().into()).ok()?;
+ Some(make::ty(&text))
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -224,4 +331,234 @@ fn main() {
",
)
}
+
+ #[test]
+ fn associated_single_element_tuple() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true)
+}
+",
+ r"
+enum Foo {
+ Bar(bool),
+}
+fn main() {
+ Foo::Bar(true)
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_single_element_tuple_unknown_type() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(x)
+}
+",
+ r"
+enum Foo {
+ Bar(_),
+}
+fn main() {
+ Foo::Bar(x)
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_multi_element_tuple() {
+ check_assist(
+ generate_enum_variant,
+ r"
+struct Struct {}
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true, x, Struct {})
+}
+",
+ r"
+struct Struct {}
+enum Foo {
+ Bar(bool, _, Struct),
+}
+fn main() {
+ Foo::Bar(true, x, Struct {})
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x: true }
+}
+",
+ r"
+enum Foo {
+ Bar { x: bool },
+}
+fn main() {
+ Foo::Bar { x: true }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_unknown_type() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x: y }
+}
+",
+ r"
+enum Foo {
+ Bar { x: _ },
+}
+fn main() {
+ Foo::Bar { x: y }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_field_shorthand() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ let x = true;
+ Foo::$0Bar { x }
+}
+",
+ r"
+enum Foo {
+ Bar { x: bool },
+}
+fn main() {
+ let x = true;
+ Foo::Bar { x }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_field_shorthand_unknown_type() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x }
+}
+",
+ r"
+enum Foo {
+ Bar { x: _ },
+}
+fn main() {
+ Foo::Bar { x }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_field_multiple_fields() {
+ check_assist(
+ generate_enum_variant,
+ r"
+struct Struct {}
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x, y: x, s: Struct {} }
+}
+",
+ r"
+struct Struct {}
+enum Foo {
+ Bar { x: _, y: _, s: Struct },
+}
+fn main() {
+ Foo::Bar { x, y: x, s: Struct {} }
+}
+",
+ )
+ }
+
+ #[test]
+ fn use_tree() {
+ check_assist(
+ generate_enum_variant,
+ r"
+//- /main.rs
+mod foo;
+use foo::Foo::Bar$0;
+
+//- /foo.rs
+enum Foo {}
+",
+ r"
+enum Foo {
+ Bar,
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_path_type() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {}
+impl Foo::Bar$0 {}
+",
+ )
+ }
+
+ #[test]
+ fn path_pat() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn foo(x: Foo) {
+ match x {
+ Foo::Bar$0 =>
+ }
+}
+",
+ r"
+enum Foo {
+ Bar,
+}
+fn foo(x: Foo) {
+ match x {
+ Foo::Bar =>
+ }
+}
+",
+ )
+ }
}
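
The assist now derives the new variant's field list from the call site, falling back to a `_` placeholder when an argument's type cannot be inferred (see the `Bar(bool, _, Struct)` expectation above). A toy sketch of the rendering step for the tuple case, with inferred types passed in as plain strings; `render_tuple_variant` is illustrative only:

    // Toy sketch: render the tuple-variant text from the (possibly unknown)
    // types of the call-site arguments; `None` becomes the `_` placeholder,
    // just as `expr_ty(..)` falling back to `make::ty_placeholder()` does above.
    fn render_tuple_variant(name: &str, arg_types: &[Option<&str>]) -> String {
        let fields: Vec<&str> = arg_types.iter().map(|ty| ty.unwrap_or("_")).collect();
        format!("{name}({})", fields.join(", "))
    }

    fn main() {
        assert_eq!(
            render_tuple_variant("Bar", &[Some("bool"), None, Some("Struct")]),
            "Bar(bool, _, Struct)"
        );
    }
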
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
index 507ea012b..7c81d2c6a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -56,23 +56,18 @@ pub(crate) fn generate_from_impl_for_enum(
target,
|edit| {
let start_offset = variant.parent_enum().syntax().text_range().end();
- let from_trait = format!("From<{}>", field_type.syntax());
+ let from_trait = format!("From<{field_type}>");
let impl_code = if let Some(name) = field_name {
format!(
- r#" fn from({0}: {1}) -> Self {{
- Self::{2} {{ {0} }}
- }}"#,
- name.text(),
- field_type.syntax(),
- variant_name,
+ r#" fn from({name}: {field_type}) -> Self {{
+ Self::{variant_name} {{ {name} }}
+ }}"#
)
} else {
format!(
- r#" fn from(v: {}) -> Self {{
- Self::{}(v)
- }}"#,
- field_type.syntax(),
- variant_name,
+ r#" fn from(v: {field_type}) -> Self {{
+ Self::{variant_name}(v)
+ }}"#
)
};
let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index d564a0540..c229127e4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, HirDisplay, Module, Semantics, TypeInfo};
+use hir::{Adt, HasSource, HirDisplay, Module, Semantics, TypeInfo};
use ide_db::{
base_db::FileId,
defs::{Definition, NameRefClass},
@@ -61,56 +61,72 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
}
let fn_name = &*name_ref.text();
- let target_module;
- let mut adt_name = None;
+ let TargetInfo { target_module, adt_name, target, file, insert_offset } =
+ fn_target_info(ctx, path, &call, fn_name)?;
+ let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let label = format!("Generate {} function", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+struct TargetInfo {
+ target_module: Option<Module>,
+ adt_name: Option<hir::Name>,
+ target: GeneratedFunctionTarget,
+ file: FileId,
+ insert_offset: TextSize,
+}
- let (target, file, insert_offset) = match path.qualifier() {
+impl TargetInfo {
+ fn new(
+ target_module: Option<Module>,
+ adt_name: Option<hir::Name>,
+ target: GeneratedFunctionTarget,
+ file: FileId,
+ insert_offset: TextSize,
+ ) -> Self {
+ Self { target_module, adt_name, target, file, insert_offset }
+ }
+}
+
+fn fn_target_info(
+ ctx: &AssistContext<'_>,
+ path: ast::Path,
+ call: &CallExpr,
+ fn_name: &str,
+) -> Option<TargetInfo> {
+ match path.qualifier() {
Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
- target_module = Some(module);
- get_fn_target(ctx, &target_module, call.clone())?
+ get_fn_target_info(ctx, &Some(module), call.clone())
}
Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
if let hir::Adt::Enum(_) = adt {
// Don't suggest generating function if the name starts with an uppercase letter
- if name_ref.text().starts_with(char::is_uppercase) {
+ if fn_name.starts_with(char::is_uppercase) {
return None;
}
}
- let current_module = ctx.sema.scope(call.syntax())?.module();
- let module = adt.module(ctx.sema.db);
- target_module = if current_module == module { None } else { Some(module) };
- if current_module.krate() != module.krate() {
- return None;
- }
- let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
- let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?;
- adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
- (target, file, insert_offset)
+ assoc_fn_target_info(ctx, call, adt, fn_name)
}
- _ => {
- return None;
+ Some(hir::PathResolution::SelfType(impl_)) => {
+ let adt = impl_.self_ty(ctx.db()).as_adt()?;
+ assoc_fn_target_info(ctx, call, adt, fn_name)
}
+ _ => None,
},
- _ => {
- target_module = None;
- get_fn_target(ctx, &target_module, call.clone())?
- }
- };
- let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
- let text_range = call.syntax().text_range();
- let label = format!("Generate {} function", function_builder.fn_name);
- add_func_to_accumulator(
- acc,
- ctx,
- text_range,
- function_builder,
- insert_offset,
- file,
- adt_name,
- label,
- )
+ _ => get_fn_target_info(ctx, &None, call.clone()),
+ }
}
fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
@@ -129,7 +145,8 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
return None;
}
let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
- let (target, insert_offset) = get_method_target(ctx, &target_module, &impl_)?;
+ let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
+
let function_builder =
FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?;
let text_range = call.syntax().text_range();
@@ -158,10 +175,11 @@ fn add_func_to_accumulator(
label: String,
) -> Option<()> {
acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| {
- let function_template = function_builder.render();
+ let indent = IndentLevel::from_node(function_builder.target.syntax());
+ let function_template = function_builder.render(adt_name.is_some());
let mut func = function_template.to_string(ctx.config.snippet_cap);
if let Some(name) = adt_name {
- func = format!("\nimpl {} {{\n{}\n}}", name, func);
+ func = format!("\n{indent}impl {name} {{\n{func}\n{indent}}}");
}
builder.edit_file(file);
match ctx.config.snippet_cap {
@@ -180,7 +198,7 @@ fn get_adt_source(
let file = ctx.sema.parse(range.file_id);
let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
- find_struct_impl(ctx, &adt_source, fn_name).map(|impl_| (impl_, range.file_id))
+ find_struct_impl(ctx, &adt_source, &[fn_name.to_string()]).map(|impl_| (impl_, range.file_id))
}
struct FunctionTemplate {
@@ -194,23 +212,26 @@ struct FunctionTemplate {
impl FunctionTemplate {
fn to_string(&self, cap: Option<SnippetCap>) -> String {
+ let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } =
+ self;
+
let f = match cap {
Some(cap) => {
- let cursor = if self.should_focus_return_type {
+ let cursor = if *should_focus_return_type {
// Focus the return type if there is one
- match self.ret_type {
- Some(ref ret_type) => ret_type.syntax(),
- None => self.tail_expr.syntax(),
+ match ret_type {
+ Some(ret_type) => ret_type.syntax(),
+ None => tail_expr.syntax(),
}
} else {
- self.tail_expr.syntax()
+ tail_expr.syntax()
};
- render_snippet(cap, self.fn_def.syntax(), Cursor::Replace(cursor))
+ render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor))
}
- None => self.fn_def.to_string(),
+ None => fn_def.to_string(),
};
- format!("{}{}{}", self.leading_ws, f, self.trailing_ws)
+ format!("{leading_ws}{f}{trailing_ws}")
}
}
@@ -291,7 +312,7 @@ impl FunctionBuilder {
})
}
- fn render(self) -> FunctionTemplate {
+ fn render(self, is_method: bool) -> FunctionTemplate {
let placeholder_expr = make::ext::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
@@ -309,16 +330,23 @@ impl FunctionBuilder {
match self.target {
GeneratedFunctionTarget::BehindItem(it) => {
- let indent = IndentLevel::from_node(&it);
- leading_ws = format!("\n\n{}", indent);
+ let mut indent = IndentLevel::from_node(&it);
+ if is_method {
+ indent = indent + 1;
+ leading_ws = format!("{indent}");
+ } else {
+ leading_ws = format!("\n\n{indent}");
+ }
+
fn_def = fn_def.indent(indent);
trailing_ws = String::new();
}
GeneratedFunctionTarget::InEmptyItemList(it) => {
let indent = IndentLevel::from_node(&it);
- leading_ws = format!("\n{}", indent + 1);
- fn_def = fn_def.indent(indent + 1);
- trailing_ws = format!("\n{}", indent);
+ let leading_indent = indent + 1;
+ leading_ws = format!("\n{leading_indent}");
+ fn_def = fn_def.indent(leading_indent);
+ trailing_ws = format!("\n{indent}");
}
};
@@ -366,6 +394,15 @@ fn make_return_type(
(ret_type, should_focus_return_type)
}
+fn get_fn_target_info(
+ ctx: &AssistContext<'_>,
+ target_module: &Option<Module>,
+ call: CallExpr,
+) -> Option<TargetInfo> {
+ let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?;
+ Some(TargetInfo::new(*target_module, None, target, file, insert_offset))
+}
+
fn get_fn_target(
ctx: &AssistContext<'_>,
target_module: &Option<Module>,
@@ -386,19 +423,36 @@ fn get_fn_target(
fn get_method_target(
ctx: &AssistContext<'_>,
- target_module: &Module,
impl_: &Option<ast::Impl>,
+ adt: &Adt,
) -> Option<(GeneratedFunctionTarget, TextSize)> {
let target = match impl_ {
Some(impl_) => next_space_for_fn_in_impl(impl_)?,
None => {
- next_space_for_fn_in_module(ctx.sema.db, &target_module.definition_source(ctx.sema.db))?
- .1
+ GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone())
}
};
Some((target.clone(), get_insert_offset(&target)))
}
+fn assoc_fn_target_info(
+ ctx: &AssistContext<'_>,
+ call: &CallExpr,
+ adt: hir::Adt,
+ fn_name: &str,
+) -> Option<TargetInfo> {
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let module = adt.module(ctx.sema.db);
+ let target_module = if current_module == module { None } else { Some(module) };
+ if current_module.krate() != module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
+ let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
+ let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset))
+}
+
fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
match &target {
GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
@@ -1425,14 +1479,12 @@ fn foo() {S.bar$0();}
",
r"
struct S;
-fn foo() {S.bar();}
impl S {
-
-
-fn bar(&self) ${0:-> _} {
- todo!()
-}
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
}
+fn foo() {S.bar();}
",
)
}
@@ -1473,14 +1525,12 @@ fn foo() {s::S.bar$0();}
r"
mod s {
pub struct S;
-impl S {
-
-
- pub(crate) fn bar(&self) ${0:-> _} {
- todo!()
+ impl S {
+ pub(crate) fn bar(&self) ${0:-> _} {
+ todo!()
+ }
}
}
-}
fn foo() {s::S.bar();}
",
)
@@ -1501,18 +1551,16 @@ mod s {
",
r"
struct S;
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
mod s {
fn foo() {
super::S.bar();
}
}
-impl S {
-
-
-fn bar(&self) ${0:-> _} {
- todo!()
-}
-}
",
)
@@ -1528,14 +1576,12 @@ fn foo() {$0S.bar();}
",
r"
struct S;
-fn foo() {S.bar();}
impl S {
-
-
-fn bar(&self) ${0:-> _} {
- todo!()
-}
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
}
+fn foo() {S.bar();}
",
)
}
@@ -1550,14 +1596,12 @@ fn foo() {S::bar$0();}
",
r"
struct S;
-fn foo() {S::bar();}
impl S {
-
-
-fn bar() ${0:-> _} {
- todo!()
-}
+ fn bar() ${0:-> _} {
+ todo!()
+ }
}
+fn foo() {S::bar();}
",
)
}
@@ -1598,14 +1642,12 @@ fn foo() {s::S::bar$0();}
r"
mod s {
pub struct S;
-impl S {
-
-
- pub(crate) fn bar() ${0:-> _} {
- todo!()
+ impl S {
+ pub(crate) fn bar() ${0:-> _} {
+ todo!()
+ }
}
}
-}
fn foo() {s::S::bar();}
",
)
@@ -1621,13 +1663,38 @@ fn foo() {$0S::bar();}
",
r"
struct S;
-fn foo() {S::bar();}
impl S {
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+fn foo() {S::bar();}
+",
+ )
+ }
-
-fn bar() ${0:-> _} {
- todo!()
+ #[test]
+ fn create_static_method_within_an_impl_with_self_syntax() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+impl S {
+ fn foo(&self) {
+ Self::bar$0();
+ }
}
+",
+ r"
+struct S;
+impl S {
+ fn foo(&self) {
+ Self::bar();
+ }
+
+ fn bar() ${0:-> _} {
+ todo!()
+ }
}
",
)
@@ -1771,15 +1838,13 @@ fn main() {
",
r"
enum Foo {}
-fn main() {
- Foo::new();
-}
impl Foo {
-
-
-fn new() ${0:-> _} {
- todo!()
+ fn new() ${0:-> _} {
+ todo!()
+ }
}
+fn main() {
+ Foo::new();
}
",
)
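
The updated expectations above reflect the new rendering: when a type has no impl block yet, the generated associated function is wrapped in `impl <Name> { ... }` directly behind the type, indented to the enclosing level, instead of being appended at the end of the module. A string-level sketch of the wrapping step; `wrap_in_impl` is a hypothetical helper:

    // Hypothetical helper mirroring the `"\n{indent}impl {name} {{\n{func}\n{indent}}}"`
    // wrapping used in add_func_to_accumulator when no impl block exists yet.
    fn wrap_in_impl(indent: &str, name: &str, func: &str) -> String {
        format!("\n{indent}impl {name} {{\n{func}\n{indent}}}")
    }

    fn main() {
        let func = "    fn bar() -> i32 {\n        todo!()\n    }";
        let wrapped = wrap_in_impl("", "S", func);
        assert!(wrapped.starts_with("\nimpl S {"));
    }
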
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
index 76fcef0ca..5e7191428 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
@@ -1,6 +1,9 @@
use ide_db::famous_defs::FamousDefs;
use stdx::{format_to, to_lower_snake_case};
-use syntax::ast::{self, AstNode, HasName, HasVisibility};
+use syntax::{
+ ast::{self, AstNode, HasName, HasVisibility},
+ TextRange,
+};
use crate::{
utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text},
@@ -72,92 +75,259 @@ pub(crate) fn generate_getter_mut(acc: &mut Assists, ctx: &AssistContext<'_>) ->
generate_getter_impl(acc, ctx, true)
}
+#[derive(Clone, Debug)]
+struct RecordFieldInfo {
+ field_name: syntax::ast::Name,
+ field_ty: syntax::ast::Type,
+ fn_name: String,
+ target: TextRange,
+}
+
+struct GetterInfo {
+ impl_def: Option<ast::Impl>,
+ strukt: ast::Struct,
+ mutable: bool,
+}
+
pub(crate) fn generate_getter_impl(
acc: &mut Assists,
ctx: &AssistContext<'_>,
mutable: bool,
) -> Option<()> {
- let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+    // This condition distinguishes the two modes this assist can work in:
+    // - acting upon a selection of record fields
+    // - acting upon a single record field
+    //
+    // This is the only part where the implementation diverges;
+    // the subsequent code is generic over both modes
- let field_name = field.name()?;
- let field_ty = field.ty()?;
+ let (strukt, info_of_record_fields, fn_names) = if !ctx.has_empty_selection() {
+ // Selection Mode
+ let node = ctx.covering_element();
- // Return early if we've found an existing fn
- let mut fn_name = to_lower_snake_case(&field_name.to_string());
- if mutable {
- format_to!(fn_name, "_mut");
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ let parent_struct = node.ancestors().find_map(ast::Struct::cast)?;
+
+ let (info_of_record_fields, field_names) =
+ extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), mutable)?;
+
+ (parent_struct, info_of_record_fields, field_names)
+ } else {
+ // Single Record Field mode
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+
+ let record_field_info = parse_record_field(field, mutable)?;
+
+ let fn_name = record_field_info.fn_name.clone();
+
+ (strukt, vec![record_field_info], vec![fn_name])
+ };
+
+ // No record fields to do work on :(
+ if info_of_record_fields.len() == 0 {
+ return None;
}
- let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
+
+ let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;
let (id, label) = if mutable {
("generate_getter_mut", "Generate a mut getter method")
} else {
("generate_getter", "Generate a getter method")
};
- let target = field.syntax().text_range();
+
+ // Computing collective text range of all record fields in selected region
+ let target: TextRange = info_of_record_fields
+ .iter()
+ .map(|record_field_info| record_field_info.target)
+ .reduce(|acc, target| acc.cover(target))?;
+
+ let getter_info = GetterInfo { impl_def, strukt, mutable };
+
acc.add_group(
&GroupLabel("Generate getter/setter".to_owned()),
AssistId(id, AssistKind::Generate),
label,
target,
|builder| {
+ let record_fields_count = info_of_record_fields.len();
+
let mut buf = String::with_capacity(512);
- if impl_def.is_some() {
- buf.push('\n');
+ // Check if an impl exists
+ if let Some(impl_def) = &getter_info.impl_def {
+ // Check if impl is empty
+ if let Some(assoc_item_list) = impl_def.assoc_item_list() {
+ if assoc_item_list.assoc_items().next().is_some() {
+                        // Only insert a newline if the impl is not empty
+ buf.push('\n');
+ }
+ }
}
- let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
- let (ty, body) = if mutable {
- (format!("&mut {}", field_ty), format!("&mut self.{}", field_name))
- } else {
- (|| {
- let krate = ctx.sema.scope(field_ty.syntax())?.krate();
- let famous_defs = &FamousDefs(&ctx.sema, krate);
- ctx.sema
- .resolve_type(&field_ty)
- .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
- .map(|conversion| {
- cov_mark::hit!(convert_reference_type);
- (
- conversion.convert_type(ctx.db()),
- conversion.getter(field_name.to_string()),
- )
- })
- })()
- .unwrap_or_else(|| (format!("&{}", field_ty), format!("&self.{}", field_name)))
- };
-
- format_to!(
- buf,
- " {}fn {}(&{}self) -> {} {{
- {}
- }}",
- vis,
- fn_name,
- mutable.then(|| "mut ").unwrap_or_default(),
- ty,
- body,
- );
-
- let start_offset = impl_def
- .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+ for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
+                // this buf automatically inserts a newline at the end of a
+                // getter; if one wants one more newline to separate it from
+                // other assoc items, that needs
+                // to be handled separately
+ let mut getter_buf =
+ generate_getter_from_info(ctx, &getter_info, &record_field_info);
+
+ // Insert `$0` only for last getter we generate
+ if i == record_fields_count - 1 {
+ getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
+ }
+
+                // For the first element we do not prepend '\n', as
+                // that may already be inserted by the impl_def check
+                // above. The other cases are:
+                //
+                // - the impl exists but is empty; here we would ideally
+                // not want to keep a newline between the `impl <struct> {`
+                // line and the `fn <fn-name>() {` line
+                //
+                // - the impl itself does not exist; in this
+                // case we generate a new impl ourselves, and that
+                // again ends up with the same reasoning as above
+                // for not keeping a newline
+ if i == 0 {
+ buf = buf + &getter_buf;
+ } else {
+ buf = buf + "\n" + &getter_buf;
+ }
+
+                // We don't insert a newline at the end of the
+                // last getter, since it would end up at the end
+                // of the impl, where we don't want to separate
+                // the getter from the closing `}` of the impl with
+                // an extra blank line for no reason
+ if i < record_fields_count - 1 {
+ buf = buf + "\n";
+ }
+ }
+
+ let start_offset = getter_info
+ .impl_def
+ .as_ref()
+ .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf))
.unwrap_or_else(|| {
- buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
- strukt.syntax().text_range().end()
+ buf = generate_impl_text(&ast::Adt::Struct(getter_info.strukt.clone()), &buf);
+ getter_info.strukt.syntax().text_range().end()
});
match ctx.config.snippet_cap {
- Some(cap) => {
- builder.insert_snippet(cap, start_offset, buf.replacen("fn ", "fn $0", 1))
- }
+ Some(cap) => builder.insert_snippet(cap, start_offset, buf),
None => builder.insert(start_offset, buf),
}
},
)
}
+fn generate_getter_from_info(
+ ctx: &AssistContext<'_>,
+ info: &GetterInfo,
+ record_field_info: &RecordFieldInfo,
+) -> String {
+ let mut buf = String::with_capacity(512);
+
+ let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let (ty, body) = if info.mutable {
+ (
+ format!("&mut {}", record_field_info.field_ty),
+ format!("&mut self.{}", record_field_info.field_name),
+ )
+ } else {
+ (|| {
+ let krate = ctx.sema.scope(record_field_info.field_ty.syntax())?.krate();
+ let famous_defs = &FamousDefs(&ctx.sema, krate);
+ ctx.sema
+ .resolve_type(&record_field_info.field_ty)
+ .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
+ .map(|conversion| {
+ cov_mark::hit!(convert_reference_type);
+ (
+ conversion.convert_type(ctx.db()),
+ conversion.getter(record_field_info.field_name.to_string()),
+ )
+ })
+ })()
+ .unwrap_or_else(|| {
+ (
+ format!("&{}", record_field_info.field_ty),
+ format!("&self.{}", record_field_info.field_name),
+ )
+ })
+ };
+
+ format_to!(
+ buf,
+ " {}fn {}(&{}self) -> {} {{
+ {}
+ }}",
+ vis,
+ record_field_info.fn_name,
+ info.mutable.then(|| "mut ").unwrap_or_default(),
+ ty,
+ body,
+ );
+
+ buf
+}
+
+fn extract_and_parse_record_fields(
+ node: &ast::Struct,
+ selection_range: TextRange,
+ mutable: bool,
+) -> Option<(Vec<RecordFieldInfo>, Vec<String>)> {
+ let mut field_names: Vec<String> = vec![];
+ let field_list = node.field_list()?;
+
+ match field_list {
+ ast::FieldList::RecordFieldList(ele) => {
+ let info_of_record_fields_in_selection = ele
+ .fields()
+ .filter_map(|record_field| {
+ if selection_range.contains_range(record_field.syntax().text_range()) {
+ let record_field_info = parse_record_field(record_field, mutable)?;
+ field_names.push(record_field_info.fn_name.clone());
+ return Some(record_field_info);
+ }
+
+ None
+ })
+ .collect::<Vec<RecordFieldInfo>>();
+
+ if info_of_record_fields_in_selection.len() == 0 {
+ return None;
+ }
+
+ Some((info_of_record_fields_in_selection, field_names))
+ }
+ ast::FieldList::TupleFieldList(_) => {
+ return None;
+ }
+ }
+}
+
+fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<RecordFieldInfo> {
+ let field_name = record_field.name()?;
+ let field_ty = record_field.ty()?;
+
+ let mut fn_name = to_lower_snake_case(&field_name.to_string());
+ if mutable {
+ format_to!(fn_name, "_mut");
+ }
+
+ let target = record_field.syntax().text_range();
+
+ Some(RecordFieldInfo { field_name, field_ty, fn_name, target })
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -489,4 +659,53 @@ impl Context {
"#,
);
}
+
+ #[test]
+ fn test_generate_multiple_getters_from_selection() {
+ check_assist(
+ generate_getter,
+ r#"
+struct Context {
+ $0data: Data,
+ count: usize,$0
+}
+ "#,
+ r#"
+struct Context {
+ data: Data,
+ count: usize,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+
+ fn $0count(&self) -> &usize {
+ &self.count
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_generate_multiple_getters_from_selection_one_already_exists() {
+ // As impl for one of the fields already exist, skip it
+ check_assist_not_applicable(
+ generate_getter,
+ r#"
+struct Context {
+ $0data: Data,
+ count: usize,$0
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+}
+ "#,
+ );
+ }
}
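
`find_struct_impl` now takes a slice of function names, which lets the selection mode bail out when any of the requested getters already exists (the second test above). A toy sketch of that applicability check; `applicable` is illustrative and not part of the assist's API:

    // Toy sketch of the "any of these names already exists" check performed via
    // `find_struct_impl(ctx, &adt, &fn_names)`; returning None marks the assist
    // as not applicable.
    fn applicable(existing_fns: &[&str], wanted_fns: &[String]) -> Option<()> {
        if wanted_fns.iter().any(|name| existing_fns.contains(&name.as_str())) {
            return None;
        }
        Some(())
    }

    fn main() {
        assert!(applicable(&["data"], &["data".to_string(), "count".to_string()]).is_none());
        assert!(applicable(&["len"], &["data".to_string()]).is_some());
    }
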
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
index 68287a20b..9af26c04e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
@@ -28,7 +28,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
acc.add(
AssistId("generate_impl", AssistKind::Generate),
- format!("Generate impl for `{}`", name),
+ format!("Generate impl for `{name}`"),
target,
|edit| {
let start_offset = nominal.syntax().text_range().end();
@@ -52,6 +52,7 @@ mod tests {
use super::*;
+ // FIXME: break up into separate test fns
#[test]
fn test_add_impl() {
check_assist(
@@ -136,6 +137,18 @@ mod tests {
check_assist(
generate_impl,
+ r#"
+ struct Defaulted<const N: i32 = 0> {}$0"#,
+ r#"
+ struct Defaulted<const N: i32 = 0> {}
+
+ impl<const N: i32> Defaulted<N> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
r#"pub trait Trait {}
struct Struct<T>$0
where
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
index 6c93875e9..17fadea0e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -39,7 +39,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
};
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
+ let impl_def =
+ find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[String::from("new")])?;
let current_module = ctx.sema.scope(strukt.syntax())?.module();
@@ -51,17 +52,22 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
buf.push('\n');
}
- let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
let trivial_constructors = field_list
.fields()
.map(|f| {
+ let name = f.name()?;
+
let ty = ctx.sema.resolve_type(&f.ty()?)?;
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
- let type_path = current_module
- .find_use_path(ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?)?;
+ let type_path = current_module.find_use_path(
+ ctx.sema.db,
+ item_for_path_search(ctx.sema.db, item_in_ns)?,
+ ctx.config.prefer_no_std,
+ )?;
let expr = use_trivial_constructor(
&ctx.sema.db,
@@ -69,7 +75,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
&ty,
)?;
- Some(format!("{}: {}", f.name()?.syntax(), expr))
+ Some(format!("{name}: {expr}"))
})
.collect::<Vec<_>>();
@@ -78,7 +84,10 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
.enumerate()
.filter_map(|(i, f)| {
if trivial_constructors[i].is_none() {
- Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax()))
+ let name = f.name()?;
+ let ty = f.ty()?;
+
+ Some(format!("{name}: {ty}"))
} else {
None
}
@@ -98,7 +107,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
})
.format(", ");
- format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields);
+ format_to!(buf, " {vis}fn new({params}) -> Self {{ Self {{ {fields} }} }}");
let start_offset = impl_def
.and_then(|impl_def| find_impl_block_start(impl_def, &mut buf))
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
index 2a7ad6ce3..62f72df1c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
@@ -36,11 +36,8 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
// Return early if we've found an existing fn
let fn_name = to_lower_snake_case(&field_name.to_string());
- let impl_def = find_struct_impl(
- ctx,
- &ast::Adt::Struct(strukt.clone()),
- format!("set_{}", fn_name).as_str(),
- )?;
+ let impl_def =
+ find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[format!("set_{fn_name}")])?;
let target = field.syntax().text_range();
acc.add_group(
@@ -55,18 +52,12 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
buf.push('\n');
}
- let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
format_to!(
buf,
- " {}fn set_{}(&mut self, {}: {}) {{
- self.{} = {};
- }}",
- vis,
- fn_name,
- fn_name,
- field_ty,
- fn_name,
- fn_name,
+ " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{
+ self.{fn_name} = {fn_name};
+ }}"
);
let start_offset = impl_def
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index 80d3b9255..9f51cdaf8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -7,13 +7,14 @@ use ide_db::{
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
search::{FileReference, SearchScope},
+ source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase,
};
use itertools::{izip, Itertools};
use syntax::{
ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
- ted, AstNode,
+ ted, AstNode, NodeOrToken, SyntaxKind,
};
use crate::{
@@ -100,18 +101,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
builder.edit_file(file_id);
let count = refs.len();
// The collects are required as we are otherwise iterating while mutating 🙅‍♀️🙅‍♂️
- let (name_refs, name_refs_use): (Vec<_>, Vec<_>) = refs
- .into_iter()
- .filter_map(|file_ref| match file_ref.name {
- ast::NameLike::NameRef(name_ref) => Some(name_ref),
- _ => None,
- })
- .partition_map(|name_ref| {
- match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
- Some(use_tree) => Either::Right(builder.make_mut(use_tree)),
- None => Either::Left(name_ref),
- }
- });
+ let (name_refs, name_refs_use) = split_refs_and_uses(builder, refs, Some);
let call_infos: Vec<_> = name_refs
.into_iter()
.filter_map(CallInfo::from_name_ref)
@@ -130,11 +120,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.count();
if replaced + name_refs_use.len() == count {
// we replaced all usages in this file, so we can remove the imports
- name_refs_use.into_iter().for_each(|use_tree| {
- if let Some(path) = use_tree.path() {
- remove_path_if_in_use_stmt(&path);
- }
- })
+ name_refs_use.iter().for_each(remove_path_if_in_use_stmt);
} else {
remove_def = false;
}
@@ -153,6 +139,23 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
)
}
+pub(super) fn split_refs_and_uses<T: ast::AstNode>(
+ builder: &mut SourceChangeBuilder,
+ iter: impl IntoIterator<Item = FileReference>,
+ mut map_ref: impl FnMut(ast::NameRef) -> Option<T>,
+) -> (Vec<T>, Vec<ast::Path>) {
+ iter.into_iter()
+ .filter_map(|file_ref| match file_ref.name {
+ ast::NameLike::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ })
+ .filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
+ Some(use_tree) => builder.make_mut(use_tree).path().map(Either::Right),
+ None => map_ref(name_ref).map(Either::Left),
+ })
+ .partition_map(|either| either)
+}
+
// Assist: inline_call
//
// Inlines a function or method body creating a `let` statement per parameter unless the parameter
@@ -311,6 +314,17 @@ fn inline(
} else {
fn_body.clone_for_update()
};
+ if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) {
+ if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
+ if let Some(t) = imp.self_ty() {
+ body.syntax()
+ .descendants_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
+ .for_each(|tok| ted::replace(tok, t.syntax()));
+ }
+ }
+ }
let usages_for_locals = |local| {
Definition::Local(local)
.usages(sema)
@@ -345,6 +359,7 @@ fn inline(
}
})
.collect();
+
if function.self_param(sema.db).is_some() {
let this = || make::name_ref("this").syntax().clone_for_update();
if let Some(self_local) = params[0].2.as_local(sema.db) {
@@ -1191,4 +1206,54 @@ fn bar() -> u32 {
"#,
)
}
+
+ #[test]
+ fn inline_call_with_self_type() {
+ check_assist(
+ inline_call,
+ r#"
+struct A(u32);
+impl A {
+ fn f() -> Self { Self(114514) }
+}
+fn main() {
+ A::f$0();
+}
+"#,
+ r#"
+struct A(u32);
+impl A {
+ fn f() -> Self { Self(114514) }
+}
+fn main() {
+ A(114514);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn inline_call_with_self_type_but_within_same_impl() {
+ check_assist(
+ inline_call,
+ r#"
+struct A(u32);
+impl A {
+ fn f() -> Self { Self(1919810) }
+ fn main() {
+ Self::f$0();
+ }
+}
+"#,
+ r#"
+struct A(u32);
+impl A {
+ fn f() -> Self { Self(1919810) }
+ fn main() {
+ Self(1919810);
+ }
+}
+"#,
+ )
+ }
}
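
The new `split_refs_and_uses` helper walks the found references once and sends each to one of two buckets: name references to rewrite at the call site, or paths inside `use` items that may need to be removed afterwards. A self-contained sketch of the same `Either`/`partition_map` idiom on toy data (assumes `itertools` as a dependency; `Reference` here is not rust-analyzer's `FileReference`):

```rust
use itertools::{Either, Itertools};

/// Toy stand-in for a found reference: its text and whether it sits inside a `use` item.
struct Reference {
    text: &'static str,
    in_use_item: bool,
}

/// One pass over the references, splitting them into "rewrite in place" and
/// "imports to clean up afterwards".
fn split(refs: Vec<Reference>) -> (Vec<&'static str>, Vec<&'static str>) {
    refs.into_iter().partition_map(|r| {
        if r.in_use_item {
            Either::Right(r.text)
        } else {
            Either::Left(r.text)
        }
    })
}

fn main() {
    let refs = vec![
        Reference { text: "foo()", in_use_item: false },
        Reference { text: "crate::foo", in_use_item: true },
        Reference { text: "foo(1, 2)", in_use_item: false },
    ];
    let (call_sites, imports) = split(refs);
    assert_eq!(call_sites, ["foo()", "foo(1, 2)"]);
    assert_eq!(imports, ["crate::foo"]);
}
```
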
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
index 054663a06..353d467ed 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -1,9 +1,12 @@
// Some ideas for future improvements:
// - Support replacing aliases which are used in expressions, e.g. `A::new()`.
-// - "inline_alias_to_users" assist #10881.
// - Remove unused aliases if there are no longer any users, see inline_call.rs.
use hir::{HasSource, PathResolution};
+use ide_db::{
+ defs::Definition, imports::insert_use::ast_to_remove_for_path_in_use_stmt,
+ search::FileReference,
+};
use itertools::Itertools;
use std::collections::HashMap;
use syntax::{
@@ -16,6 +19,89 @@ use crate::{
AssistId, AssistKind,
};
+use super::inline_call::split_refs_and_uses;
+
+// Assist: inline_type_alias_uses
+//
+// Inline a type alias into all of its uses where possible.
+//
+// ```
+// type $0A = i32;
+// fn id(x: A) -> A {
+// x
+// };
+// fn foo() {
+// let _: A = 3;
+// }
+// ```
+// ->
+// ```
+//
+// fn id(x: i32) -> i32 {
+// x
+// };
+// fn foo() {
+// let _: i32 = 3;
+// }
+// ```
+pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let name = ctx.find_node_at_offset::<ast::Name>()?;
+ let ast_alias = name.syntax().parent().and_then(ast::TypeAlias::cast)?;
+
+ let hir_alias = ctx.sema.to_def(&ast_alias)?;
+ let concrete_type = ast_alias.ty()?;
+
+ let usages = Definition::TypeAlias(hir_alias).usages(&ctx.sema);
+ if !usages.at_least_one() {
+ return None;
+ }
+
+ // until this is ok
+
+ acc.add(
+ AssistId("inline_type_alias_uses", AssistKind::RefactorInline),
+ "Inline type alias into all uses",
+ name.syntax().text_range(),
+ |builder| {
+ let usages = usages.all();
+ let mut definition_deleted = false;
+
+ let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
+ builder.edit_file(file_id);
+
+ let (path_types, path_type_uses) =
+ split_refs_and_uses(builder, refs, |path_type| {
+ path_type.syntax().ancestors().nth(3).and_then(ast::PathType::cast)
+ });
+
+ path_type_uses
+ .iter()
+ .flat_map(ast_to_remove_for_path_in_use_stmt)
+ .for_each(|x| builder.delete(x.syntax().text_range()));
+ for (target, replacement) in path_types.into_iter().filter_map(|path_type| {
+ let replacement = inline(&ast_alias, &path_type)?.to_text(&concrete_type);
+ let target = path_type.syntax().text_range();
+ Some((target, replacement))
+ }) {
+ builder.replace(target, replacement);
+ }
+
+ if file_id == ctx.file_id() {
+ builder.delete(ast_alias.syntax().text_range());
+ definition_deleted = true;
+ }
+ };
+
+ for (file_id, refs) in usages.into_iter() {
+ inline_refs_for_file(file_id, refs);
+ }
+ if !definition_deleted {
+ builder.edit_file(ctx.file_id());
+ builder.delete(ast_alias.syntax().text_range());
+ }
+ },
+ )
+}
+
// Assist: inline_type_alias
//
// Replace a type alias with its concrete type.
@@ -36,11 +122,6 @@ use crate::{
// }
// ```
pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- enum Replacement {
- Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap },
- Plain,
- }
-
let alias_instance = ctx.find_node_at_offset::<ast::PathType>()?;
let concrete_type;
let replacement;
@@ -59,23 +140,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
_ => {
let alias = get_type_alias(&ctx, &alias_instance)?;
concrete_type = alias.ty()?;
-
- replacement = if let Some(alias_generics) = alias.generic_param_list() {
- if alias_generics.generic_params().next().is_none() {
- cov_mark::hit!(no_generics_params);
- return None;
- }
-
- let instance_args =
- alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast);
-
- Replacement::Generic {
- lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?,
- const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?,
- }
- } else {
- Replacement::Plain
- };
+ replacement = inline(&alias, &alias_instance)?;
}
}
@@ -85,19 +150,45 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
AssistId("inline_type_alias", AssistKind::RefactorInline),
"Inline type alias",
target,
- |builder| {
- let replacement_text = match replacement {
- Replacement::Generic { lifetime_map, const_and_type_map } => {
- create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
- }
- Replacement::Plain => concrete_type.to_string(),
- };
-
- builder.replace(target, replacement_text);
- },
+ |builder| builder.replace(target, replacement.to_text(&concrete_type)),
)
}
+impl Replacement {
+ fn to_text(&self, concrete_type: &ast::Type) -> String {
+ match self {
+ Replacement::Generic { lifetime_map, const_and_type_map } => {
+ create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
+ }
+ Replacement::Plain => concrete_type.to_string(),
+ }
+ }
+}
+
+enum Replacement {
+ Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap },
+ Plain,
+}
+
+fn inline(alias_def: &ast::TypeAlias, alias_instance: &ast::PathType) -> Option<Replacement> {
+ let repl = if let Some(alias_generics) = alias_def.generic_param_list() {
+ if alias_generics.generic_params().next().is_none() {
+ cov_mark::hit!(no_generics_params);
+ return None;
+ }
+ let instance_args =
+ alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast);
+
+ Replacement::Generic {
+ lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?,
+ const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?,
+ }
+ } else {
+ Replacement::Plain
+ };
+ Some(repl)
+}
+
struct LifetimeMap(HashMap<String, ast::Lifetime>);
impl LifetimeMap {
@@ -835,4 +926,95 @@ trait Tr {
"#,
);
}
+
+ mod inline_type_alias_uses {
+ use crate::{handlers::inline_type_alias::inline_type_alias_uses, tests::check_assist};
+
+ #[test]
+ fn inline_uses() {
+ check_assist(
+ inline_type_alias_uses,
+ r#"
+type $0A = u32;
+
+fn foo() {
+ let _: A = 3;
+ let _: A = 4;
+}
+"#,
+ r#"
+
+
+fn foo() {
+ let _: u32 = 3;
+ let _: u32 = 4;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_uses_across_files() {
+ check_assist(
+ inline_type_alias_uses,
+ r#"
+//- /lib.rs
+mod foo;
+type $0T<E> = Vec<E>;
+fn f() -> T<&str> {
+ vec!["hello"]
+}
+
+//- /foo.rs
+use super::T;
+fn foo() {
+ let _: T<i8> = Vec::new();
+}
+"#,
+ r#"
+//- /lib.rs
+mod foo;
+
+fn f() -> Vec<&str> {
+ vec!["hello"]
+}
+
+//- /foo.rs
+
+fn foo() {
+ let _: Vec<i8> = Vec::new();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_uses_across_files_2() {
+ check_assist(
+ inline_type_alias_uses,
+ r#"
+//- /lib.rs
+mod foo;
+type $0I = i32;
+
+//- /foo.rs
+use super::I;
+fn foo() {
+ let _: I = 0;
+}
+"#,
+ r#"
+//- /lib.rs
+mod foo;
+
+
+//- /foo.rs
+
+fn foo() {
+ let _: i32 = 0;
+}
+"#,
+ );
+ }
+ }
}
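
For a generic alias, the extracted `inline` function matches the instance's generic arguments with the alias's parameters (`LifetimeMap`, `ConstAndTypeMap`) so `create_replacement` can splice them into the aliased type. The sketch below shows the idea as plain text substitution, under the simplifying assumption that whole-word identifier matching is good enough; the real assist operates on the syntax tree, not strings:

```rust
use std::collections::HashMap;

/// Substitute generic parameters by name in the aliased type's text.
/// E.g. `type Pair<T, U> = (T, Vec<U>);` instantiated as `Pair<i32, String>`
/// becomes `(i32, Vec<String>)`.
fn inline_alias(aliased_ty: &str, params: &[&str], args: &[&str]) -> Option<String> {
    if params.len() != args.len() {
        return None;
    }
    let map: HashMap<&str, &str> = params.iter().copied().zip(args.iter().copied()).collect();

    // Scan the type text, mapping every identifier through the parameter table.
    let mut out = String::new();
    let mut ident = String::new();
    for c in aliased_ty.chars() {
        if c.is_alphanumeric() || c == '_' {
            ident.push(c);
        } else {
            out.push_str(map.get(ident.as_str()).copied().unwrap_or(ident.as_str()));
            ident.clear();
            out.push(c);
        }
    }
    out.push_str(map.get(ident.as_str()).copied().unwrap_or(ident.as_str()));
    Some(out)
}

fn main() {
    let inlined = inline_alias("(T, Vec<U>)", &["T", "U"], &["i32", "String"]).unwrap();
    assert_eq!(inlined, "(i32, Vec<String>)");
}
```
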
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
index ce91dd237..2fc754e3e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
@@ -5,7 +5,7 @@ use syntax::{
AstNode, TextRange,
};
-use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
static ASSIST_NAME: &str = "introduce_named_lifetime";
static ASSIST_LABEL: &str = "Introduce named lifetime";
@@ -140,7 +140,7 @@ enum NeedsLifetime {
}
impl NeedsLifetime {
- fn make_mut(self, builder: &mut AssistBuilder) -> Self {
+ fn make_mut(self, builder: &mut SourceChangeBuilder) -> Self {
match self {
Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)),
Self::RefType(it) => Self::RefType(builder.make_mut(it)),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
index 7e102ceba..2bdbec93b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
@@ -1,6 +1,10 @@
use either::Either;
use ide_db::imports::merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior};
-use syntax::{algo::neighbor, ast, match_ast, ted, AstNode, SyntaxElement, SyntaxNode};
+use syntax::{
+ algo::neighbor,
+ ast::{self, edit_in_place::Removable},
+ match_ast, ted, AstNode, SyntaxElement, SyntaxNode,
+};
use crate::{
assist_context::{AssistContext, Assists},
@@ -76,7 +80,7 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
.collect();
for edit in edits_mut {
match edit {
- Remove(it) => it.as_ref().either(ast::Use::remove, ast::UseTree::remove),
+ Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
Replace(old, new) => ted::replace(old, new),
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
index 176a3bf58..1dd376ac3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
@@ -1,5 +1,9 @@
use syntax::{
- ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, HasName, HasTypeBounds},
+ ast::{
+ self,
+ edit_in_place::{GenericParamsOwnerEdit, Removable},
+ make, AstNode, HasName, HasTypeBounds,
+ },
match_ast,
};
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_format_string_arg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_format_string_arg.rs
new file mode 100644
index 000000000..aa710d2ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_format_string_arg.rs
@@ -0,0 +1,296 @@
+use crate::{AssistContext, Assists};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ syntax_helpers::{
+ format_string::is_format_string,
+ format_string_exprs::{parse_format_exprs, Arg},
+ },
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
+
+// Assist: move_format_string_arg
+//
+// Move an expression out of a format string.
+//
+// ```
+// macro_rules! format_args {
+// ($lit:literal $(tt:tt)*) => { 0 },
+// }
+// macro_rules! print {
+// ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+// }
+//
+// fn main() {
+// print!("{x + 1}$0");
+// }
+// ```
+// ->
+// ```
+// macro_rules! format_args {
+// ($lit:literal $(tt:tt)*) => { 0 },
+// }
+// macro_rules! print {
+// ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+// }
+//
+// fn main() {
+// print!("{}"$0, x + 1);
+// }
+// ```
+
+pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
+ let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
+
+ let expanded_t = ast::String::cast(
+ ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone()),
+ )?;
+ if !is_format_string(&expanded_t) {
+ return None;
+ }
+
+ let (new_fmt, extracted_args) = parse_format_exprs(fmt_string.text()).ok()?;
+ if extracted_args.is_empty() {
+ return None;
+ }
+
+ acc.add(
+ AssistId(
+ "move_format_string_arg",
+ // if there aren't any expressions, then make the assist a RefactorExtract
+ if extracted_args.iter().filter(|f| matches!(f, Arg::Expr(_))).count() == 0 {
+ AssistKind::RefactorExtract
+ } else {
+ AssistKind::QuickFix
+ },
+ ),
+ "Extract format args",
+ tt.syntax().text_range(),
+ |edit| {
+ let fmt_range = fmt_string.syntax().text_range();
+
+ // Replace old format string with new format string whose arguments have been extracted
+ edit.replace(fmt_range, new_fmt);
+
+ // Insert cursor at end of format string
+ edit.insert(fmt_range.end(), "$0");
+
+ // Extract existing arguments in macro
+ let tokens =
+ tt.token_trees_and_tokens().collect_vec();
+
+ let mut existing_args: Vec<String> = vec![];
+
+ let mut current_arg = String::new();
+ if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
+ tokens.as_slice()
+ {
+ for t in tokens {
+ match t {
+ NodeOrToken::Node(n) => {
+ format_to!(current_arg, "{n}");
+ },
+ NodeOrToken::Token(t) if t.kind() == COMMA => {
+ existing_args.push(current_arg.trim().into());
+ current_arg.clear();
+ },
+ NodeOrToken::Token(t) => {
+ current_arg.push_str(t.text());
+ },
+ }
+ }
+ existing_args.push(current_arg.trim().into());
+
+ // delete everything after the format string until the end bracket;
+ // we're going to insert the new arguments later
+ edit.delete(TextRange::new(
+ format_string.text_range().end(),
+ end_bracket.text_range().start(),
+ ));
+ }
+
+ // Start building the new args
+ let mut existing_args = existing_args.into_iter();
+ let mut args = String::new();
+
+ let mut placeholder_idx = 1;
+
+ for extracted_args in extracted_args {
+ // remove expr from format string
+ args.push_str(", ");
+
+ match extracted_args {
+ Arg::Ident(s) | Arg::Expr(s) => {
+ // insert arg
+ args.push_str(&s);
+ }
+ Arg::Placeholder => {
+ // try matching with existing argument
+ match existing_args.next() {
+ Some(ea) => {
+ args.push_str(&ea);
+ }
+ None => {
+ // insert placeholder
+ args.push_str(&format!("${placeholder_idx}"));
+ placeholder_idx += 1;
+ }
+ }
+ }
+ }
+ }
+
+ // Insert new args
+ edit.insert(fmt_range.end(), args);
+ },
+ );
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::check_assist;
+
+ const MACRO_DECL: &'static str = r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+"#;
+
+ fn add_macro_decl(s: &'static str) -> String {
+ MACRO_DECL.to_string() + s
+ }
+
+ #[test]
+ fn multiple_middle_arg() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {}$0", y + 2, 2);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {}"$0, y + 2, x + 1, 2);
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn single_arg() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{obj.value:b}$0",);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{:b}"$0, obj.value);
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn multiple_middle_placeholders_arg() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {} {}$0", y + 2, 2);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1);
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn multiple_trailing_args() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {Struct(1, 2)}$0", 1);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2));
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn improper_commas() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {Struct(1, 2)}$0", 1,);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2));
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn nested_tt() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("My name is {} {x$0 + x}", stringify!(Paperino))
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("My name is {} {}"$0, stringify!(Paperino), x + x)
+}
+"#,
+ ),
+ );
+ }
+}
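
Once `parse_format_exprs` has rewritten the literal (for example `"{} {x + 1:b} {}"`), `move_format_string_arg` has to rebuild the trailing argument list: extracted identifiers and expressions are appended as new arguments, each remaining `{}` placeholder keeps the next pre-existing argument, and any shortfall becomes a `$n` snippet tab stop. A self-contained sketch of just that pairing loop, with a local `Arg` enum standing in for the one from `ide_db` (the real parser's behaviour is not reproduced here):

```rust
/// Local stand-in for `format_string_exprs::Arg`.
enum Arg {
    Placeholder,
    Ident(String),
    Expr(String),
}

/// Rebuild the trailing argument list, mirroring the loop in the assist above.
fn build_args(extracted: Vec<Arg>, existing: Vec<String>) -> String {
    let mut existing = existing.into_iter();
    let mut out = String::new();
    let mut tab_stop = 1;
    for arg in extracted {
        out.push_str(", ");
        match arg {
            // Expressions pulled out of the string become new arguments.
            Arg::Ident(s) | Arg::Expr(s) => out.push_str(&s),
            // A leftover `{}` keeps its old argument, or gets a snippet tab stop.
            Arg::Placeholder => match existing.next() {
                Some(old) => out.push_str(&old),
                None => {
                    out.push_str(&format!("${tab_stop}"));
                    tab_stop += 1;
                }
            },
        }
    }
    out
}

fn main() {
    // `print!("{} {x + 1:b} {} {}$0", y + 2, 2)` from the test above:
    let extracted = vec![
        Arg::Placeholder,               // "{}"      -> keeps `y + 2`
        Arg::Expr("x + 1".to_string()), // "{x + 1}" -> becomes a new argument
        Arg::Placeholder,               // "{}"      -> keeps `2`
        Arg::Placeholder,               // "{}"      -> no argument left: `$1`
    ];
    let existing = vec!["y + 2".to_string(), "2".to_string()];
    assert_eq!(build_args(extracted, existing), ", y + 2, x + 1, 2, $1");

    // In this sketch an identifier behaves just like an expression argument.
    assert_eq!(build_args(vec![Arg::Ident("x".to_string())], vec![]), ", x");
}
```
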
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
index 121f8b4a1..e57d1d065 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -44,8 +44,11 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_module = ctx.sema.scope(call.syntax())?.module();
let target_module_def = ModuleDef::from(resolved_call);
let item_in_ns = ItemInNs::from(target_module_def);
- let receiver_path = current_module
- .find_use_path(ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?)?;
+ let receiver_path = current_module.find_use_path(
+ ctx.sema.db,
+ item_for_path_search(ctx.sema.db, item_in_ns)?,
+ ctx.config.prefer_no_std,
+ )?;
let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
index 0c2e9da38..4b2af550b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
@@ -37,7 +37,8 @@ use crate::{
// ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
- let mut proposed_imports = import_assets.search_for_relative_paths(&ctx.sema);
+ let mut proposed_imports =
+ import_assets.search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std);
if proposed_imports.is_empty() {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
index 59ea94ea1..bd2e8fbe3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
@@ -8,7 +8,8 @@ use syntax::{
use SyntaxKind::WHITESPACE;
use crate::{
- assist_context::AssistBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, Assists,
+ assist_context::SourceChangeBuilder, utils::next_prev, AssistContext, AssistId, AssistKind,
+ Assists,
};
// Assist: remove_unused_param
@@ -88,7 +89,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
fn process_usages(
ctx: &AssistContext<'_>,
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
file_id: FileId,
references: Vec<FileReference>,
arg_to_remove: usize,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index bd50208da..9fd5e1886 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -10,7 +10,7 @@ use syntax::{
};
use crate::{
- assist_context::{AssistBuilder, AssistContext, Assists},
+ assist_context::{AssistContext, Assists, SourceChangeBuilder},
utils::{
add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
@@ -85,7 +85,7 @@ pub(crate) fn replace_derive_with_manual_impl(
})
.flat_map(|trait_| {
current_module
- .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_))
+ .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.prefer_no_std)
.as_ref()
.map(mod_path_to_ast)
.zip(Some(trait_))
@@ -224,7 +224,7 @@ fn impl_def_from_trait(
}
fn update_attribute(
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
old_derives: &[ast::Path],
old_tree: &ast::TokenTree,
old_trait_path: &ast::Path,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs
new file mode 100644
index 000000000..7d91be621
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_or_with_or_else.rs
@@ -0,0 +1,364 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ famous_defs::FamousDefs,
+};
+use syntax::{
+ ast::{self, make, Expr, HasArgList},
+ AstNode,
+};
+
+use crate::{AssistContext, Assists};
+
+// Assist: replace_or_with_or_else
+//
+// Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
+//
+// ```
+// # //- minicore:option
+// fn foo() {
+// let a = Some(1);
+// a.unwra$0p_or(2);
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+// let a = Some(1);
+// a.unwrap_or_else(|| 2);
+// }
+// ```
+pub(crate) fn replace_or_with_or_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+
+ let kind = is_option_or_result(call.receiver()?, ctx)?;
+
+ let (name, arg_list) = (call.name_ref()?, call.arg_list()?);
+
+ let mut map_or = false;
+
+ let replace = match &*name.text() {
+ "unwrap_or" => "unwrap_or_else".to_string(),
+ "or" => "or_else".to_string(),
+ "ok_or" if kind == Kind::Option => "ok_or_else".to_string(),
+ "map_or" => {
+ map_or = true;
+ "map_or_else".to_string()
+ }
+ _ => return None,
+ };
+
+ let arg = match arg_list.args().collect::<Vec<_>>().as_slice() {
+ [] => make::arg_list(Vec::new()),
+ [first] => {
+ let param = into_closure(first);
+ make::arg_list(vec![param])
+ }
+ [first, second] if map_or => {
+ let param = into_closure(first);
+ make::arg_list(vec![param, second.clone()])
+ }
+ _ => return None,
+ };
+
+ acc.add(
+ AssistId("replace_or_with_or_else", AssistKind::RefactorRewrite),
+ format!("Replace {} with {}", name.text(), replace),
+ call.syntax().text_range(),
+ |builder| {
+ builder.replace(name.syntax().text_range(), replace);
+ builder.replace_ast(arg_list, arg)
+ },
+ )
+}
+
+fn into_closure(param: &Expr) -> Expr {
+ (|| {
+ if let ast::Expr::CallExpr(call) = param {
+ if call.arg_list()?.args().count() == 0 {
+ Some(call.expr()?.clone())
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })()
+ .unwrap_or_else(|| make::expr_closure(None, param.clone()))
+}
+
+// Assist: replace_or_else_with_or
+//
+// Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
+//
+// ```
+// # //- minicore:option
+// fn foo() {
+// let a = Some(1);
+// a.unwra$0p_or_else(|| 2);
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+// let a = Some(1);
+// a.unwrap_or(2);
+// }
+// ```
+pub(crate) fn replace_or_else_with_or(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+
+ let kind = is_option_or_result(call.receiver()?, ctx)?;
+
+ let (name, arg_list) = (call.name_ref()?, call.arg_list()?);
+
+ let mut map_or = false;
+ let replace = match &*name.text() {
+ "unwrap_or_else" => "unwrap_or".to_string(),
+ "or_else" => "or".to_string(),
+ "ok_or_else" if kind == Kind::Option => "ok_or".to_string(),
+ "map_or_else" => {
+ map_or = true;
+ "map_or".to_string()
+ }
+ _ => return None,
+ };
+
+ let arg = match arg_list.args().collect::<Vec<_>>().as_slice() {
+ [] => make::arg_list(Vec::new()),
+ [first] => {
+ let param = into_call(first);
+ make::arg_list(vec![param])
+ }
+ [first, second] if map_or => {
+ let param = into_call(first);
+ make::arg_list(vec![param, second.clone()])
+ }
+ _ => return None,
+ };
+
+ acc.add(
+ AssistId("replace_or_else_with_or", AssistKind::RefactorRewrite),
+ format!("Replace {} with {}", name.text(), replace),
+ call.syntax().text_range(),
+ |builder| {
+ builder.replace(name.syntax().text_range(), replace);
+ builder.replace_ast(arg_list, arg)
+ },
+ )
+}
+
+fn into_call(param: &Expr) -> Expr {
+ (|| {
+ if let ast::Expr::ClosureExpr(closure) = param {
+ if closure.param_list()?.params().count() == 0 {
+ Some(closure.body()?.clone())
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })()
+ .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())))
+}
+
+#[derive(PartialEq, Eq)]
+enum Kind {
+ Option,
+ Result,
+}
+
+fn is_option_or_result(receiver: Expr, ctx: &AssistContext<'_>) -> Option<Kind> {
+ let ty = ctx.sema.type_of_expr(&receiver)?.adjusted().as_adt()?.as_enum()?;
+ let option_enum =
+ FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_option_Option();
+
+ if let Some(option_enum) = option_enum {
+ if ty == option_enum {
+ return Some(Kind::Option);
+ }
+ }
+
+ let result_enum =
+ FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_result_Result();
+
+ if let Some(result_enum) = result_enum {
+ if ty == result_enum {
+ return Some(Kind::Result);
+ }
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn replace_or_with_or_else_simple() {
+ check_assist(
+ replace_or_with_or_else,
+ r#"
+//- minicore: option
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or(2);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or_else(|| 2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_with_or_else_call() {
+ check_assist(
+ replace_or_with_or_else,
+ r#"
+//- minicore: option
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or(x());
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or_else(x);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_with_or_else_block() {
+ check_assist(
+ replace_or_with_or_else,
+ r#"
+//- minicore: option
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or({
+ let mut x = bar();
+ for i in 0..10 {
+ x += i;
+ }
+ x
+ });
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or_else(|| {
+ let mut x = bar();
+ for i in 0..10 {
+ x += i;
+ }
+ x
+ });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_simple() {
+ check_assist(
+ replace_or_else_with_or,
+ r#"
+//- minicore: option
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or_else(|| 2);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or(2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_call() {
+ check_assist(
+ replace_or_else_with_or,
+ r#"
+//- minicore: option
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_$0or_else(x);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some(1);
+ return foo.unwrap_or(x());
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_result() {
+ check_assist(
+ replace_or_else_with_or,
+ r#"
+//- minicore: result
+fn foo() {
+ let foo = Ok(1);
+ return foo.unwrap_$0or_else(x);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Ok(1);
+ return foo.unwrap_or(x());
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_map() {
+ check_assist(
+ replace_or_else_with_or,
+ r#"
+//- minicore: result
+fn foo() {
+ let foo = Ok("foo");
+ return foo.map$0_or_else(|| 42, |v| v.len());
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Ok("foo");
+ return foo.map_or(42, |v| v.len());
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_or_else_with_or_not_applicable() {
+ check_assist_not_applicable(
+ replace_or_else_with_or,
+ r#"
+fn foo() {
+ let foo = Ok(1);
+ return foo.unwrap_$0or_else(x);
+}
+"#,
+ )
+ }
+}
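
The helpers `into_closure` and `into_call` are near-inverses: going to the `_or_else` form wraps the argument in `|| ...` unless it is already a zero-argument call, in which case the callee is passed directly; going back unwraps a zero-parameter closure to its body, otherwise calls the value. That is why `unwrap_or(x())` round-trips as `unwrap_or_else(x)` in the tests above. A sketch of that rewrite on a toy expression type (not rust-analyzer's `ast::Expr`):

```rust
/// Toy expression type; just enough shape to show the rewrite.
#[derive(Clone, Debug, PartialEq)]
enum Expr {
    Path(String),                                // `x`
    Call { callee: Box<Expr>, args: Vec<Expr> }, // `x(...)`
    Closure { params: usize, body: Box<Expr> },  // `|...| body`
    Lit(i32),
}

/// `unwrap_or(arg)` -> `unwrap_or_else(into_closure(arg))`
fn into_closure(arg: &Expr) -> Expr {
    match arg {
        // `x()` with no arguments: pass the function itself.
        Expr::Call { callee, args } if args.is_empty() => (**callee).clone(),
        // Anything else: wrap it in a zero-parameter closure.
        other => Expr::Closure { params: 0, body: Box::new(other.clone()) },
    }
}

/// `unwrap_or_else(arg)` -> `unwrap_or(into_call(arg))`
fn into_call(arg: &Expr) -> Expr {
    match arg {
        // `|| body` with no parameters: use the body directly.
        Expr::Closure { params: 0, body } => (**body).clone(),
        // Anything else: call it with no arguments.
        other => Expr::Call { callee: Box::new(other.clone()), args: Vec::new() },
    }
}

fn main() {
    let call_x = Expr::Call { callee: Box::new(Expr::Path("x".into())), args: vec![] };
    // `unwrap_or(x())` becomes `unwrap_or_else(x)` ...
    assert_eq!(into_closure(&call_x), Expr::Path("x".into()));
    // ... and `unwrap_or_else(x)` becomes `unwrap_or(x())` again.
    assert_eq!(into_call(&Expr::Path("x".into())), call_x);
    // A literal is wrapped: `unwrap_or(2)` -> `unwrap_or_else(|| 2)`.
    assert_eq!(
        into_closure(&Expr::Lit(2)),
        Expr::Closure { params: 0, body: Box::new(Expr::Lit(2)) }
    );
}
```
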
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index 2419fa11c..dbbc56958 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -67,6 +67,7 @@ pub(crate) fn replace_qualified_name_with_use(
ctx.sema.db,
module,
ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
)
})
.flatten();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
index 6112e0945..521447c26 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
@@ -1,5 +1,6 @@
+use hir::HirDisplay;
use syntax::{
- ast::{Expr, GenericArg},
+ ast::{Expr, GenericArg, GenericArgList},
ast::{LetStmt, Type::InferType},
AstNode, TextRange,
};
@@ -34,21 +35,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
let initializer = let_stmt.initializer()?;
- let generic_args = match &initializer {
- Expr::MethodCallExpr(ce) => ce.generic_arg_list()?,
- Expr::CallExpr(ce) => {
- if let Expr::PathExpr(pe) = ce.expr()? {
- pe.path()?.segment()?.generic_arg_list()?
- } else {
- cov_mark::hit!(not_applicable_if_non_path_function_call);
- return None;
- }
- }
- _ => {
- cov_mark::hit!(not_applicable_if_non_function_call_initializer);
- return None;
- }
- };
+ let generic_args = generic_arg_list(&initializer)?;
// Find range of ::<_>
let colon2 = generic_args.coloncolon_token()?;
@@ -65,7 +52,16 @@ pub(crate) fn replace_turbofish_with_explicit_type(
// An improvement would be to check that this is correctly part of the return value of the
// function call, or sub in the actual return type.
- let turbofish_type = &turbofish_args[0];
+ let returned_type = match ctx.sema.type_of_expr(&initializer) {
+ Some(returned_type) if !returned_type.original.contains_unknown() => {
+ let module = ctx.sema.scope(let_stmt.syntax())?.module();
+ returned_type.original.display_source_code(ctx.db(), module.into()).ok()?
+ }
+ _ => {
+ cov_mark::hit!(fallback_to_turbofish_type_if_type_info_not_available);
+ turbofish_args[0].to_string()
+ }
+ };
let initializer_start = initializer.syntax().text_range().start();
if ctx.offset() > turbofish_range.end() || ctx.offset() < initializer_start {
@@ -83,12 +79,12 @@ pub(crate) fn replace_turbofish_with_explicit_type(
"Replace turbofish with explicit type",
TextRange::new(initializer_start, turbofish_range.end()),
|builder| {
- builder.insert(ident_range.end(), format!(": {}", turbofish_type));
+ builder.insert(ident_range.end(), format!(": {}", returned_type));
builder.delete(turbofish_range);
},
);
} else if let Some(InferType(t)) = let_stmt.ty() {
- // If there's a type inferrence underscore, we can offer to replace it with the type in
+ // If there's a type inference underscore, we can offer to replace it with the type in
// the turbofish.
// let x: _ = fn::<...>();
let underscore_range = t.syntax().text_range();
@@ -98,7 +94,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
"Replace `_` with turbofish type",
turbofish_range,
|builder| {
- builder.replace(underscore_range, turbofish_type.to_string());
+ builder.replace(underscore_range, returned_type);
builder.delete(turbofish_range);
},
);
@@ -107,6 +103,26 @@ pub(crate) fn replace_turbofish_with_explicit_type(
None
}
+fn generic_arg_list(expr: &Expr) -> Option<GenericArgList> {
+ match expr {
+ Expr::MethodCallExpr(expr) => expr.generic_arg_list(),
+ Expr::CallExpr(expr) => {
+ if let Expr::PathExpr(pe) = expr.expr()? {
+ pe.path()?.segment()?.generic_arg_list()
+ } else {
+ cov_mark::hit!(not_applicable_if_non_path_function_call);
+ return None;
+ }
+ }
+ Expr::AwaitExpr(expr) => generic_arg_list(&expr.expr()?),
+ Expr::TryExpr(expr) => generic_arg_list(&expr.expr()?),
+ _ => {
+ cov_mark::hit!(not_applicable_if_non_function_call_initializer);
+ None
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -115,6 +131,7 @@ mod tests {
#[test]
fn replaces_turbofish_for_vec_string() {
+ cov_mark::check!(fallback_to_turbofish_type_if_type_info_not_available);
check_assist(
replace_turbofish_with_explicit_type,
r#"
@@ -135,6 +152,7 @@ fn main() {
#[test]
fn replaces_method_calls() {
// foo.make() is a method call which uses a different expr in the let initializer
+ cov_mark::check!(fallback_to_turbofish_type_if_type_info_not_available);
check_assist(
replace_turbofish_with_explicit_type,
r#"
@@ -240,4 +258,108 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn replaces_turbofish_for_known_type() {
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a = make$0::<i32>();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a: i32 = make();
+}
+"#,
+ );
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+//- minicore: option
+fn make<T>() -> T {}
+fn main() {
+ let a = make$0::<Option<bool>>();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a: Option<bool> = make();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replaces_turbofish_not_same_type() {
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+//- minicore: option
+fn make<T>() -> Option<T> {}
+fn main() {
+ let a = make$0::<u128>();
+}
+"#,
+ r#"
+fn make<T>() -> Option<T> {}
+fn main() {
+ let a: Option<u128> = make();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replaces_turbofish_for_type_with_defaulted_generic_param() {
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+struct HasDefault<T, U = i32>(T, U);
+fn make<T>() -> HasDefault<T> {}
+fn main() {
+ let a = make$0::<bool>();
+}
+"#,
+ r#"
+struct HasDefault<T, U = i32>(T, U);
+fn make<T>() -> HasDefault<T> {}
+fn main() {
+ let a: HasDefault<bool> = make();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replaces_turbofish_try_await() {
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+//- minicore: option, future
+struct Fut<T>(T);
+impl<T> core::future::Future for Fut<T> {
+ type Output = Option<T>;
+}
+fn make<T>() -> Fut<T> {}
+fn main() {
+ let a = make$0::<bool>().await?;
+}
+"#,
+ r#"
+struct Fut<T>(T);
+impl<T> core::future::Future for Fut<T> {
+ type Output = Option<T>;
+}
+fn make<T>() -> Fut<T> {}
+fn main() {
+ let a: bool = make().await?;
+}
+"#,
+ );
+ }
}
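
The refactored `generic_arg_list` is a small recursive walk: a method call or path call carries the turbofish directly, while `expr.await` and `expr?` simply forward to their inner expression — which is what lets the `replaces_turbofish_try_await` test find `::<bool>` under `.await?`. A self-contained sketch of that descent on a toy expression type (not the real `ast::Expr`):

```rust
/// Toy initializer expressions, just enough to show the recursive descent.
enum Expr {
    Call { turbofish: Option<String> },
    Await(Box<Expr>),
    Try(Box<Expr>),
    Other,
}

/// Find the turbofish on the underlying call, looking through `.await` and `?`.
fn generic_arg_list(expr: &Expr) -> Option<&String> {
    match expr {
        Expr::Call { turbofish } => turbofish.as_ref(),
        Expr::Await(inner) | Expr::Try(inner) => generic_arg_list(inner),
        Expr::Other => None,
    }
}

fn main() {
    // Models `make::<bool>().await?`
    let init = Expr::Try(Box::new(Expr::Await(Box::new(Expr::Call {
        turbofish: Some("::<bool>".to_string()),
    }))));
    assert_eq!(generic_arg_list(&init), Some(&"::<bool>".to_string()));
    assert_eq!(generic_arg_list(&Expr::Other), None);
}
```
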
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
new file mode 100644
index 000000000..9565f0ee6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -0,0 +1,293 @@
+use syntax::{
+ algo::neighbor,
+ ast::{self, edit::IndentLevel, make, AstNode},
+ ted::{self, Position},
+ Direction, SyntaxKind, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: unmerge_match_arm
+//
+// Splits the current match arm with a `|` pattern into two arms with identical bodies.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move(..) $0| Action::Stop => foo(),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move(..) => foo(),
+// Action::Stop => foo(),
+// }
+// }
+// ```
+pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
+ let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+ let match_arm = ast::MatchArm::cast(or_pat.syntax().parent()?)?;
+ let match_arm_body = match_arm.expr()?;
+
+ // We don't need to check for leading pipe because it is directly under `MatchArm`
+ // without `OrPat`.
+
+ let new_parent = match_arm.syntax().parent()?;
+ let old_parent_range = new_parent.text_range();
+
+ acc.add(
+ AssistId("unmerge_match_arm", AssistKind::RefactorRewrite),
+ "Unmerge match arm",
+ pipe_token.text_range(),
+ |edit| {
+ let pats_after = pipe_token
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| ast::Pat::cast(it.into_node()?));
+ // FIXME: We should add a leading pipe if the original arm has one.
+ let new_match_arm = make::match_arm(
+ pats_after,
+ match_arm.guard().and_then(|guard| guard.condition()),
+ match_arm_body,
+ )
+ .clone_for_update();
+
+ let mut pipe_index = pipe_token.index();
+ if pipe_token
+ .prev_sibling_or_token()
+ .map_or(false, |it| it.kind() == SyntaxKind::WHITESPACE)
+ {
+ pipe_index -= 1;
+ }
+ or_pat.syntax().splice_children(
+ pipe_index..or_pat.syntax().children_with_tokens().count(),
+ Vec::new(),
+ );
+
+ let mut insert_after_old_arm = Vec::new();
+
+ // A comma can be:
+ // - After the arm. In this case we always want to insert a comma after the newly
+ // inserted arm.
+ // - Missing after the arm, with no arms after. In this case we want to insert a
+ // comma before the newly inserted arm. It may not be necessary if the arm
+ // body is a block, but we don't bother to check that.
+ // - Missing after the arm with arms after, if the arm body is a block. In this case
+ // we don't want to insert a comma at all.
+ let has_comma_after =
+ std::iter::successors(match_arm.syntax().last_child_or_token(), |it| {
+ it.prev_sibling_or_token()
+ })
+ .map(|it| it.kind())
+ .skip_while(|it| it.is_trivia())
+ .next()
+ == Some(T![,]);
+ let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
+ if !has_comma_after && !has_arms_after {
+ insert_after_old_arm.push(make::token(T![,]).into());
+ }
+
+ let indent = IndentLevel::from_node(match_arm.syntax());
+ insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into());
+
+ insert_after_old_arm.push(new_match_arm.syntax().clone().into());
+
+ ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm);
+
+ if has_comma_after {
+ ted::insert_raw(
+ Position::last_child_of(new_match_arm.syntax()),
+ make::token(T![,]),
+ );
+ }
+
+ edit.replace(old_parent_range, new_parent.to_string());
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn unmerge_match_arm_single_pipe() {
+ check_assist(
+ unmerge_match_arm,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A $0| X::B => { 1i32 }
+ X::C => { 2i32 }
+ };
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32 }
+ X::B => { 1i32 }
+ X::C => { 2i32 }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unmerge_match_arm_guard() {
+ check_assist(
+ unmerge_match_arm,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A $0| X::B if true => { 1i32 }
+ _ => { 2i32 }
+ };
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A if true => { 1i32 }
+ X::B if true => { 1i32 }
+ _ => { 2i32 }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unmerge_match_arm_leading_pipe() {
+ check_assist_not_applicable(
+ unmerge_match_arm,
+ r#"
+
+fn main() {
+ let y = match 0 {
+ |$0 0 => { 1i32 }
+ 1 => { 2i32 }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unmerge_match_arm_multiple_pipes() {
+ check_assist(
+ unmerge_match_arm,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B |$0 X::C | X::D => 1i32,
+ X::E => 2i32,
+ };
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B => 1i32,
+ X::C | X::D => 1i32,
+ X::E => 2i32,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unmerge_match_arm_inserts_comma_if_required() {
+ check_assist(
+ unmerge_match_arm,
+ r#"
+#[derive(Debug)]
+enum X { A, B }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A $0| X::B => 1i32
+ };
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => 1i32,
+ X::B => 1i32
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unmerge_match_arm_inserts_comma_if_had_after() {
+ check_assist(
+ unmerge_match_arm,
+ r#"
+#[derive(Debug)]
+enum X { A, B }
+
+fn main() {
+ let x = X::A;
+ match x {
+ X::A $0| X::B => {},
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B }
+
+fn main() {
+ let x = X::A;
+ match x {
+ X::A => {},
+ X::B => {},
+ }
+}
+"#,
+ );
+ }
+}
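
In `unmerge_match_arm`, whether a comma already follows the arm is decided by walking backwards from the arm's last token, skipping trivia (whitespace and comments), and checking whether the first significant token is `,`; combined with whether more arms follow, that picks one of the three cases described in the comment above. A self-contained sketch of that backwards trivia skip on a toy token list (the real code walks the syntax tree via `prev_sibling_or_token`):

```rust
#[derive(PartialEq)]
enum Kind {
    Comma,
    Whitespace,
    Comment,
    Other,
}

fn is_trivia(k: &Kind) -> bool {
    matches!(k, Kind::Whitespace | Kind::Comment)
}

/// Walk backwards from the arm's last token, skip trivia, and check for a comma.
fn has_comma_after(arm_tokens: &[Kind]) -> bool {
    arm_tokens.iter().rev().find(|k| !is_trivia(k)) == Some(&Kind::Comma)
}

fn main() {
    // `X::A | X::B => {}, // note` followed by a newline: the comma is found.
    assert!(has_comma_after(&[Kind::Other, Kind::Comma, Kind::Comment, Kind::Whitespace]));
    // `X::A | X::B => 1i32` with no trailing comma.
    assert!(!has_comma_after(&[Kind::Other, Kind::Whitespace]));
}
```
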
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
index 3ce028e93..dac216b69 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
@@ -1,5 +1,5 @@
use syntax::{
- ast::{self, make, HasVisibility},
+ ast::{self, edit_in_place::Removable, make, HasVisibility},
ted::{self, Position},
AstNode, SyntaxKind,
};
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs
new file mode 100644
index 000000000..25c58d086
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs
@@ -0,0 +1,159 @@
+use syntax::{
+ ast::{self, edit::AstNodeEdit},
+ AstNode, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: unwrap_tuple
+//
+// Unwrap the tuple into separate variables.
+//
+// ```
+// # //- minicore: result
+// fn main() {
+// $0let (foo, bar) = ("Foo", "Bar");
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let foo = "Foo";
+// let bar = "Bar";
+// }
+// ```
+pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let let_kw = ctx.find_token_syntax_at_offset(T![let])?;
+ let let_stmt = let_kw.parent().and_then(ast::LetStmt::cast)?;
+ let indent_level = let_stmt.indent_level().0 as usize;
+ let pat = let_stmt.pat()?;
+ let ty = let_stmt.ty();
+ let init = let_stmt.initializer()?;
+
+ // This only applies for tuple patterns, types, and initializers.
+ let tuple_pat = match pat {
+ ast::Pat::TuplePat(pat) => pat,
+ _ => return None,
+ };
+ let tuple_ty = ty.and_then(|it| match it {
+ ast::Type::TupleType(ty) => Some(ty),
+ _ => None,
+ });
+ let tuple_init = match init {
+ ast::Expr::TupleExpr(expr) => expr,
+ _ => return None,
+ };
+
+ if tuple_pat.fields().count() != tuple_init.fields().count() {
+ return None;
+ }
+ if let Some(tys) = &tuple_ty {
+ if tuple_pat.fields().count() != tys.fields().count() {
+ return None;
+ }
+ }
+
+ let parent = let_kw.parent()?;
+
+ acc.add(
+ AssistId("unwrap_tuple", AssistKind::RefactorRewrite),
+ "Unwrap tuple",
+ let_kw.text_range(),
+ |edit| {
+ let indents = " ".repeat(indent_level);
+
+ // If there is an ascribed type, insert that type for each declaration,
+ // otherwise, omit that type.
+ if let Some(tys) = tuple_ty {
+ let mut zipped_decls = String::new();
+ for (pat, ty, expr) in
+ itertools::izip!(tuple_pat.fields(), tys.fields(), tuple_init.fields())
+ {
+ zipped_decls.push_str(&format!("{}let {pat}: {ty} = {expr};\n", indents))
+ }
+ edit.replace(parent.text_range(), zipped_decls.trim());
+ } else {
+ let mut zipped_decls = String::new();
+ for (pat, expr) in itertools::izip!(tuple_pat.fields(), tuple_init.fields()) {
+ zipped_decls.push_str(&format!("{}let {pat} = {expr};\n", indents));
+ }
+ edit.replace(parent.text_range(), zipped_decls.trim());
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn unwrap_tuples() {
+ check_assist(
+ unwrap_tuple,
+ r#"
+fn main() {
+ $0let (foo, bar) = ("Foo", "Bar");
+}
+"#,
+ r#"
+fn main() {
+ let foo = "Foo";
+ let bar = "Bar";
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_tuple,
+ r#"
+fn main() {
+ $0let (foo, bar, baz) = ("Foo", "Bar", "Baz");
+}
+"#,
+ r#"
+fn main() {
+ let foo = "Foo";
+ let bar = "Bar";
+ let baz = "Baz";
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_tuple_with_types() {
+ check_assist(
+ unwrap_tuple,
+ r#"
+fn main() {
+ $0let (foo, bar): (u8, i32) = (5, 10);
+}
+"#,
+ r#"
+fn main() {
+ let foo: u8 = 5;
+ let bar: i32 = 10;
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_tuple,
+ r#"
+fn main() {
+ $0let (foo, bar, baz): (u8, i32, f64) = (5, 10, 17.5);
+}
+"#,
+ r#"
+fn main() {
+ let foo: u8 = 5;
+ let bar: i32 = 10;
+ let baz: f64 = 17.5;
+}
+"#,
+ );
+ }
+}
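
The rewrite step of `unwrap_tuple` is, after the arity checks, just a zip of pattern, optional ascribed type, and initializer fields, emitting one `let` per triple at the original indentation. A sketch of the typed branch over plain strings (assumes `itertools` for `izip!`; the four-space indent unit is an assumption of this sketch):

```rust
use itertools::izip;

/// Build the replacement `let` statements, mirroring the typed branch above.
fn unwrap_lets(pats: &[&str], tys: &[&str], exprs: &[&str], indent: usize) -> String {
    let indents = "    ".repeat(indent);
    let mut out = String::new();
    for (pat, ty, expr) in izip!(pats, tys, exprs) {
        out.push_str(&format!("{indents}let {pat}: {ty} = {expr};\n"));
    }
    out.trim().to_string()
}

fn main() {
    let lets = unwrap_lets(&["foo", "bar"], &["u8", "i32"], &["5", "10"], 1);
    assert_eq!(lets, "let foo: u8 = 5;\n    let bar: i32 = 10;");
}
```
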
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index fe87aa15f..a07318cef 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -121,7 +121,9 @@ mod handlers {
mod convert_iter_for_each_to_for;
mod convert_let_else_to_match;
mod convert_tuple_struct_to_named_struct;
+ mod convert_named_struct_to_tuple_struct;
mod convert_to_guarded_return;
+ mod convert_two_arm_bool_match_to_matches_macro;
mod convert_while_to_loop;
mod destructure_tuple_binding;
mod expand_glob_import;
@@ -135,6 +137,7 @@ mod handlers {
mod flip_binexpr;
mod flip_comma;
mod flip_trait_bound;
+ mod move_format_string_arg;
mod generate_constant;
mod generate_default_from_enum_variant;
mod generate_default_from_new;
@@ -179,12 +182,15 @@ mod handlers {
mod replace_try_expr_with_match;
mod replace_derive_with_manual_impl;
mod replace_if_let_with_match;
+ mod replace_or_with_or_else;
mod introduce_named_generic;
mod replace_let_with_if_let;
mod replace_qualified_name_with_use;
mod replace_string_with_char;
mod replace_turbofish_with_explicit_type;
mod split_import;
+ mod unmerge_match_arm;
+ mod unwrap_tuple;
mod sort_items;
mod toggle_ignore;
mod unmerge_use;
@@ -213,8 +219,10 @@ mod handlers {
convert_iter_for_each_to_for::convert_iter_for_each_to_for,
convert_iter_for_each_to_for::convert_for_loop_with_for_each,
convert_let_else_to_match::convert_let_else_to_match,
+ convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct,
convert_to_guarded_return::convert_to_guarded_return,
convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct,
+ convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro,
convert_while_to_loop::convert_while_to_loop,
destructure_tuple_binding::destructure_tuple_binding,
expand_glob_import::expand_glob_import,
@@ -243,12 +251,14 @@ mod handlers {
inline_call::inline_into_callers,
inline_local_variable::inline_local_variable,
inline_type_alias::inline_type_alias,
+ inline_type_alias::inline_type_alias_uses,
introduce_named_generic::introduce_named_generic,
introduce_named_lifetime::introduce_named_lifetime,
invert_if::invert_if,
merge_imports::merge_imports,
merge_match_arms::merge_match_arms,
move_bounds::move_bounds_to_where_clause,
+ move_format_string_arg::move_format_string_arg,
move_guard::move_arm_cond_to_match_guard,
move_guard::move_guard_to_arm_body,
move_module_to_file::move_module_to_file,
@@ -272,15 +282,19 @@ mod handlers {
replace_if_let_with_match::replace_if_let_with_match,
replace_if_let_with_match::replace_match_with_if_let,
replace_let_with_if_let::replace_let_with_if_let,
+ replace_or_with_or_else::replace_or_else_with_or,
+ replace_or_with_or_else::replace_or_with_or_else,
replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
replace_qualified_name_with_use::replace_qualified_name_with_use,
sort_items::sort_items,
split_import::split_import,
toggle_ignore::toggle_ignore,
+ unmerge_match_arm::unmerge_match_arm,
unmerge_use::unmerge_use,
unnecessary_async::unnecessary_async,
unwrap_block::unwrap_block,
unwrap_result_return_type::unwrap_result_return_type,
+ unwrap_tuple::unwrap_tuple,
wrap_return_type_in_result::wrap_return_type_in_result,
// These are manually sorted for better priorities. By default,
// priority is determined by the size of the target range (smaller
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index 9cd66c6b3..f7f2417d0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -29,6 +29,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
group: true,
skip_glob_imports: true,
},
+ prefer_no_std: false,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
@@ -95,8 +96,10 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
});
let actual = {
- let source_change =
- assist.source_change.expect("Assist did not contain any source changes");
+ let source_change = assist
+ .source_change
+ .filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
+ .expect("Assist did not contain any source changes");
let mut actual = before;
if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
source_file_edit.apply(&mut actual);
@@ -139,8 +142,10 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la
match (assist, expected) {
(Some(assist), ExpectedResult::After(after)) => {
- let source_change =
- assist.source_change.expect("Assist did not contain any source changes");
+ let source_change = assist
+ .source_change
+ .filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
+ .expect("Assist did not contain any source changes");
let skip_header = source_change.source_file_edits.len() == 1
&& source_change.file_system_edits.len() == 0;
@@ -227,6 +232,7 @@ fn assist_order_field_struct() {
assert_eq!(assists.next().expect("expected assist").label, "Generate a getter method");
assert_eq!(assists.next().expect("expected assist").label, "Generate a mut getter method");
assert_eq!(assists.next().expect("expected assist").label, "Generate a setter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Convert to tuple struct");
assert_eq!(assists.next().expect("expected assist").label, "Add `#[derive]`");
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index 6eaab48a3..2c4000efe 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -408,6 +408,47 @@ fn main() {
}
#[test]
+fn doctest_convert_named_struct_to_tuple_struct() {
+ check_doc_test(
+ "convert_named_struct_to_tuple_struct",
+ r#####"
+struct Point$0 { x: f32, y: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { x, y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.x
+ }
+
+ pub fn y(&self) -> f32 {
+ self.y
+ }
+}
+"#####,
+ r#####"
+struct Point(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_convert_to_guarded_return() {
check_doc_test(
"convert_to_guarded_return",
@@ -473,6 +514,26 @@ impl Point {
}
#[test]
+fn doctest_convert_two_arm_bool_match_to_matches_macro() {
+ check_doc_test(
+ "convert_two_arm_bool_match_to_matches_macro",
+ r#####"
+fn main() {
+ match scrutinee$0 {
+ Some(val) if val.cond() => true,
+ _ => false,
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ matches!(scrutinee, Some(val) if val.cond())
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_convert_while_to_loop() {
check_doc_test(
"convert_while_to_loop",
@@ -1357,6 +1418,31 @@ fn main() {
}
#[test]
+fn doctest_inline_type_alias_uses() {
+ check_doc_test(
+ "inline_type_alias_uses",
+ r#####"
+type $0A = i32;
+fn id(x: A) -> A {
+ x
+};
+fn foo() {
+ let _: A = 3;
+}
+"#####,
+ r#####"
+
+fn id(x: i32) -> i32 {
+ x
+};
+fn foo() {
+ let _: i32 = 3;
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_introduce_named_generic() {
check_doc_test(
"introduce_named_generic",
@@ -1547,6 +1633,37 @@ fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
}
#[test]
+fn doctest_move_format_string_arg() {
+ check_doc_test(
+ "move_format_string_arg",
+ r#####"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+
+fn main() {
+ print!("{x + 1}$0");
+}
+"#####,
+ r#####"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+
+fn main() {
+ print!("{}"$0, x + 1);
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_move_from_mod_rs() {
check_doc_test(
"move_from_mod_rs",
@@ -1985,6 +2102,46 @@ fn handle(action: Action) {
}
#[test]
+fn doctest_replace_or_else_with_or() {
+ check_doc_test(
+ "replace_or_else_with_or",
+ r#####"
+//- minicore:option
+fn foo() {
+ let a = Some(1);
+ a.unwra$0p_or_else(|| 2);
+}
+"#####,
+ r#####"
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or(2);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_or_with_or_else() {
+ check_doc_test(
+ "replace_or_with_or_else",
+ r#####"
+//- minicore:option
+fn foo() {
+ let a = Some(1);
+ a.unwra$0p_or(2);
+}
+"#####,
+ r#####"
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or_else(|| 2);
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_replace_qualified_name_with_use() {
check_doc_test(
"replace_qualified_name_with_use",
@@ -2183,6 +2340,32 @@ fn arithmetics {
}
#[test]
+fn doctest_unmerge_match_arm() {
+ check_doc_test(
+ "unmerge_match_arm",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) $0| Action::Stop => foo(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_unmerge_use() {
check_doc_test(
"unmerge_use",
@@ -2245,6 +2428,25 @@ fn foo() -> i32 { 42i32 }
}
#[test]
+fn doctest_unwrap_tuple() {
+ check_doc_test(
+ "unwrap_tuple",
+ r#####"
+//- minicore: result
+fn main() {
+ $0let (foo, bar) = ("Foo", "Bar");
+}
+"#####,
+ r#####"
+fn main() {
+ let foo = "Foo";
+ let bar = "Bar";
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_wrap_return_type_in_result() {
check_doc_test(
"wrap_return_type_in_result",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 3e61d0741..db32e7182 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -2,8 +2,6 @@
use std::ops;
-use itertools::Itertools;
-
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{db::HirDatabase, HirDisplay, Semantics};
use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap};
@@ -12,15 +10,15 @@ use syntax::{
ast::{
self,
edit::{self, AstNodeEdit},
- edit_in_place::AttrsOwnerEdit,
+ edit_in_place::{AttrsOwnerEdit, Removable},
make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
},
- ted, AstNode, AstToken, Direction, SmolStr, SourceFile,
+ ted, AstNode, AstToken, Direction, SourceFile,
SyntaxKind::*,
SyntaxNode, TextRange, TextSize, T,
};
-use crate::assist_context::{AssistBuilder, AssistContext};
+use crate::assist_context::{AssistContext, SourceChangeBuilder};
pub(crate) mod suggest_name;
mod gen_trait_fn_body;
@@ -333,10 +331,14 @@ fn calc_depth(pat: &ast::Pat, depth: usize) -> usize {
// FIXME: change the new fn checking to a more semantic approach when that's more
// viable (e.g. we process proc macros, etc)
// FIXME: this partially overlaps with `find_impl_block_*`
+
+/// `find_struct_impl` looks for an impl block of a struct, but it also takes a list of
+/// function names and checks whether any of them already exists inside that impl block;
+/// if even one match is found, it returns `None`.
pub(crate) fn find_struct_impl(
ctx: &AssistContext<'_>,
adt: &ast::Adt,
- name: &str,
+ names: &[String],
) -> Option<Option<ast::Impl>> {
let db = ctx.db();
let module = adt.syntax().parent()?;
@@ -364,7 +366,7 @@ pub(crate) fn find_struct_impl(
});
if let Some(ref impl_blk) = block {
- if has_fn(impl_blk, name) {
+ if has_any_fn(impl_blk, names) {
return None;
}
}
@@ -372,12 +374,12 @@ pub(crate) fn find_struct_impl(
Some(block)
}
-fn has_fn(imp: &ast::Impl, rhs_name: &str) -> bool {
+fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool {
if let Some(il) = imp.assoc_item_list() {
for item in il.assoc_items() {
if let ast::AssocItem::Fn(f) = item {
if let Some(name) = f.name() {
- if name.text().eq_ignore_ascii_case(rhs_name) {
+ if names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) {
return true;
}
}
@@ -424,34 +426,44 @@ pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &
}
fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String {
- let generic_params = adt.generic_param_list();
+ // Ensure lifetime params are before type & const params
+ let generic_params = adt.generic_param_list().map(|generic_params| {
+ let lifetime_params =
+ generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ // remove defaults since they can't be specified in impls
+ match param {
+ ast::TypeOrConstParam::Type(param) => {
+ let param = param.clone_for_update();
+ param.remove_default();
+ Some(ast::GenericParam::TypeParam(param))
+ }
+ ast::TypeOrConstParam::Const(param) => {
+ let param = param.clone_for_update();
+ param.remove_default();
+ Some(ast::GenericParam::ConstParam(param))
+ }
+ }
+ });
+
+ make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
+ });
+
+ // FIXME: use syntax::make & mutable AST apis instead
+ // `trait_text` and `code` can't be opaque blobs of text
let mut buf = String::with_capacity(code.len());
+
+ // Copy any cfg attrs from the original adt
buf.push_str("\n\n");
- adt.attrs()
- .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false))
- .for_each(|attr| buf.push_str(format!("{}\n", attr).as_str()));
+ let cfg_attrs = adt
+ .attrs()
+ .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false));
+ cfg_attrs.for_each(|attr| buf.push_str(&format!("{attr}\n")));
+
+ // `impl{generic_params} {trait_text} for {name}{generic_params.to_generic_args()}`
buf.push_str("impl");
if let Some(generic_params) = &generic_params {
- let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
- let toc_params = generic_params.type_or_const_params().map(|toc_param| {
- let type_param = match toc_param {
- ast::TypeOrConstParam::Type(x) => x,
- ast::TypeOrConstParam::Const(x) => return x.syntax().to_string(),
- };
- let mut buf = String::new();
- if let Some(it) = type_param.name() {
- format_to!(buf, "{}", it.syntax());
- }
- if let Some(it) = type_param.colon_token() {
- format_to!(buf, "{} ", it);
- }
- if let Some(it) = type_param.type_bound_list() {
- format_to!(buf, "{}", it.syntax());
- }
- buf
- });
- let generics = lifetimes.chain(toc_params).format(", ");
- format_to!(buf, "<{}>", generics);
+ format_to!(buf, "{generic_params}");
}
buf.push(' ');
if let Some(trait_text) = trait_text {
@@ -460,23 +472,15 @@ fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str
}
buf.push_str(&adt.name().unwrap().text());
if let Some(generic_params) = generic_params {
- let lifetime_params = generic_params
- .lifetime_params()
- .filter_map(|it| it.lifetime())
- .map(|it| SmolStr::from(it.text()));
- let toc_params = generic_params
- .type_or_const_params()
- .filter_map(|it| it.name())
- .map(|it| SmolStr::from(it.text()));
- format_to!(buf, "<{}>", lifetime_params.chain(toc_params).format(", "))
+ format_to!(buf, "{}", generic_params.to_generic_args());
}
match adt.where_clause() {
Some(where_clause) => {
- format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
+ format_to!(buf, "\n{where_clause}\n{{\n{code}\n}}");
}
None => {
- format_to!(buf, " {{\n{}\n}}", code);
+ format_to!(buf, " {{\n{code}\n}}");
}
}
@@ -484,7 +488,7 @@ fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str
}
pub(crate) fn add_method_to_adt(
- builder: &mut AssistBuilder,
+ builder: &mut SourceChangeBuilder,
adt: &ast::Adt,
impl_def: Option<ast::Impl>,
method: &str,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
index 779cdbc93..c521a10fc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
@@ -55,6 +55,7 @@ const USELESS_METHODS: &[&str] = &[
"iter",
"into_iter",
"iter_mut",
+ "into_future",
];
pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
@@ -75,7 +76,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
/// In current implementation, the function tries to get the name from
/// the following sources:
///
-/// * if expr is an argument to function/method, use paramter name
+/// * if expr is an argument to function/method, use parameter name
/// * if expr is a function/method call, use function name
/// * expression type name if it exists (E.g. `()`, `fn() -> ()` or `!` do not have names)
/// * fallback: `var_name`
@@ -85,7 +86,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
/// Currently it sticks to the first name found.
// FIXME: Microoptimize and return a `SmolStr` here.
pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String {
- // `from_param` does not benifit from stripping
+ // `from_param` does not benefit from stripping
// it need the largest context possible
// so we check firstmost
if let Some(name) = from_param(expr, sema) {
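
As a side note on the `into_future` addition to USELESS_METHODS above: a minimal sketch of the intended effect on name suggestion, with `Client`/`Request` as purely illustrative types (not from this patch).

struct Request;
struct Client;

impl Client {
    fn request(&self) -> Request {
        Request
    }
}

fn demo(client: Client) {
    // For a chain like `client.request().into_future()`, the suggested variable
    // name is now derived from `request` (the last informative call) rather than
    // from the trailing `into_future`, which says nothing about the value.
    let request = client.request();
    let _ = request;
}
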
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
index 8c9d6b228..75835bce9 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -11,10 +11,10 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.3"
+itertools = "0.10.5"
-once_cell = "1.12.0"
-smallvec = "1.9.0"
+once_cell = "1.15.0"
+smallvec = "1.10.0"
stdx = { path = "../stdx", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
index 72579e602..296dfc142 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -19,6 +19,7 @@ pub(crate) mod snippet;
pub(crate) mod r#type;
pub(crate) mod use_;
pub(crate) mod vis;
+pub(crate) mod env_vars;
use std::iter;
@@ -551,7 +552,11 @@ fn enum_variants_with_paths(
}
for variant in variants {
- if let Some(path) = ctx.module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) {
+ if let Some(path) = ctx.module.find_use_path(
+ ctx.db,
+ hir::ModuleDef::from(variant),
+ ctx.config.prefer_no_std,
+ ) {
// Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice
if path.segments().len() > 1 {
@@ -617,7 +622,6 @@ pub(super) fn complete_name_ref(
dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
- record::complete_record_expr_func_update(acc, ctx, path_ctx, expr_ctx);
snippet::complete_expr_snippet(acc, ctx, path_ctx, expr_ctx);
}
PathKind::Type { location } => {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
index cf40ca489..02004ff7b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -19,7 +19,7 @@ pub(crate) fn complete_dot(
};
// Suggest .await syntax for types that implement Future trait
- if receiver_ty.impls_future(ctx.db) {
+ if receiver_ty.impls_into_future(ctx.db) {
let mut item =
CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), "await");
item.detail("expr.await");
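
For context on the `impls_future` → `impls_into_future` switch above: `.await` desugars through `IntoFuture`, so the keyword suggestion is now also offered for types that only implement `IntoFuture`. A small self-contained sketch of such a type (illustrative, not from the patch):

use std::future::{ready, IntoFuture, Ready};

struct Sleep(u64);

impl IntoFuture for Sleep {
    type Output = u64;
    type IntoFuture = Ready<u64>;
    fn into_future(self) -> Self::IntoFuture {
        ready(self.0)
    }
}

async fn demo() -> u64 {
    // `Sleep` is not itself a `Future`, but `.await` works via `IntoFuture`,
    // so completing after `Sleep(1).` can reasonably offer `await`.
    Sleep(1).await
}
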
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs
new file mode 100644
index 000000000..09e95e53d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs
@@ -0,0 +1,150 @@
+//! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html)
+use hir::Semantics;
+use ide_db::{syntax_helpers::node_ext::macro_call_for_string_token, RootDatabase};
+use syntax::ast::{self, IsString};
+
+use crate::{
+ completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
+};
+
+const CARGO_DEFINED_VARS: &[(&str, &str)] = &[
+ ("CARGO","Path to the cargo binary performing the build"),
+ ("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"),
+ ("CARGO_PKG_VERSION","The full version of your package"),
+ ("CARGO_PKG_VERSION_MAJOR","The major version of your package"),
+ ("CARGO_PKG_VERSION_MINOR","The minor version of your package"),
+ ("CARGO_PKG_VERSION_PATCH","The patch version of your package"),
+ ("CARGO_PKG_VERSION_PRE","The pre-release version of your package"),
+ ("CARGO_PKG_AUTHORS","Colon separated list of authors from the manifest of your package"),
+ ("CARGO_PKG_NAME","The name of your package"),
+ ("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"),
+ ("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"),
+ ("CARGO_PKG_REPOSITORY","The repository from the manifest of your package"),
+ ("CARGO_PKG_LICENSE","The license from the manifest of your package"),
+ ("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"),
+ ("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"),
+ ("CARGO_CRATE_NAME","The name of the crate that is currently being compiled"),
+ ("CARGO_BIN_NAME","The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe"),
+ ("CARGO_PRIMARY_PACKAGE","This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)"),
+ ("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code")
+];
+
+pub(crate) fn complete_cargo_env_vars(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ expanded: &ast::String,
+) -> Option<()> {
+ guard_env_macro(expanded, &ctx.sema)?;
+ let range = expanded.text_range_between_quotes()?;
+
+ CARGO_DEFINED_VARS.iter().for_each(|(var, detail)| {
+ let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var);
+ item.detail(*detail);
+ item.add_to(acc);
+ });
+
+ Some(())
+}
+
+fn guard_env_macro(string: &ast::String, semantics: &Semantics<'_, RootDatabase>) -> Option<()> {
+ let call = macro_call_for_string_token(string)?;
+ let name = call.path()?.segment()?.name_ref()?;
+ let makro = semantics.resolve_macro_call(&call)?;
+ let db = semantics.db;
+
+ match name.text().as_str() {
+ "env" | "option_env" if makro.kind(db) == hir::MacroKind::BuiltIn => Some(()),
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_edit, completion_list};
+
+ fn check(macro_name: &str) {
+ check_edit(
+ "CARGO_BIN_NAME",
+ &format!(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! {} {{
+ ($var:literal) => {{ 0 }}
+ }}
+
+ fn main() {{
+ let foo = {}!("CAR$0");
+ }}
+ "#,
+ macro_name, macro_name
+ ),
+ &format!(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! {} {{
+ ($var:literal) => {{ 0 }}
+ }}
+
+ fn main() {{
+ let foo = {}!("CARGO_BIN_NAME");
+ }}
+ "#,
+ macro_name, macro_name
+ ),
+ );
+ }
+ #[test]
+ fn completes_env_variable_in_env() {
+ check("env")
+ }
+
+ #[test]
+ fn completes_env_variable_in_option_env() {
+ check("option_env");
+ }
+
+ #[test]
+ fn doesnt_complete_in_random_strings() {
+ let fixture = r#"
+ fn main() {
+ let foo = "CA$0";
+ }
+ "#;
+
+ let completions = completion_list(fixture);
+ assert!(completions.is_empty(), "Completions weren't empty: {}", completions);
+ }
+
+ #[test]
+ fn doesnt_complete_in_random_macro() {
+ let fixture = r#"
+ macro_rules! bar {
+ ($($arg:tt)*) => { 0 }
+ }
+
+ fn main() {
+ let foo = bar!("CA$0");
+
+ }
+ "#;
+
+ let completions = completion_list(fixture);
+ assert!(completions.is_empty(), "Completions weren't empty: {}", completions);
+ }
+
+ #[test]
+ fn doesnt_complete_for_shadowed_macro() {
+ let fixture = r#"
+ macro_rules! env {
+ ($var:literal) => { 0 }
+ }
+
+ fn main() {
+ let foo = env!("CA$0");
+ }
+ "#;
+
+ let completions = completion_list(fixture);
+ assert!(completions.is_empty(), "Completions weren't empty: {}", completions)
+ }
+}
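
The new module targets the builtin `env!`/`option_env!` macros; in an ordinary Cargo build the completed variables resolve at compile time, roughly as below (standard macros, nothing rust-analyzer specific):

fn main() {
    // Completion inside the string literal offers CARGO_PKG_NAME, CARGO_MANIFEST_DIR, ...
    let name: &str = env!("CARGO_PKG_NAME");
    // option_env! yields None when the variable is not set at compile time.
    let tmp: Option<&str> = option_env!("CARGO_TARGET_TMPDIR");
    println!("{name} {tmp:?}");
}
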
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
index 5d0ddaaf2..3192b21cf 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -1,8 +1,10 @@
//! Completion of names from the current scope in expression position.
use hir::ScopeDef;
+use syntax::ast;
use crate::{
+ completions::record::add_default_update,
context::{ExprCtx, PathCompletionCtx, Qualified},
CompletionContext, Completions,
};
@@ -163,7 +165,11 @@ pub(crate) fn complete_expr_path(
hir::Adt::Struct(strukt) => {
let path = ctx
.module
- .find_use_path(ctx.db, hir::ModuleDef::from(strukt))
+ .find_use_path(
+ ctx.db,
+ hir::ModuleDef::from(strukt),
+ ctx.config.prefer_no_std,
+ )
.filter(|it| it.len() > 1);
acc.add_struct_literal(ctx, path_ctx, strukt, path, None);
@@ -181,7 +187,11 @@ pub(crate) fn complete_expr_path(
hir::Adt::Union(un) => {
let path = ctx
.module
- .find_use_path(ctx.db, hir::ModuleDef::from(un))
+ .find_use_path(
+ ctx.db,
+ hir::ModuleDef::from(un),
+ ctx.config.prefer_no_std,
+ )
.filter(|it| it.len() > 1);
acc.add_union_literal(ctx, un, path, None);
@@ -219,60 +229,90 @@ pub(crate) fn complete_expr_path(
_ => (),
});
- if is_func_update.is_none() {
- let mut add_keyword =
- |kw, snippet| acc.add_keyword_snippet_expr(ctx, incomplete_let, kw, snippet);
+ match is_func_update {
+ Some(record_expr) => {
+ let ty = ctx.sema.type_of_expr(&ast::Expr::RecordExpr(record_expr.clone()));
- if !in_block_expr {
- add_keyword("unsafe", "unsafe {\n $0\n}");
- }
- add_keyword("match", "match $1 {\n $0\n}");
- add_keyword("while", "while $1 {\n $0\n}");
- add_keyword("while let", "while let $1 = $2 {\n $0\n}");
- add_keyword("loop", "loop {\n $0\n}");
- if in_match_guard {
- add_keyword("if", "if $0");
- } else {
- add_keyword("if", "if $1 {\n $0\n}");
+ match ty.as_ref().and_then(|t| t.original.as_adt()) {
+ Some(hir::Adt::Union(_)) => (),
+ _ => {
+ cov_mark::hit!(functional_update);
+ let missing_fields =
+ ctx.sema.record_literal_missing_fields(record_expr);
+ if !missing_fields.is_empty() {
+ add_default_update(acc, ctx, ty);
+ }
+ }
+ };
}
- add_keyword("if let", "if let $1 = $2 {\n $0\n}");
- add_keyword("for", "for $1 in $2 {\n $0\n}");
- add_keyword("true", "true");
- add_keyword("false", "false");
+ None => {
+ let mut add_keyword = |kw, snippet| {
+ acc.add_keyword_snippet_expr(ctx, incomplete_let, kw, snippet)
+ };
- if in_condition || in_block_expr {
- add_keyword("let", "let");
- }
+ if !in_block_expr {
+ add_keyword("unsafe", "unsafe {\n $0\n}");
+ }
+ add_keyword("match", "match $1 {\n $0\n}");
+ add_keyword("while", "while $1 {\n $0\n}");
+ add_keyword("while let", "while let $1 = $2 {\n $0\n}");
+ add_keyword("loop", "loop {\n $0\n}");
+ if in_match_guard {
+ add_keyword("if", "if $0");
+ } else {
+ add_keyword("if", "if $1 {\n $0\n}");
+ }
+ add_keyword("if let", "if let $1 = $2 {\n $0\n}");
+ add_keyword("for", "for $1 in $2 {\n $0\n}");
+ add_keyword("true", "true");
+ add_keyword("false", "false");
- if after_if_expr {
- add_keyword("else", "else {\n $0\n}");
- add_keyword("else if", "else if $1 {\n $0\n}");
- }
+ if in_condition || in_block_expr {
+ add_keyword("let", "let");
+ }
- if wants_mut_token {
- add_keyword("mut", "mut ");
- }
+ if after_if_expr {
+ add_keyword("else", "else {\n $0\n}");
+ add_keyword("else if", "else if $1 {\n $0\n}");
+ }
- if in_loop_body {
- if in_block_expr {
- add_keyword("continue", "continue;");
- add_keyword("break", "break;");
- } else {
- add_keyword("continue", "continue");
- add_keyword("break", "break");
+ if wants_mut_token {
+ add_keyword("mut", "mut ");
+ }
+
+ if in_loop_body {
+ if in_block_expr {
+ add_keyword("continue", "continue;");
+ add_keyword("break", "break;");
+ } else {
+ add_keyword("continue", "continue");
+ add_keyword("break", "break");
+ }
}
- }
- if let Some(ty) = innermost_ret_ty {
- add_keyword(
- "return",
- match (in_block_expr, ty.is_unit()) {
- (true, true) => "return ;",
- (true, false) => "return;",
- (false, true) => "return $0",
- (false, false) => "return",
- },
- );
+ if let Some(ret_ty) = innermost_ret_ty {
+ add_keyword(
+ "return",
+ match (ret_ty.is_unit(), in_block_expr) {
+ (true, true) => {
+ cov_mark::hit!(return_unit_block);
+ "return;"
+ }
+ (true, false) => {
+ cov_mark::hit!(return_unit_no_block);
+ "return"
+ }
+ (false, true) => {
+ cov_mark::hit!(return_value_block);
+ "return $0;"
+ }
+ (false, false) => {
+ cov_mark::hit!(return_value_no_block);
+ "return $0"
+ }
+ },
+ );
+ }
}
}
}
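
Reading the reworked `return` branch above, the four cov-marked cases appear to produce these snippets (`$0` is the editor tab stop); the two compiling functions below just illustrate the block-position cases:

// unit return type,  block position: "return;"
// unit return type,  tail position:  "return"
// value return type, block position: "return $0;"
// value return type, tail position:  "return $0"

fn unit_block(flag: bool) {
    if flag {
        return;
    }
}

fn value_block(flag: bool) -> i32 {
    if flag {
        return 0;
    }
    1
}
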
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index f04cc15d7..364969af9 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -262,7 +262,11 @@ fn import_on_the_fly(
acc.add_all(
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ )
.into_iter()
.filter(ns_filter)
.filter(|import| {
@@ -306,7 +310,11 @@ fn import_on_the_fly_pat_(
acc.add_all(
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ )
.into_iter()
.filter(ns_filter)
.filter(|import| {
@@ -344,7 +352,7 @@ fn import_on_the_fly_method(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+ .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
.into_iter()
.filter(|import| {
!ctx.is_item_hidden(&import.item_to_import)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
index e9256803c..e82cbfdcb 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -38,7 +38,7 @@ use ide_db::{
};
use syntax::{
ast::{self, edit_in_place::AttrsOwnerEdit},
- AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T,
+ AstNode, SyntaxElement, SyntaxKind, TextRange, T,
};
use text_edit::TextEdit;
@@ -85,20 +85,36 @@ fn complete_trait_impl_name(
name: &Option<ast::Name>,
kind: ImplCompletionKind,
) -> Option<()> {
- let token = ctx.token.clone();
let item = match name {
Some(name) => name.syntax().parent(),
- None => if token.kind() == SyntaxKind::WHITESPACE { token.prev_token()? } else { token }
- .parent(),
+ None => {
+ let token = &ctx.token;
+ match token.kind() {
+ SyntaxKind::WHITESPACE => token.prev_token()?,
+ _ => token.clone(),
+ }
+ .parent()
+ }
}?;
- complete_trait_impl(
- acc,
- ctx,
- kind,
- replacement_range(ctx, &item),
- // item -> ASSOC_ITEM_LIST -> IMPL
- &ast::Impl::cast(item.parent()?.parent()?)?,
- );
+ let item = ctx.sema.original_syntax_node(&item)?;
+ // item -> ASSOC_ITEM_LIST -> IMPL
+ let impl_def = ast::Impl::cast(item.parent()?.parent()?)?;
+ let replacement_range = {
+ // ctx.sema.original_ast_node(item)?;
+ let first_child = item
+ .children_with_tokens()
+ .find(|child| {
+ !matches!(
+ child.kind(),
+ SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR
+ )
+ })
+ .unwrap_or_else(|| SyntaxElement::Node(item.clone()));
+
+ TextRange::new(first_child.text_range().start(), ctx.source_range().end())
+ };
+
+ complete_trait_impl(acc, ctx, kind, replacement_range, &impl_def);
Some(())
}
@@ -233,7 +249,8 @@ fn add_type_alias_impl(
type_alias: hir::TypeAlias,
) {
let alias_name = type_alias.name(ctx.db);
- let (alias_name, escaped_name) = (alias_name.to_smol_str(), alias_name.escaped().to_smol_str());
+ let (alias_name, escaped_name) =
+ (alias_name.unescaped().to_smol_str(), alias_name.to_smol_str());
let label = format!("type {} =", alias_name);
let replacement = format!("type {} = ", escaped_name);
@@ -340,17 +357,6 @@ fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
syntax.trim_end().to_owned()
}
-fn replacement_range(ctx: &CompletionContext<'_>, item: &SyntaxNode) -> TextRange {
- let first_child = item
- .children_with_tokens()
- .find(|child| {
- !matches!(child.kind(), SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR)
- })
- .unwrap_or_else(|| SyntaxElement::Node(item.clone()));
-
- TextRange::new(first_child.text_range().start(), ctx.source_range().end())
-}
-
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
index 3989a451b..1d03c8cc5 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
@@ -75,16 +75,17 @@ impl Future for A {}
fn foo(a: A) { a.$0 }
"#,
expect![[r#"
- kw await expr.await
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
+ kw await expr.await
+ me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
"#]],
);
@@ -98,18 +99,45 @@ fn foo() {
}
"#,
expect![[r#"
- kw await expr.await
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
+ kw await expr.await
+ me into_future() (use core::future::IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
"#]],
- )
+ );
+ }
+
+ #[test]
+ fn test_completion_await_impls_into_future() {
+ check(
+ r#"
+//- minicore: future
+use core::future::*;
+struct A {}
+impl IntoFuture for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ expect![[r#"
+ kw await expr.await
+ me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ );
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index 9c975b929..950731eb4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -53,6 +53,7 @@ pub(crate) fn complete_mod(
let existing_mod_declarations = current_module
.children(ctx.db)
.filter_map(|module| Some(module.name(ctx.db)?.to_string()))
+ .filter(|module| module != ctx.original_token.text())
.collect::<FxHashSet<_>>();
let module_declaration_file =
@@ -351,4 +352,23 @@ fn ignored_bar() {}
"#]],
);
}
+
+ #[test]
+ fn semi_colon_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod bar {
+ mod baz$0
+}
+//- /foo/bar/baz.rs
+fn baz() {}
+"#,
+ expect![[r#"
+ md baz;
+ "#]],
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
index 71d2d9d43..58d5bf114 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
@@ -145,6 +145,7 @@ pub(crate) fn complete_pattern_path(
u.ty(ctx.db)
}
hir::PathResolution::Def(hir::ModuleDef::BuiltinType(ty)) => ty.ty(ctx.db),
+ hir::PathResolution::Def(hir::ModuleDef::TypeAlias(ty)) => ty.ty(ctx.db),
_ => return,
};
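
The added `TypeAlias` arm lets pattern-position completion resolve through aliases. A minimal sketch of the scenario it covers (illustrative names):

enum Event {
    Click { x: u32, y: u32 },
    Key(char),
}

type Ev = Event;

fn handle(e: Ev) {
    match e {
        // Completing after `Ev::` in a pattern can now offer `Click` and `Key`,
        // because the alias is resolved to the underlying enum.
        Ev::Key(c) => drop(c),
        Ev::Click { .. } => {}
    }
}
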
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
index 6b94347e0..b43bdb9ab 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -16,8 +16,11 @@
//
// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[]
-use ide_db::SnippetCap;
-use syntax::ast::{self, AstToken};
+use ide_db::{
+ syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders},
+ SnippetCap,
+};
+use syntax::{ast, AstToken};
use crate::{
completions::postfix::build_postfix_snippet_builder, context::CompletionContext, Completions,
@@ -43,250 +46,24 @@ pub(crate) fn add_format_like_completions(
cap: SnippetCap,
receiver_text: &ast::String,
) {
- let input = match string_literal_contents(receiver_text) {
- // It's not a string literal, do not parse input.
- Some(input) => input,
- None => return,
- };
-
let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
Some(it) => it,
None => return,
};
- let mut parser = FormatStrParser::new(input);
- if parser.parse().is_ok() {
+ if let Ok((out, exprs)) = parse_format_exprs(receiver_text.text()) {
+ let exprs = with_placeholders(exprs);
for (label, macro_name) in KINDS {
- let snippet = parser.to_suggestion(macro_name);
+ let snippet = format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", "));
postfix_snippet(label, macro_name, &snippet).add_to(acc);
}
}
}
-/// Checks whether provided item is a string literal.
-fn string_literal_contents(item: &ast::String) -> Option<String> {
- let item = item.text();
- if item.len() >= 2 && item.starts_with('\"') && item.ends_with('\"') {
- return Some(item[1..item.len() - 1].to_owned());
- }
-
- None
-}
-
-/// Parser for a format-like string. It is more allowing in terms of string contents,
-/// as we expect variable placeholders to be filled with expressions.
-#[derive(Debug)]
-pub(crate) struct FormatStrParser {
- input: String,
- output: String,
- extracted_expressions: Vec<String>,
- state: State,
- parsed: bool,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq)]
-enum State {
- NotExpr,
- MaybeExpr,
- Expr,
- MaybeIncorrect,
- FormatOpts,
-}
-
-impl FormatStrParser {
- pub(crate) fn new(input: String) -> Self {
- Self {
- input,
- output: String::new(),
- extracted_expressions: Vec::new(),
- state: State::NotExpr,
- parsed: false,
- }
- }
-
- pub(crate) fn parse(&mut self) -> Result<(), ()> {
- let mut current_expr = String::new();
-
- let mut placeholder_id = 1;
-
- // Count of open braces inside of an expression.
- // We assume that user knows what they're doing, thus we treat it like a correct pattern, e.g.
- // "{MyStruct { val_a: 0, val_b: 1 }}".
- let mut inexpr_open_count = 0;
-
- // We need to escape '\' and '$'. See the comments on `get_receiver_text()` for detail.
- let mut chars = self.input.chars().peekable();
- while let Some(chr) = chars.next() {
- match (self.state, chr) {
- (State::NotExpr, '{') => {
- self.output.push(chr);
- self.state = State::MaybeExpr;
- }
- (State::NotExpr, '}') => {
- self.output.push(chr);
- self.state = State::MaybeIncorrect;
- }
- (State::NotExpr, _) => {
- if matches!(chr, '\\' | '$') {
- self.output.push('\\');
- }
- self.output.push(chr);
- }
- (State::MaybeIncorrect, '}') => {
- // It's okay, we met "}}".
- self.output.push(chr);
- self.state = State::NotExpr;
- }
- (State::MaybeIncorrect, _) => {
- // Error in the string.
- return Err(());
- }
- (State::MaybeExpr, '{') => {
- self.output.push(chr);
- self.state = State::NotExpr;
- }
- (State::MaybeExpr, '}') => {
- // This is an empty sequence '{}'. Replace it with placeholder.
- self.output.push(chr);
- self.extracted_expressions.push(format!("${}", placeholder_id));
- placeholder_id += 1;
- self.state = State::NotExpr;
- }
- (State::MaybeExpr, _) => {
- if matches!(chr, '\\' | '$') {
- current_expr.push('\\');
- }
- current_expr.push(chr);
- self.state = State::Expr;
- }
- (State::Expr, '}') => {
- if inexpr_open_count == 0 {
- self.output.push(chr);
- self.extracted_expressions.push(current_expr.trim().into());
- current_expr = String::new();
- self.state = State::NotExpr;
- } else {
- // We're closing one brace met before inside of the expression.
- current_expr.push(chr);
- inexpr_open_count -= 1;
- }
- }
- (State::Expr, ':') if chars.peek().copied() == Some(':') => {
- // path seperator
- current_expr.push_str("::");
- chars.next();
- }
- (State::Expr, ':') => {
- if inexpr_open_count == 0 {
- // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}"
- self.output.push(chr);
- self.extracted_expressions.push(current_expr.trim().into());
- current_expr = String::new();
- self.state = State::FormatOpts;
- } else {
- // We're inside of braced expression, assume that it's a struct field name/value delimeter.
- current_expr.push(chr);
- }
- }
- (State::Expr, '{') => {
- current_expr.push(chr);
- inexpr_open_count += 1;
- }
- (State::Expr, _) => {
- if matches!(chr, '\\' | '$') {
- current_expr.push('\\');
- }
- current_expr.push(chr);
- }
- (State::FormatOpts, '}') => {
- self.output.push(chr);
- self.state = State::NotExpr;
- }
- (State::FormatOpts, _) => {
- if matches!(chr, '\\' | '$') {
- self.output.push('\\');
- }
- self.output.push(chr);
- }
- }
- }
-
- if self.state != State::NotExpr {
- return Err(());
- }
-
- self.parsed = true;
- Ok(())
- }
-
- pub(crate) fn to_suggestion(&self, macro_name: &str) -> String {
- assert!(self.parsed, "Attempt to get a suggestion from not parsed expression");
-
- let expressions_as_string = self.extracted_expressions.join(", ");
- format!(r#"{}("{}", {})"#, macro_name, self.output, expressions_as_string)
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
- use expect_test::{expect, Expect};
-
- fn check(input: &str, expect: &Expect) {
- let mut parser = FormatStrParser::new((*input).to_owned());
- let outcome_repr = if parser.parse().is_ok() {
- // Parsing should be OK, expected repr is "string; expr_1, expr_2".
- if parser.extracted_expressions.is_empty() {
- parser.output
- } else {
- format!("{}; {}", parser.output, parser.extracted_expressions.join(", "))
- }
- } else {
- // Parsing should fail, expected repr is "-".
- "-".to_owned()
- };
-
- expect.assert_eq(&outcome_repr);
- }
-
- #[test]
- fn format_str_parser() {
- let test_vector = &[
- ("no expressions", expect![["no expressions"]]),
- (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
- ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]),
- ("{expr:?}", expect![["{:?}; expr"]]),
- ("{expr:1$}", expect![[r"{:1\$}; expr"]]),
- ("{$0}", expect![[r"{}; \$0"]]),
- ("{malformed", expect![["-"]]),
- ("malformed}", expect![["-"]]),
- ("{{correct", expect![["{{correct"]]),
- ("correct}}", expect![["correct}}"]]),
- ("{correct}}}", expect![["{}}}; correct"]]),
- ("{correct}}}}}", expect![["{}}}}}; correct"]]),
- ("{incorrect}}", expect![["-"]]),
- ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]),
- ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]),
- (
- "{SomeStruct { val_a: 0, val_b: 1 }}",
- expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]],
- ),
- ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]),
- (
- "{SomeStruct { val_a: 0, val_b: 1 }:?}",
- expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
- ),
- ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
- ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
- ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
- ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
- ];
-
- for (input, output) in test_vector {
- check(input, output)
- }
- }
#[test]
fn test_into_suggestion() {
@@ -302,10 +79,10 @@ mod tests {
];
for (kind, input, output) in test_vector {
- let mut parser = FormatStrParser::new((*input).to_owned());
- parser.parse().expect("Parsing must succeed");
-
- assert_eq!(&parser.to_suggestion(*kind), output);
+ let (parsed_string, exprs) = parse_format_exprs(input).unwrap();
+ let exprs = with_placeholders(exprs);
+ let snippet = format!(r#"{}("{}", {})"#, kind, parsed_string, exprs.join(", "));
+ assert_eq!(&snippet, output);
}
}
}
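
After this rewrite the postfix format completions are driven by the shared `parse_format_exprs`/`with_placeholders` helpers instead of the local parser; the user-visible expansion should stay the same, roughly:

fn demo(name: &str) {
    // Accepting the `println` postfix completion on the literal "hello {name}"
    // produces a call equivalent to the line below: the inline expression is
    // pulled out into the argument list, and an empty `{}` would instead become
    // a `$1` snippet placeholder for the user to fill in.
    println!("hello {}", name);
}
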
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
index 1c9042390..5d96fbd30 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -3,7 +3,7 @@ use ide_db::SymbolKind;
use syntax::ast::{self, Expr};
use crate::{
- context::{DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, PatternContext, Qualified},
+ context::{DotAccess, DotAccessKind, PatternContext},
CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
CompletionRelevancePostfixMatch, Completions,
};
@@ -14,7 +14,24 @@ pub(crate) fn complete_record_pattern_fields(
pattern_ctx: &PatternContext,
) {
if let PatternContext { record_pat: Some(record_pat), .. } = pattern_ctx {
- complete_fields(acc, ctx, ctx.sema.record_pattern_missing_fields(record_pat));
+ let ty = ctx.sema.type_of_pat(&ast::Pat::RecordPat(record_pat.clone()));
+ let missing_fields = match ty.as_ref().and_then(|t| t.original.as_adt()) {
+ Some(hir::Adt::Union(un)) => {
+ // ctx.sema.record_pat_missing_fields will always return
+ // an empty Vec on a union literal. This is normally
+ // reasonable, but here we'd like to present the full list
+ // of fields if the literal is empty.
+ let were_fields_specified =
+ record_pat.record_pat_field_list().and_then(|fl| fl.fields().next()).is_some();
+
+ match were_fields_specified {
+ false => un.fields(ctx.db).into_iter().map(|f| (f, f.ty(ctx.db))).collect(),
+ true => return,
+ }
+ }
+ _ => ctx.sema.record_pattern_missing_fields(record_pat),
+ };
+ complete_fields(acc, ctx, missing_fields);
}
}
@@ -42,8 +59,13 @@ pub(crate) fn complete_record_expr_fields(
}
_ => {
let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
- add_default_update(acc, ctx, ty, &missing_fields);
+
+ if !missing_fields.is_empty() {
+ cov_mark::hit!(functional_update_field);
+ add_default_update(acc, ctx, ty);
+ }
if dot_prefix {
+ cov_mark::hit!(functional_update_one_dot);
let mut item =
CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), "..");
item.insert_text(".");
@@ -56,41 +78,18 @@ pub(crate) fn complete_record_expr_fields(
complete_fields(acc, ctx, missing_fields);
}
-// FIXME: This should probably be part of complete_path_expr
-pub(crate) fn complete_record_expr_func_update(
- acc: &mut Completions,
- ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- expr_ctx: &ExprCtx,
-) {
- if !matches!(path_ctx.qualified, Qualified::No) {
- return;
- }
- if let ExprCtx { is_func_update: Some(record_expr), .. } = expr_ctx {
- let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone()));
-
- match ty.as_ref().and_then(|t| t.original.as_adt()) {
- Some(hir::Adt::Union(_)) => (),
- _ => {
- let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
- add_default_update(acc, ctx, ty, &missing_fields);
- }
- };
- }
-}
-
-fn add_default_update(
+pub(crate) fn add_default_update(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
ty: Option<hir::TypeInfo>,
- missing_fields: &[(hir::Field, hir::Type)],
) {
let default_trait = ctx.famous_defs().core_default_Default();
- let impl_default_trait = default_trait
+ let impls_default_trait = default_trait
.zip(ty.as_ref())
.map_or(false, |(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[]));
- if impl_default_trait && !missing_fields.is_empty() {
+ if impls_default_trait {
// FIXME: This should make use of scope_def like completions so we get all the other goodies
+ // that is we should handle this like actually completing the default function
let completion_text = "..Default::default()";
let mut item = CompletionItem::new(SymbolKind::Field, ctx.source_range(), completion_text);
let completion_text =
@@ -130,7 +129,7 @@ mod tests {
#[test]
fn literal_struct_completion_edit() {
check_edit(
- "FooDesc {…}",
+ "FooDesc{}",
r#"
struct FooDesc { pub bar: bool }
@@ -155,7 +154,7 @@ fn baz() {
#[test]
fn literal_struct_impl_self_completion() {
check_edit(
- "Self {…}",
+ "Self{}",
r#"
struct Foo {
bar: u64,
@@ -181,7 +180,7 @@ impl Foo {
);
check_edit(
- "Self(…)",
+ "Self()",
r#"
mod submod {
pub struct Foo(pub u64);
@@ -210,7 +209,7 @@ impl submod::Foo {
#[test]
fn literal_struct_completion_from_sub_modules() {
check_edit(
- "submod::Struct {…}",
+ "submod::Struct{}",
r#"
mod submod {
pub struct Struct {
@@ -239,7 +238,7 @@ fn f() -> submod::Struct {
#[test]
fn literal_struct_complexion_module() {
check_edit(
- "FooDesc {…}",
+ "FooDesc{}",
r#"
mod _69latrick {
pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
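
On the union handling added to `complete_record_pattern_fields` above, a hedged sketch of the case it addresses (illustrative union):

union IntOrFloat {
    int: u32,
    float: f32,
}

fn read_int(u: IntOrFloat) -> u32 {
    // Completing inside an empty pattern `IntOrFloat { $0 }` now offers both
    // `int` and `float`; once one field is written no further fields are
    // suggested, since a union pattern names exactly one field.
    unsafe {
        let IntOrFloat { int } = u;
        int
    }
}
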
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
index 80d6af281..a0f5e81b4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -17,6 +17,7 @@ pub struct CompletionConfig {
pub callable: Option<CallableSnippets>,
pub snippet_cap: Option<SnippetCap>,
pub insert_use: InsertUseConfig,
+ pub prefer_no_std: bool,
pub snippets: Vec<Snippet>,
}
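
The new `prefer_no_std` flag is threaded through `find_use_path`/`search_for_imports` in the hunks above; the intent, as far as these call sites show, is to bias inserted import paths toward `core`/`alloc` when enabled. Illustration (the exact preference logic lives in `ide_db`, not shown here):

// prefer_no_std = false: auto-import / flyimport inserts the std path.
use std::fmt::Debug;
// prefer_no_std = true: the same item would instead be imported as
// `use core::fmt::Debug;`.

fn takes_debug<T: Debug>(_t: T) {}
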
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index e35f79d2b..9850813a0 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -1,4 +1,4 @@
-//! See `CompletionContext` structure.
+//! See [`CompletionContext`] structure.
mod analysis;
#[cfg(test)]
@@ -23,7 +23,10 @@ use syntax::{
};
use text_edit::Indel;
-use crate::CompletionConfig;
+use crate::{
+ context::analysis::{expand_and_analyze, AnalysisResult},
+ CompletionConfig,
+};
const COMPLETION_MARKER: &str = "intellijRulezz";
@@ -64,8 +67,11 @@ pub(crate) struct PathCompletionCtx {
pub(super) qualified: Qualified,
/// The parent of the path we are completing.
pub(super) parent: Option<ast::Path>,
+ #[allow(dead_code)]
/// The path of which we are completing the segment
pub(super) path: ast::Path,
+ /// The path of which we are completing the segment in the original file
+ pub(crate) original_path: Option<ast::Path>,
pub(super) kind: PathKind,
/// Whether the path segment has type args or not.
pub(super) has_type_args: bool,
@@ -134,6 +140,7 @@ pub(crate) struct ExprCtx {
pub(crate) in_condition: bool,
pub(crate) incomplete_let: bool,
pub(crate) ref_expr_parent: Option<ast::RefExpr>,
+    /// The surrounding record expression in which we are completing a functional update
pub(crate) is_func_update: Option<ast::RecordExpr>,
pub(crate) self_param: Option<hir::SelfParam>,
pub(crate) innermost_ret_ty: Option<hir::Type>,
@@ -557,15 +564,27 @@ impl<'a> CompletionContext<'a> {
let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
parse.reparse(&edit).tree()
};
- let fake_ident_token =
- file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;
+ // always pick the token to the immediate left of the cursor, as that is what we are actually
+ // completing on
let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
- let token = sema.descend_into_macros_single(original_token.clone());
+
+ let AnalysisResult {
+ analysis,
+ expected: (expected_type, expected_name),
+ qualifier_ctx,
+ token,
+ offset,
+ } = expand_and_analyze(
+ &sema,
+ original_file.syntax().clone(),
+ file_with_fake_ident.syntax().clone(),
+ offset,
+ &original_token,
+ )?;
// adjust for macro input, this still fails if there is no token written yet
- let scope_offset = if original_token == token { offset } else { token.text_range().end() };
- let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;
+ let scope = sema.scope_at_offset(&token.parent()?, offset)?;
let krate = scope.krate();
let module = scope.module();
@@ -579,7 +598,7 @@ impl<'a> CompletionContext<'a> {
let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
- let mut ctx = CompletionContext {
+ let ctx = CompletionContext {
sema,
scope,
db,
@@ -589,19 +608,13 @@ impl<'a> CompletionContext<'a> {
token,
krate,
module,
- expected_name: None,
- expected_type: None,
- qualifier_ctx: Default::default(),
+ expected_name,
+ expected_type,
+ qualifier_ctx,
locals,
depth_from_crate_root,
};
- let ident_ctx = ctx.expand_and_analyze(
- original_file.syntax().clone(),
- file_with_fake_ident.syntax().clone(),
- offset,
- fake_ident_token,
- )?;
- Some((ctx, ident_ctx))
+ Some((ctx, analysis))
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index 22ec7cead..04111ec7e 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -11,1060 +11,1094 @@ use syntax::{
};
use crate::context::{
- AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
- ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
- NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
- PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
- COMPLETION_MARKER,
+ AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
+ LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
+ PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
+ TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
};
-impl<'a> CompletionContext<'a> {
- /// Expand attributes and macro calls at the current cursor position for both the original file
- /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
- /// and speculative states stay in sync.
- pub(super) fn expand_and_analyze(
- &mut self,
- mut original_file: SyntaxNode,
- mut speculative_file: SyntaxNode,
- mut offset: TextSize,
- mut fake_ident_token: SyntaxToken,
- ) -> Option<CompletionAnalysis> {
- let _p = profile::span("CompletionContext::expand_and_fill");
- let mut derive_ctx = None;
-
- 'expansion: loop {
- let parent_item =
- |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
- let ancestor_items = iter::successors(
- Option::zip(
- find_node_at_offset::<ast::Item>(&original_file, offset),
- find_node_at_offset::<ast::Item>(&speculative_file, offset),
+struct ExpansionResult {
+ original_file: SyntaxNode,
+ speculative_file: SyntaxNode,
+ offset: TextSize,
+ fake_ident_token: SyntaxToken,
+ derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
+}
+
+pub(super) struct AnalysisResult {
+ pub(super) analysis: CompletionAnalysis,
+ pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
+ pub(super) qualifier_ctx: QualifierCtx,
+ pub(super) token: SyntaxToken,
+ pub(super) offset: TextSize,
+}
+
+pub(super) fn expand_and_analyze(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: SyntaxNode,
+ speculative_file: SyntaxNode,
+ offset: TextSize,
+ original_token: &SyntaxToken,
+) -> Option<AnalysisResult> {
+ // as we insert after the offset, right biased will *always* pick the identifier no matter
+ // if there is an ident already typed or not
+ let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
+ // the relative offset between the cursor and the *identifier* token we are completing on
+ let relative_offset = offset - fake_ident_token.text_range().start();
+ // make the offset point to the start of the original token, as that is what the
+ // intermediate offsets calculated in expansion always points to
+ let offset = offset - relative_offset;
+ let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
+ // add the relative offset back, so that left_biased finds the proper token
+ let offset = expansion.offset + relative_offset;
+ let token = expansion.original_file.token_at_offset(offset).left_biased()?;
+
+ analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
+ AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
+ })
+}
+
+/// Expand attributes and macro calls at the current cursor position for both the original file
+/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
+/// and speculative states stay in sync.
+fn expand(
+ sema: &Semantics<'_, RootDatabase>,
+ mut original_file: SyntaxNode,
+ mut speculative_file: SyntaxNode,
+ mut offset: TextSize,
+ mut fake_ident_token: SyntaxToken,
+) -> ExpansionResult {
+ let _p = profile::span("CompletionContext::expand");
+ let mut derive_ctx = None;
+
+ 'expansion: loop {
+ let parent_item =
+ |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let ancestor_items = iter::successors(
+ Option::zip(
+ find_node_at_offset::<ast::Item>(&original_file, offset),
+ find_node_at_offset::<ast::Item>(&speculative_file, offset),
+ ),
+ |(a, b)| parent_item(a).zip(parent_item(b)),
+ );
+
+ // first try to expand attributes as these are always the outermost macro calls
+ 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
+ match (
+ sema.expand_attr_macro(&actual_item),
+ sema.speculative_expand_attr_macro(
+ &actual_item,
+ &item_with_fake_ident,
+ fake_ident_token.clone(),
),
- |(a, b)| parent_item(a).zip(parent_item(b)),
- );
-
- // first try to expand attributes as these are always the outermost macro calls
- 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
- match (
- self.sema.expand_attr_macro(&actual_item),
- self.sema.speculative_expand_attr_macro(
- &actual_item,
- &item_with_fake_ident,
- fake_ident_token.clone(),
- ),
- ) {
- // maybe parent items have attributes, so continue walking the ancestors
- (None, None) => continue 'ancestors,
- // successful expansions
- (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
- let new_offset = fake_mapped_token.text_range().start();
- if new_offset > actual_expansion.text_range().end() {
- // offset outside of bounds from the original expansion,
- // stop here to prevent problems from happening
- break 'expansion;
- }
- original_file = actual_expansion;
- speculative_file = fake_expansion;
- fake_ident_token = fake_mapped_token;
- offset = new_offset;
- continue 'expansion;
+ ) {
+ // maybe parent items have attributes, so continue walking the ancestors
+ (None, None) => continue 'ancestors,
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
}
- // exactly one expansion failed, inconsistent state so stop expanding completely
- _ => break 'expansion,
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
}
+ // exactly one expansion failed, inconsistent state so stop expanding completely
+ _ => break 'expansion,
}
+ }
- // No attributes have been expanded, so look for macro_call! token trees or derive token trees
- let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
- Some(it) => it,
- None => break 'expansion,
- };
- let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
- Some(it) => it,
- None => break 'expansion,
- };
+ // No attributes have been expanded, so look for macro_call! token trees or derive token trees
+ let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+ let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
- // Expand pseudo-derive expansion
- if let (Some(orig_attr), Some(spec_attr)) = (
- orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
- spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ // Expand pseudo-derive expansion
+ if let (Some(orig_attr), Some(spec_attr)) = (
+ orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ ) {
+ if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
+ sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
+ sema.speculative_expand_derive_as_pseudo_attr_macro(
+ &orig_attr,
+ &spec_attr,
+ fake_ident_token.clone(),
+ ),
) {
- if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
- self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
- self.sema.speculative_expand_derive_as_pseudo_attr_macro(
- &orig_attr,
- &spec_attr,
- fake_ident_token.clone(),
- ),
- ) {
- derive_ctx = Some((
- actual_expansion,
- fake_expansion,
- fake_mapped_token.text_range().start(),
- orig_attr,
- ));
- }
- // at this point we won't have any more successful expansions, so stop
+ derive_ctx = Some((
+ actual_expansion,
+ fake_expansion,
+ fake_mapped_token.text_range().start(),
+ orig_attr,
+ ));
+ }
+ // at this point we won't have any more successful expansions, so stop
+ break 'expansion;
+ }
+
+ // Expand fn-like macro calls
+ if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+ orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ ) {
+ let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+ let mac_call_path1 =
+ macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+
+ // inconsistent state, stop expanding
+ if mac_call_path0 != mac_call_path1 {
break 'expansion;
}
+ let speculative_args = match macro_call_with_fake_ident.token_tree() {
+ Some(tt) => tt,
+ None => break 'expansion,
+ };
- // Expand fn-like macro calls
- if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
- orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
- spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ match (
+ sema.expand(&actual_macro_call),
+ sema.speculative_expand(
+ &actual_macro_call,
+ &speculative_args,
+ fake_ident_token.clone(),
+ ),
) {
- let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
- let mac_call_path1 =
- macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
-
- // inconsistent state, stop expanding
- if mac_call_path0 != mac_call_path1 {
- break 'expansion;
- }
- let speculative_args = match macro_call_with_fake_ident.token_tree() {
- Some(tt) => tt,
- None => break 'expansion,
- };
-
- match (
- self.sema.expand(&actual_macro_call),
- self.sema.speculative_expand(
- &actual_macro_call,
- &speculative_args,
- fake_ident_token.clone(),
- ),
- ) {
- // successful expansions
- (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
- let new_offset = fake_mapped_token.text_range().start();
- if new_offset > actual_expansion.text_range().end() {
- // offset outside of bounds from the original expansion,
- // stop here to prevent problems from happening
- break 'expansion;
- }
- original_file = actual_expansion;
- speculative_file = fake_expansion;
- fake_ident_token = fake_mapped_token;
- offset = new_offset;
- continue 'expansion;
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset is outside the bounds of the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
}
- // at least on expansion failed, we won't have anything to expand from this point
- // onwards so break out
- _ => break 'expansion,
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
}
+ // at least one expansion failed, we won't have anything to expand from this point
+ // onwards so break out
+ _ => break 'expansion,
}
-
- // none of our states have changed so stop the loop
- break 'expansion;
}
- self.analyze(&original_file, speculative_file, offset, derive_ctx)
+ // none of our states have changed so stop the loop
+ break 'expansion;
}
+ ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
+}
- /// Calculate the expected type and name of the cursor position.
- fn expected_type_and_name(
- &self,
- name_like: &ast::NameLike,
- ) -> (Option<Type>, Option<NameOrNameRef>) {
- let mut node = match self.token.parent() {
- Some(it) => it,
- None => return (None, None),
- };
+/// Fill the completion context; this is what performs the semantic reasoning about the surrounding context
+/// of the completion location.
+fn analyze(
+ sema: &Semantics<'_, RootDatabase>,
+ expansion_result: ExpansionResult,
+ original_token: &SyntaxToken,
+ self_token: &SyntaxToken,
+) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
+ let _p = profile::span("CompletionContext::analyze");
+ let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
+ expansion_result;
+ let syntax_element = NodeOrToken::Token(fake_ident_token);
+ if is_in_token_of_for_loop(syntax_element.clone()) {
+ // for pat $0
+ // there is nothing to complete here except `in` keyword
+ // don't bother populating the context
+ // FIXME: the completion calculations should end up good enough
+ // such that this special case becomes unnecessary
+ return None;
+ }
- let strip_refs = |mut ty: Type| match name_like {
- ast::NameLike::NameRef(n) => {
- let p = match n.syntax().parent() {
- Some(it) => it,
- None => return ty,
- };
- let top_syn = match_ast! {
- match p {
- ast::FieldExpr(e) => e
- .syntax()
- .ancestors()
- .map_while(ast::FieldExpr::cast)
- .last()
- .map(|it| it.syntax().clone()),
- ast::PathSegment(e) => e
- .syntax()
- .ancestors()
- .skip(1)
- .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
- .find_map(ast::PathExpr::cast)
- .map(|it| it.syntax().clone()),
- _ => None
- }
+ // Overwrite the path kind for derives
+ if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
+ if let Some(ast::NameLike::NameRef(name_ref)) =
+ find_node_at_offset(&file_with_fake_ident, offset)
+ {
+ let parent = name_ref.syntax().parent()?;
+ let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
+ if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
+ path_ctx.kind = PathKind::Derive {
+ existing_derives: sema
+ .resolve_derive_macro(&origin_attr)
+ .into_iter()
+ .flatten()
+ .flatten()
+ .collect(),
};
- let top_syn = match top_syn {
- Some(it) => it,
- None => return ty,
- };
- for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
- cov_mark::hit!(expected_type_fn_param_ref);
- ty = ty.strip_reference();
- }
- ty
}
- _ => ty,
- };
+ return Some((
+ CompletionAnalysis::NameRef(nameref_ctx),
+ (None, None),
+ QualifierCtx::default(),
+ ));
+ }
+ return None;
+ }
- loop {
- break match_ast! {
- match node {
- ast::LetStmt(it) => {
- cov_mark::hit!(expected_type_let_with_leading_char);
- cov_mark::hit!(expected_type_let_without_leading_char);
- let ty = it.pat()
- .and_then(|pat| self.sema.type_of_pat(&pat))
- .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
- .map(TypeInfo::original);
- let name = match it.pat() {
- Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
- Some(_) | None => None,
- };
-
- (ty, name)
- },
- ast::LetExpr(it) => {
- cov_mark::hit!(expected_type_if_let_without_leading_char);
- let ty = it.pat()
- .and_then(|pat| self.sema.type_of_pat(&pat))
- .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
- .map(TypeInfo::original);
- (ty, None)
- },
- ast::ArgList(_) => {
- cov_mark::hit!(expected_type_fn_param);
- ActiveParameter::at_token(
- &self.sema,
- self.token.clone(),
- ).map(|ap| {
- let name = ap.ident().map(NameOrNameRef::Name);
-
- let ty = strip_refs(ap.ty);
- (Some(ty), name)
- })
- .unwrap_or((None, None))
- },
- ast::RecordExprFieldList(it) => {
- // wouldn't try {} be nice...
- (|| {
- if self.token.kind() == T![..]
- || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
- {
- cov_mark::hit!(expected_type_struct_func_update);
- let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
- let ty = self.sema.type_of_expr(&record_expr.into())?;
- Some((
- Some(ty.original),
- None
- ))
- } else {
- cov_mark::hit!(expected_type_struct_field_without_leading_char);
- let expr_field = self.token.prev_sibling_or_token()?
- .into_node()
- .and_then(ast::RecordExprField::cast)?;
- let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
- Some((
- Some(ty),
- expr_field.field_name().map(NameOrNameRef::NameRef),
- ))
- }
- })().unwrap_or((None, None))
- },
- ast::RecordExprField(it) => {
- if let Some(expr) = it.expr() {
- cov_mark::hit!(expected_type_struct_field_with_leading_char);
- (
- self.sema.type_of_expr(&expr).map(TypeInfo::original),
- it.field_name().map(NameOrNameRef::NameRef),
- )
- } else {
- cov_mark::hit!(expected_type_struct_field_followed_by_comma);
- let ty = self.sema.resolve_record_field(&it)
- .map(|(_, _, ty)| ty);
- (
- ty,
- it.field_name().map(NameOrNameRef::NameRef),
- )
- }
- },
- // match foo { $0 }
- // match foo { ..., pat => $0 }
- ast::MatchExpr(it) => {
- let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());
-
- let ty = if on_arrow {
- // match foo { ..., pat => $0 }
- cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
- cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
- self.sema.type_of_expr(&it.into())
- } else {
- // match foo { $0 }
- cov_mark::hit!(expected_type_match_arm_without_leading_char);
- it.expr().and_then(|e| self.sema.type_of_expr(&e))
- }.map(TypeInfo::original);
- (ty, None)
- },
- ast::IfExpr(it) => {
- let ty = it.condition()
- .and_then(|e| self.sema.type_of_expr(&e))
- .map(TypeInfo::original);
- (ty, None)
- },
- ast::IdentPat(it) => {
- cov_mark::hit!(expected_type_if_let_with_leading_char);
- cov_mark::hit!(expected_type_match_arm_with_leading_char);
- let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
- (ty, None)
- },
- ast::Fn(it) => {
- cov_mark::hit!(expected_type_fn_ret_with_leading_char);
- cov_mark::hit!(expected_type_fn_ret_without_leading_char);
- let def = self.sema.to_def(&it);
- (def.map(|def| def.ret_type(self.db)), None)
- },
- ast::ClosureExpr(it) => {
- let ty = self.sema.type_of_expr(&it.into());
- ty.and_then(|ty| ty.original.as_callable(self.db))
- .map(|c| (Some(c.return_type()), None))
- .unwrap_or((None, None))
- },
- ast::ParamList(_) => (None, None),
- ast::Stmt(_) => (None, None),
- ast::Item(_) => (None, None),
- _ => {
- match node.parent() {
- Some(n) => {
- node = n;
- continue;
- },
- None => (None, None),
- }
- },
+ let name_like = match find_node_at_offset(&speculative_file, offset) {
+ Some(it) => it,
+ None => {
+ let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
+ CompletionAnalysis::String {
+ original,
+ expanded: ast::String::cast(self_token.clone()),
+ }
+ } else {
+ // Fix up trailing whitespace problem
+ // #[attr(foo = $0
+ let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
+ let p = token.parent()?;
+ if p.kind() == SyntaxKind::TOKEN_TREE
+ && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+ {
+ let colon_prefix = previous_non_trivia_token(self_token.clone())
+ .map_or(false, |it| T![:] == it.kind());
+ CompletionAnalysis::UnexpandedAttrTT {
+ fake_attribute_under_caret: syntax_element
+ .ancestors()
+ .find_map(ast::Attr::cast),
+ colon_prefix,
+ }
+ } else {
+ return None;
}
};
+ return Some((analysis, (None, None), QualifierCtx::default()));
}
- }
-
- /// Fill the completion context, this is what does semantic reasoning about the surrounding context
- /// of the completion location.
- fn analyze(
- &mut self,
- original_file: &SyntaxNode,
- file_with_fake_ident: SyntaxNode,
- offset: TextSize,
- derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
- ) -> Option<CompletionAnalysis> {
- let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
- let syntax_element = NodeOrToken::Token(fake_ident_token);
- if is_in_token_of_for_loop(syntax_element.clone()) {
- // for pat $0
- // there is nothing to complete here except `in` keyword
- // don't bother populating the context
- // FIXME: the completion calculations should end up good enough
- // such that this special case becomes unnecessary
- return None;
+ };
+ let expected = expected_type_and_name(sema, &self_token, &name_like);
+ let mut qual_ctx = QualifierCtx::default();
+ let analysis = match name_like {
+ ast::NameLike::Lifetime(lifetime) => {
+ CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
+ }
+ ast::NameLike::NameRef(name_ref) => {
+ let parent = name_ref.syntax().parent()?;
+ let (nameref_ctx, qualifier_ctx) =
+ classify_name_ref(sema, &original_file, name_ref, parent.clone())?;
+ qual_ctx = qualifier_ctx;
+ CompletionAnalysis::NameRef(nameref_ctx)
+ }
+ ast::NameLike::Name(name) => {
+ let name_ctx = classify_name(sema, &original_file, name)?;
+ CompletionAnalysis::Name(name_ctx)
}
+ };
+ Some((analysis, expected, qual_ctx))
+}
- // Overwrite the path kind for derives
- if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
- if let Some(ast::NameLike::NameRef(name_ref)) =
- find_node_at_offset(&file_with_fake_ident, offset)
- {
- let parent = name_ref.syntax().parent()?;
- let (mut nameref_ctx, _) =
- Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
- if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
- path_ctx.kind = PathKind::Derive {
- existing_derives: self
- .sema
- .resolve_derive_macro(&origin_attr)
- .into_iter()
- .flatten()
- .flatten()
- .collect(),
- };
+/// Calculate the expected type and name of the cursor position.
+fn expected_type_and_name(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+ name_like: &ast::NameLike,
+) -> (Option<Type>, Option<NameOrNameRef>) {
+ let mut node = match token.parent() {
+ Some(it) => it,
+ None => return (None, None),
+ };
+
+ let strip_refs = |mut ty: Type| match name_like {
+ ast::NameLike::NameRef(n) => {
+ let p = match n.syntax().parent() {
+ Some(it) => it,
+ None => return ty,
+ };
+ let top_syn = match_ast! {
+ match p {
+ ast::FieldExpr(e) => e
+ .syntax()
+ .ancestors()
+ .map_while(ast::FieldExpr::cast)
+ .last()
+ .map(|it| it.syntax().clone()),
+ ast::PathSegment(e) => e
+ .syntax()
+ .ancestors()
+ .skip(1)
+ .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
+ .find_map(ast::PathExpr::cast)
+ .map(|it| it.syntax().clone()),
+ _ => None
}
- return Some(CompletionAnalysis::NameRef(nameref_ctx));
+ };
+ let top_syn = match top_syn {
+ Some(it) => it,
+ None => return ty,
+ };
+ for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
+ cov_mark::hit!(expected_type_fn_param_ref);
+ ty = ty.strip_reference();
}
- return None;
+ ty
}
+ _ => ty,
+ };
- let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
- Some(it) => it,
- None => {
- let analysis =
- if let Some(original) = ast::String::cast(self.original_token.clone()) {
- CompletionAnalysis::String {
- original,
- expanded: ast::String::cast(self.token.clone()),
- }
- } else {
- // Fix up trailing whitespace problem
- // #[attr(foo = $0
- let token =
- syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
- let p = token.parent()?;
- if p.kind() == SyntaxKind::TOKEN_TREE
- && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+ loop {
+ break match_ast! {
+ match node {
+ ast::LetStmt(it) => {
+ cov_mark::hit!(expected_type_let_with_leading_char);
+ cov_mark::hit!(expected_type_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| sema.type_of_pat(&pat))
+ .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
+ Some(_) | None => None,
+ };
+
+ (ty, name)
+ },
+ ast::LetExpr(it) => {
+ cov_mark::hit!(expected_type_if_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| sema.type_of_pat(&pat))
+ .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::ArgList(_) => {
+ cov_mark::hit!(expected_type_fn_param);
+ ActiveParameter::at_token(
+ &sema,
+ token.clone(),
+ ).map(|ap| {
+ let name = ap.ident().map(NameOrNameRef::Name);
+
+ let ty = strip_refs(ap.ty);
+ (Some(ty), name)
+ })
+ .unwrap_or((None, None))
+ },
+ ast::RecordExprFieldList(it) => {
+ // wouldn't try {} be nice...
+ (|| {
+ if token.kind() == T![..]
+ || token.prev_token().map(|t| t.kind()) == Some(T![..])
{
- let colon_prefix = previous_non_trivia_token(self.token.clone())
- .map_or(false, |it| T![:] == it.kind());
- CompletionAnalysis::UnexpandedAttrTT {
- fake_attribute_under_caret: syntax_element
- .ancestors()
- .find_map(ast::Attr::cast),
- colon_prefix,
- }
+ cov_mark::hit!(expected_type_struct_func_update);
+ let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
+ let ty = sema.type_of_expr(&record_expr.into())?;
+ Some((
+ Some(ty.original),
+ None
+ ))
} else {
- return None;
+ cov_mark::hit!(expected_type_struct_field_without_leading_char);
+ let expr_field = token.prev_sibling_or_token()?
+ .into_node()
+ .and_then(ast::RecordExprField::cast)?;
+ let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
+ Some((
+ Some(ty),
+ expr_field.field_name().map(NameOrNameRef::NameRef),
+ ))
}
- };
- return Some(analysis);
+ })().unwrap_or((None, None))
+ },
+ ast::RecordExprField(it) => {
+ if let Some(expr) = it.expr() {
+ cov_mark::hit!(expected_type_struct_field_with_leading_char);
+ (
+ sema.type_of_expr(&expr).map(TypeInfo::original),
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ } else {
+ cov_mark::hit!(expected_type_struct_field_followed_by_comma);
+ let ty = sema.resolve_record_field(&it)
+ .map(|(_, _, ty)| ty);
+ (
+ ty,
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ }
+ },
+ // match foo { $0 }
+ // match foo { ..., pat => $0 }
+ ast::MatchExpr(it) => {
+ let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());
+
+ let ty = if on_arrow {
+ // match foo { ..., pat => $0 }
+ cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
+ cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
+ sema.type_of_expr(&it.into())
+ } else {
+ // match foo { $0 }
+ cov_mark::hit!(expected_type_match_arm_without_leading_char);
+ it.expr().and_then(|e| sema.type_of_expr(&e))
+ }.map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IfExpr(it) => {
+ let ty = it.condition()
+ .and_then(|e| sema.type_of_expr(&e))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IdentPat(it) => {
+ cov_mark::hit!(expected_type_if_let_with_leading_char);
+ cov_mark::hit!(expected_type_match_arm_with_leading_char);
+ let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::Fn(it) => {
+ cov_mark::hit!(expected_type_fn_ret_with_leading_char);
+ cov_mark::hit!(expected_type_fn_ret_without_leading_char);
+ let def = sema.to_def(&it);
+ (def.map(|def| def.ret_type(sema.db)), None)
+ },
+ ast::ClosureExpr(it) => {
+ let ty = sema.type_of_expr(&it.into());
+ ty.and_then(|ty| ty.original.as_callable(sema.db))
+ .map(|c| (Some(c.return_type()), None))
+ .unwrap_or((None, None))
+ },
+ ast::ParamList(_) => (None, None),
+ ast::Stmt(_) => (None, None),
+ ast::Item(_) => (None, None),
+ _ => {
+ match node.parent() {
+ Some(n) => {
+ node = n;
+ continue;
+ },
+ None => (None, None),
+ }
+ },
}
};
- (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
- let analysis = match name_like {
- ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
- Self::classify_lifetime(&self.sema, original_file, lifetime)?,
- ),
- ast::NameLike::NameRef(name_ref) => {
- let parent = name_ref.syntax().parent()?;
- let (nameref_ctx, qualifier_ctx) =
- Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;
+ }
+}
- self.qualifier_ctx = qualifier_ctx;
- CompletionAnalysis::NameRef(nameref_ctx)
- }
- ast::NameLike::Name(name) => {
- let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
- CompletionAnalysis::Name(name_ctx)
- }
- };
- Some(analysis)
+fn classify_lifetime(
+ _sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ lifetime: ast::Lifetime,
+) -> Option<LifetimeContext> {
+ let parent = lifetime.syntax().parent()?;
+ if parent.kind() == SyntaxKind::ERROR {
+ return None;
}
- fn classify_lifetime(
- _sema: &Semantics<'_, RootDatabase>,
- original_file: &SyntaxNode,
- lifetime: ast::Lifetime,
- ) -> Option<LifetimeContext> {
- let parent = lifetime.syntax().parent()?;
- if parent.kind() == SyntaxKind::ERROR {
- return None;
+ let kind = match_ast! {
+ match parent {
+ ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
+ is_decl: param.lifetime().as_ref() == Some(&lifetime),
+ param
+ },
+ ast::BreakExpr(_) => LifetimeKind::LabelRef,
+ ast::ContinueExpr(_) => LifetimeKind::LabelRef,
+ ast::Label(_) => LifetimeKind::LabelDef,
+ _ => LifetimeKind::Lifetime,
}
+ };
+ let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
- let kind = match_ast! {
- match parent {
- ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
- is_decl: param.lifetime().as_ref() == Some(&lifetime),
- param
- },
- ast::BreakExpr(_) => LifetimeKind::LabelRef,
- ast::ContinueExpr(_) => LifetimeKind::LabelRef,
- ast::Label(_) => LifetimeKind::LabelDef,
- _ => LifetimeKind::Lifetime,
- }
- };
- let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
+ Some(LifetimeContext { lifetime, kind })
+}
- Some(LifetimeContext { lifetime, kind })
- }
+fn classify_name(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name: ast::Name,
+) -> Option<NameContext> {
+ let parent = name.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::Const(_) => NameKind::Const,
+ ast::ConstParam(_) => NameKind::ConstParam,
+ ast::Enum(_) => NameKind::Enum,
+ ast::Fn(_) => NameKind::Function,
+ ast::IdentPat(bind_pat) => {
+ let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
+ if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
+ pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
+ }
- fn classify_name(
- sema: &Semantics<'_, RootDatabase>,
- original_file: &SyntaxNode,
- name: ast::Name,
- ) -> Option<NameContext> {
- let parent = name.syntax().parent()?;
- let kind = match_ast! {
- match parent {
- ast::Const(_) => NameKind::Const,
- ast::ConstParam(_) => NameKind::ConstParam,
- ast::Enum(_) => NameKind::Enum,
- ast::Fn(_) => NameKind::Function,
- ast::IdentPat(bind_pat) => {
- let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
- if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
- pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
- }
+ NameKind::IdentPat(pat_ctx)
+ },
+ ast::MacroDef(_) => NameKind::MacroDef,
+ ast::MacroRules(_) => NameKind::MacroRules,
+ ast::Module(module) => NameKind::Module(module),
+ ast::RecordField(_) => NameKind::RecordField,
+ ast::Rename(_) => NameKind::Rename,
+ ast::SelfParam(_) => NameKind::SelfParam,
+ ast::Static(_) => NameKind::Static,
+ ast::Struct(_) => NameKind::Struct,
+ ast::Trait(_) => NameKind::Trait,
+ ast::TypeAlias(_) => NameKind::TypeAlias,
+ ast::TypeParam(_) => NameKind::TypeParam,
+ ast::Union(_) => NameKind::Union,
+ ast::Variant(_) => NameKind::Variant,
+ _ => return None,
+ }
+ };
+ let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
+ Some(NameContext { name, kind })
+}
- NameKind::IdentPat(pat_ctx)
- },
- ast::MacroDef(_) => NameKind::MacroDef,
- ast::MacroRules(_) => NameKind::MacroRules,
- ast::Module(module) => NameKind::Module(module),
- ast::RecordField(_) => NameKind::RecordField,
- ast::Rename(_) => NameKind::Rename,
- ast::SelfParam(_) => NameKind::SelfParam,
- ast::Static(_) => NameKind::Static,
- ast::Struct(_) => NameKind::Struct,
- ast::Trait(_) => NameKind::Trait,
- ast::TypeAlias(_) => NameKind::TypeAlias,
- ast::TypeParam(_) => NameKind::TypeParam,
- ast::Union(_) => NameKind::Union,
- ast::Variant(_) => NameKind::Variant,
- _ => return None,
- }
- };
- let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
- Some(NameContext { name, kind })
+fn classify_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name_ref: ast::NameRef,
+ parent: SyntaxNode,
+) -> Option<(NameRefContext, QualifierCtx)> {
+ let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
+
+ let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
+
+ if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
+ let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
+ .map_or(false, |it| T![.] == it.kind());
+
+ return find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_lit(),
+ )
+ .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
+ .map(make_res);
}
-
- fn classify_name_ref(
- sema: &Semantics<'_, RootDatabase>,
- original_file: &SyntaxNode,
- name_ref: ast::NameRef,
- parent: SyntaxNode,
- ) -> Option<(NameRefContext, QualifierCtx)> {
- let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
-
- let make_res =
- |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
-
- if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
- let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
- .map_or(false, |it| T![.] == it.kind());
-
- return find_node_in_file_compensated(
+ if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
+ let kind = NameRefKind::Pattern(PatternContext {
+ param_ctx: None,
+ has_type_ascription: false,
+ ref_token: None,
+ mut_token: None,
+ record_pat: find_node_in_file_compensated(
sema,
original_file,
- &record_field.parent_record_lit(),
+ &record_field.parent_record_pat(),
+ ),
+ ..pattern_context_for(
+ sema,
+ original_file,
+ record_field.parent_record_pat().clone().into(),
)
- .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
- .map(make_res);
+ });
+ return Some(make_res(kind));
+ }
+
+ let segment = match_ast! {
+ match parent {
+ ast::PathSegment(segment) => segment,
+ ast::FieldExpr(field) => {
+ let receiver = find_opt_node_in_file(original_file, field.expr());
+ let receiver_is_ambiguous_float_literal = match &receiver {
+ Some(ast::Expr::Literal(l)) => matches! {
+ l.kind(),
+ ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
+ },
+ _ => false,
+ };
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ ast::MethodCallExpr(method) => {
+ let receiver = find_opt_node_in_file(original_file, method.receiver());
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ _ => return None,
}
- if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
- let kind = NameRefKind::Pattern(PatternContext {
- param_ctx: None,
- has_type_ascription: false,
- ref_token: None,
- mut_token: None,
- record_pat: find_node_in_file_compensated(
- sema,
- original_file,
- &record_field.parent_record_pat(),
- ),
- ..pattern_context_for(
- sema,
- original_file,
- record_field.parent_record_pat().clone().into(),
- )
- });
- return Some(make_res(kind));
+ };
+
+ let path = segment.parent_path();
+ let original_path = find_node_in_file_compensated(sema, original_file, &path);
+
+ let mut path_ctx = PathCompletionCtx {
+ has_call_parens: false,
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ path: path.clone(),
+ original_path,
+ kind: PathKind::Item { kind: ItemListKind::SourceFile },
+ has_type_args: false,
+ use_tree_parent: false,
+ };
+
+ let is_in_block = |it: &SyntaxNode| {
+ it.parent()
+ .map(|node| {
+ ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
+ })
+ .unwrap_or(false)
+ };
+ let func_update_record = |syn: &SyntaxNode| {
+ if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
+ find_node_in_file_compensated(sema, original_file, &record_expr)
+ } else {
+ None
+ }
+ };
+ let after_if_expr = |node: SyntaxNode| {
+ let prev_expr = (|| {
+ let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
+ ast::ExprStmt::cast(prev_sibling)?.expr()
+ })();
+ matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
+ };
+
+ // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
+ // ex. trait Foo $0 {}
+ // in these cases parser recovery usually kicks in for our inserted identifier, causing it
+ // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
+ // expression or an item list.
+ // The following code checks if the body is missing; if it is, we either cut off the body
+ // from the item or it was missing in the first place
+ let inbetween_body_and_decl_check = |node: SyntaxNode| {
+ if let Some(NodeOrToken::Node(n)) =
+ syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
+ {
+ if let Some(item) = ast::Item::cast(n) {
+ let is_inbetween = match &item {
+ ast::Item::Const(it) => it.body().is_none(),
+ ast::Item::Enum(it) => it.variant_list().is_none(),
+ ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
+ ast::Item::Fn(it) => it.body().is_none(),
+ ast::Item::Impl(it) => it.assoc_item_list().is_none(),
+ ast::Item::Module(it) => it.item_list().is_none(),
+ ast::Item::Static(it) => it.body().is_none(),
+ ast::Item::Struct(it) => it.field_list().is_none(),
+ ast::Item::Trait(it) => it.assoc_item_list().is_none(),
+ ast::Item::TypeAlias(it) => it.ty().is_none(),
+ ast::Item::Union(it) => it.record_field_list().is_none(),
+ _ => false,
+ };
+ if is_inbetween {
+ return Some(item);
+ }
+ }
}
+ None
+ };
- let segment = match_ast! {
+ let type_location = |node: &SyntaxNode| {
+ let parent = node.parent()?;
+ let res = match_ast! {
match parent {
- ast::PathSegment(segment) => segment,
- ast::FieldExpr(field) => {
- let receiver = find_opt_node_in_file(original_file, field.expr());
- let receiver_is_ambiguous_float_literal = match &receiver {
- Some(ast::Expr::Literal(l)) => matches! {
- l.kind(),
- ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
- },
- _ => false,
+ ast::Const(it) => {
+ let name = find_opt_node_in_file(original_file, it.name())?;
+ let original = ast::Const::cast(name.syntax().parent()?)?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
+ },
+ ast::RetType(it) => {
+ if it.thin_arrow_token().is_none() {
+ return None;
+ }
+ let parent = match ast::Fn::cast(parent.parent()?) {
+ Some(x) => x.param_list(),
+ None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
};
- let kind = NameRefKind::DotAccess(DotAccess {
- receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
- kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
- receiver
- });
- return Some(make_res(kind));
+
+ let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
+ match parent {
+ ast::ClosureExpr(it) => {
+ it.body()
+ },
+ ast::Fn(it) => {
+ it.body().map(ast::Expr::BlockExpr)
+ },
+ _ => return None,
+ }
+ }))
},
- ast::MethodCallExpr(method) => {
- let receiver = find_opt_node_in_file(original_file, method.receiver());
- let kind = NameRefKind::DotAccess(DotAccess {
- receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
- kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
- receiver
- });
- return Some(make_res(kind));
+ ast::Param(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
},
+ ast::LetStmt(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::Impl(it) => {
+ match it.trait_() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
+ _ => match it.self_ty() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
+ _ => return None,
+ },
+ }
+ },
+ ast::TypeBound(_) => TypeLocation::TypeBound,
+ // is this case needed?
+ ast::TypeBoundList(_) => TypeLocation::TypeBound,
+ ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
+ // is this case needed?
+ ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
+ ast::TupleField(_) => TypeLocation::TupleField,
_ => return None,
}
};
+ Some(res)
+ };
- let path = segment.parent_path();
- let mut path_ctx = PathCompletionCtx {
- has_call_parens: false,
- has_macro_bang: false,
- qualified: Qualified::No,
- parent: None,
- path: path.clone(),
- kind: PathKind::Item { kind: ItemListKind::SourceFile },
- has_type_args: false,
- use_tree_parent: false,
- };
-
- let is_in_block = |it: &SyntaxNode| {
- it.parent()
- .map(|node| {
- ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
- })
- .unwrap_or(false)
- };
- let func_update_record = |syn: &SyntaxNode| {
- if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
- find_node_in_file_compensated(sema, original_file, &record_expr)
+ let is_in_condition = |it: &ast::Expr| {
+ (|| {
+ let parent = it.syntax().parent()?;
+ if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
+ Some(expr.condition()? == *it)
+ } else if let Some(expr) = ast::IfExpr::cast(parent) {
+ Some(expr.condition()? == *it)
} else {
None
}
- };
- let after_if_expr = |node: SyntaxNode| {
- let prev_expr = (|| {
- let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
- ast::ExprStmt::cast(prev_sibling)?.expr()
- })();
- matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
- };
+ })()
+ .unwrap_or(false)
+ };
- // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
- // ex. trait Foo $0 {}
- // in these cases parser recovery usually kicks in for our inserted identifier, causing it
- // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
- // expression or an item list.
- // The following code checks if the body is missing, if it is we either cut off the body
- // from the item or it was missing in the first place
- let inbetween_body_and_decl_check = |node: SyntaxNode| {
- if let Some(NodeOrToken::Node(n)) =
- syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
- {
- if let Some(item) = ast::Item::cast(n) {
- let is_inbetween = match &item {
- ast::Item::Const(it) => it.body().is_none(),
- ast::Item::Enum(it) => it.variant_list().is_none(),
- ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
- ast::Item::Fn(it) => it.body().is_none(),
- ast::Item::Impl(it) => it.assoc_item_list().is_none(),
- ast::Item::Module(it) => it.item_list().is_none(),
- ast::Item::Static(it) => it.body().is_none(),
- ast::Item::Struct(it) => it.field_list().is_none(),
- ast::Item::Trait(it) => it.assoc_item_list().is_none(),
- ast::Item::TypeAlias(it) => it.ty().is_none(),
- ast::Item::Union(it) => it.record_field_list().is_none(),
- _ => false,
- };
- if is_inbetween {
- return Some(item);
+ let make_path_kind_expr = |expr: ast::Expr| {
+ let it = expr.syntax();
+ let in_block_expr = is_in_block(it);
+ let in_loop_body = is_in_loop_body(it);
+ let after_if_expr = after_if_expr(it.clone());
+ let ref_expr_parent =
+ path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
+ let (innermost_ret_ty, self_param) = {
+ let find_ret_ty = |it: SyntaxNode| {
+ if let Some(item) = ast::Item::cast(it.clone()) {
+ match item {
+ ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
}
- }
- }
- None
- };
-
- let type_location = |node: &SyntaxNode| {
- let parent = node.parent()?;
- let res = match_ast! {
- match parent {
- ast::Const(it) => {
- let name = find_opt_node_in_file(original_file, it.name())?;
- let original = ast::Const::cast(name.syntax().parent()?)?;
- TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
- },
- ast::RetType(it) => {
- if it.thin_arrow_token().is_none() {
- return None;
- }
- let parent = match ast::Fn::cast(parent.parent()?) {
- Some(x) => x.param_list(),
- None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
- };
-
- let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
- TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
- match parent {
- ast::ClosureExpr(it) => {
- it.body()
- },
- ast::Fn(it) => {
- it.body().map(ast::Expr::BlockExpr)
- },
- _ => return None,
- }
- }))
- },
- ast::Param(it) => {
- if it.colon_token().is_none() {
- return None;
- }
- TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
- },
- ast::LetStmt(it) => {
- if it.colon_token().is_none() {
- return None;
- }
- TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
- },
- ast::Impl(it) => {
- match it.trait_() {
- Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
- _ => match it.self_ty() {
- Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
- _ => return None,
- },
- }
- },
- ast::TypeBound(_) => TypeLocation::TypeBound,
- // is this case needed?
- ast::TypeBoundList(_) => TypeLocation::TypeBound,
- ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
- // is this case needed?
- ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
- ast::TupleField(_) => TypeLocation::TupleField,
- _ => return None,
- }
- };
- Some(res)
- };
-
- let is_in_condition = |it: &ast::Expr| {
- (|| {
- let parent = it.syntax().parent()?;
- if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
- Some(expr.condition()? == *it)
- } else if let Some(expr) = ast::IfExpr::cast(parent) {
- Some(expr.condition()? == *it)
} else {
- None
- }
- })()
- .unwrap_or(false)
- };
-
- let make_path_kind_expr = |expr: ast::Expr| {
- let it = expr.syntax();
- let in_block_expr = is_in_block(it);
- let in_loop_body = is_in_loop_body(it);
- let after_if_expr = after_if_expr(it.clone());
- let ref_expr_parent =
- path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
- let (innermost_ret_ty, self_param) = {
- let find_ret_ty = |it: SyntaxNode| {
- if let Some(item) = ast::Item::cast(it.clone()) {
- match item {
- ast::Item::Fn(f) => {
- Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
- }
- ast::Item::MacroCall(_) => None,
- _ => Some(None),
- }
- } else {
- let expr = ast::Expr::cast(it)?;
- let callable = match expr {
- // FIXME
- // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
- ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
- _ => return None,
- };
- Some(
- callable
- .and_then(|c| c.adjusted().as_callable(sema.db))
- .map(|it| it.return_type()),
- )
- }
- };
- let find_fn_self_param = |it| match it {
- ast::Item::Fn(fn_) => {
- Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
- }
- ast::Item::MacroCall(_) => None,
- _ => Some(None),
- };
-
- match find_node_in_file_compensated(sema, original_file, &expr) {
- Some(it) => {
- let innermost_ret_ty = sema
- .ancestors_with_macros(it.syntax().clone())
- .find_map(find_ret_ty)
- .flatten();
-
- let self_param = sema
- .ancestors_with_macros(it.syntax().clone())
- .filter_map(ast::Item::cast)
- .find_map(find_fn_self_param)
- .flatten();
- (innermost_ret_ty, self_param)
- }
- None => (None, None),
+ let expr = ast::Expr::cast(it)?;
+ let callable = match expr {
+ // FIXME
+ // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
+ ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
+ _ => return None,
+ };
+ Some(
+ callable
+ .and_then(|c| c.adjusted().as_callable(sema.db))
+ .map(|it| it.return_type()),
+ )
}
};
- let is_func_update = func_update_record(it);
- let in_condition = is_in_condition(&expr);
- let incomplete_let = it
- .parent()
- .and_then(ast::LetStmt::cast)
- .map_or(false, |it| it.semicolon_token().is_none());
- let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
-
- let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
- Some(arm) => arm
- .fat_arrow_token()
- .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
- None => false,
+ let find_fn_self_param = |it| match it {
+ ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
};
- PathKind::Expr {
- expr_ctx: ExprCtx {
- in_block_expr,
- in_loop_body,
- after_if_expr,
- in_condition,
- ref_expr_parent,
- is_func_update,
- innermost_ret_ty,
- self_param,
- incomplete_let,
- impl_,
- in_match_guard,
- },
+ match find_node_in_file_compensated(sema, original_file, &expr) {
+ Some(it) => {
+ let innermost_ret_ty = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .find_map(find_ret_ty)
+ .flatten();
+
+ let self_param = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .filter_map(ast::Item::cast)
+ .find_map(find_fn_self_param)
+ .flatten();
+ (innermost_ret_ty, self_param)
+ }
+ None => (None, None),
}
};
- let make_path_kind_type = |ty: ast::Type| {
- let location = type_location(ty.syntax());
- PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
+ let is_func_update = func_update_record(it);
+ let in_condition = is_in_condition(&expr);
+ let incomplete_let = it
+ .parent()
+ .and_then(ast::LetStmt::cast)
+ .map_or(false, |it| it.semicolon_token().is_none());
+ let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
+
+ let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
+ Some(arm) => arm
+ .fat_arrow_token()
+ .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
+ None => false,
};
- let mut kind_macro_call = |it: ast::MacroCall| {
- path_ctx.has_macro_bang = it.excl_token().is_some();
- let parent = it.syntax().parent()?;
- // Any path in an item list will be treated as a macro call by the parser
- let kind = match_ast! {
- match parent {
- ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
- ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
- ast::MacroType(ty) => make_path_kind_type(ty.into()),
- ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
- ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
- Some(it) => match_ast! {
- match it {
- ast::Trait(_) => ItemListKind::Trait,
- ast::Impl(it) => if it.trait_().is_some() {
- ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
- } else {
- ItemListKind::Impl
- },
- _ => return None
- }
- },
- None => return None,
- } },
- ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
- ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
- _ => return None,
- }
- };
- Some(kind)
- };
- let make_path_kind_attr = |meta: ast::Meta| {
- let attr = meta.parent_attr()?;
- let kind = attr.kind();
- let attached = attr.syntax().parent()?;
- let is_trailing_outer_attr = kind != AttrKind::Inner
- && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
- .is_none();
- let annotated_item_kind =
- if is_trailing_outer_attr { None } else { Some(attached.kind()) };
- Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
- };
+ PathKind::Expr {
+ expr_ctx: ExprCtx {
+ in_block_expr,
+ in_loop_body,
+ after_if_expr,
+ in_condition,
+ ref_expr_parent,
+ is_func_update,
+ innermost_ret_ty,
+ self_param,
+ incomplete_let,
+ impl_,
+ in_match_guard,
+ },
+ }
+ };
+ let make_path_kind_type = |ty: ast::Type| {
+ let location = type_location(ty.syntax());
+ PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
+ };
- // Infer the path kind
- let parent = path.syntax().parent()?;
+ let mut kind_macro_call = |it: ast::MacroCall| {
+ path_ctx.has_macro_bang = it.excl_token().is_some();
+ let parent = it.syntax().parent()?;
+ // Any path in an item list will be treated as a macro call by the parser
let kind = match_ast! {
match parent {
- ast::PathType(it) => make_path_kind_type(it.into()),
- ast::PathExpr(it) => {
- if let Some(p) = it.syntax().parent() {
- if ast::ExprStmt::can_cast(p.kind()) {
- if let Some(kind) = inbetween_body_and_decl_check(p) {
- return Some(make_res(NameRefKind::Keyword(kind)));
- }
- }
- }
-
- path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
-
- make_path_kind_expr(it.into())
- },
- ast::TupleStructPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::RecordPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::PathPat(it) => {
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
- },
- ast::MacroCall(it) => {
- // A macro call in this position is usually a result of parsing recovery, so check that
- if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
- return Some(make_res(NameRefKind::Keyword(kind)));
- }
-
- kind_macro_call(it)?
- },
- ast::Meta(meta) => make_path_kind_attr(meta)?,
- ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
- ast::UseTree(_) => PathKind::Use,
- // completing inside a qualifier
- ast::Path(parent) => {
- path_ctx.parent = Some(parent.clone());
- let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
- match_ast! {
- match parent {
- ast::PathType(it) => make_path_kind_type(it.into()),
- ast::PathExpr(it) => {
- path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
-
- make_path_kind_expr(it.into())
- },
- ast::TupleStructPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::RecordPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::PathPat(it) => {
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
- },
- ast::MacroCall(it) => {
- kind_macro_call(it)?
+ ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
+ ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
+ ast::MacroType(ty) => make_path_kind_type(ty.into()),
+ ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
+ ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
+ Some(it) => match_ast! {
+ match it {
+ ast::Trait(_) => ItemListKind::Trait,
+ ast::Impl(it) => if it.trait_().is_some() {
+ ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
+ } else {
+ ItemListKind::Impl
},
- ast::Meta(meta) => make_path_kind_attr(meta)?,
- ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
- ast::UseTree(_) => PathKind::Use,
- ast::RecordExpr(it) => make_path_kind_expr(it.into()),
- _ => return None,
+ _ => return None
}
- }
- },
- ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ },
+ None => return None,
+ } },
+ ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
+ ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
_ => return None,
}
};
+ Some(kind)
+ };
+ let make_path_kind_attr = |meta: ast::Meta| {
+ let attr = meta.parent_attr()?;
+ let kind = attr.kind();
+ let attached = attr.syntax().parent()?;
+ let is_trailing_outer_attr = kind != AttrKind::Inner
+ && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
+ let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
+ Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
+ };
- path_ctx.kind = kind;
- path_ctx.has_type_args = segment.generic_arg_list().is_some();
+ // Infer the path kind
+ let parent = path.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ if let Some(p) = it.syntax().parent() {
+ if ast::ExprStmt::can_cast(p.kind()) {
+ if let Some(kind) = inbetween_body_and_decl_check(p) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+ }
+ }
- // calculate the qualifier context
- if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
- path_ctx.use_tree_parent = use_tree_parent;
- if !use_tree_parent && segment.coloncolon_token().is_some() {
- path_ctx.qualified = Qualified::Absolute;
- } else {
- let qualifier = qualifier
- .segment()
- .and_then(|it| find_node_in_file(original_file, &it))
- .map(|it| it.parent_path());
- if let Some(qualifier) = qualifier {
- let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
- Some(ast::PathSegmentKind::Type {
- type_ref: Some(type_ref),
- trait_ref,
- }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
- _ => None,
- };
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ // A macro call in this position is usually a result of parsing recovery, so check that
+ if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
- path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
- let ty = match ty {
- ast::Type::InferType(_) => None,
- ty => sema.resolve_type(&ty),
- };
- let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
- Qualified::TypeAnchor { ty, trait_ }
- } else {
- let res = sema.resolve_path(&qualifier);
-
- // For understanding how and why super_chain_len is calculated the way it
- // is check the documentation at it's definition
- let mut segment_count = 0;
- let super_count =
- iter::successors(Some(qualifier.clone()), |p| p.qualifier())
- .take_while(|p| {
- p.segment()
- .and_then(|s| {
- segment_count += 1;
- s.super_token()
- })
- .is_some()
- })
- .count();
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ // completing inside a qualifier
+ ast::Path(parent) => {
+ path_ctx.parent = Some(parent.clone());
+ let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ }
+ },
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ };
- let super_chain_len =
- if segment_count > super_count { None } else { Some(super_count) };
+ path_ctx.kind = kind;
+ path_ctx.has_type_args = segment.generic_arg_list().is_some();
- Qualified::With { path: qualifier, resolution: res, super_chain_len }
+ // calculate the qualifier context
+ if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
+ path_ctx.use_tree_parent = use_tree_parent;
+ if !use_tree_parent && segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ } else {
+ let qualifier = qualifier
+ .segment()
+ .and_then(|it| find_node_in_file(original_file, &it))
+ .map(|it| it.parent_path());
+ if let Some(qualifier) = qualifier {
+ let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
+ Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
+ if qualifier.qualifier().is_none() =>
+ {
+ Some((type_ref, trait_ref))
}
+ _ => None,
};
- }
- } else if let Some(segment) = path.segment() {
- if segment.coloncolon_token().is_some() {
- path_ctx.qualified = Qualified::Absolute;
- }
- }
- let mut qualifier_ctx = QualifierCtx::default();
- if path_ctx.is_trivial_path() {
- // fetch the full expression that may have qualifiers attached to it
- let top_node = match path_ctx.kind {
- PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
- parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
- let parent = p.parent()?;
- if ast::StmtList::can_cast(parent.kind()) {
- Some(p)
- } else if ast::ExprStmt::can_cast(parent.kind()) {
- Some(parent)
- } else {
- None
- }
- })
- }
- PathKind::Item { .. } => {
- parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
+ path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
+ let ty = match ty {
+ ast::Type::InferType(_) => None,
+ ty => sema.resolve_type(&ty),
+ };
+ let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
+ Qualified::TypeAnchor { ty, trait_ }
+ } else {
+ let res = sema.resolve_path(&qualifier);
+
+                    // For understanding how and why super_chain_len is calculated the way it
+                    // is, check the documentation at its definition
+ let mut segment_count = 0;
+ let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
+ .take_while(|p| {
+ p.segment()
+ .and_then(|s| {
+ segment_count += 1;
+ s.super_token()
+ })
+ .is_some()
+ })
+ .count();
+
+ let super_chain_len =
+ if segment_count > super_count { None } else { Some(super_count) };
+
+ Qualified::With { path: qualifier, resolution: res, super_chain_len }
}
- _ => None,
};
- if let Some(top) = top_node {
- if let Some(NodeOrToken::Node(error_node)) =
- syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
- {
- if error_node.kind() == SyntaxKind::ERROR {
- qualifier_ctx.unsafe_tok = error_node
- .children_with_tokens()
- .filter_map(NodeOrToken::into_token)
- .find(|it| it.kind() == T![unsafe]);
- qualifier_ctx.vis_node =
- error_node.children().find_map(ast::Visibility::cast);
+ }
+ } else if let Some(segment) = path.segment() {
+ if segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ }
+ }
+
+ let mut qualifier_ctx = QualifierCtx::default();
+ if path_ctx.is_trivial_path() {
+ // fetch the full expression that may have qualifiers attached to it
+ let top_node = match path_ctx.kind {
+ PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
+ parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
+ let parent = p.parent()?;
+ if ast::StmtList::can_cast(parent.kind()) {
+ Some(p)
+ } else if ast::ExprStmt::can_cast(parent.kind()) {
+ Some(parent)
+ } else {
+ None
}
+ })
+ }
+ PathKind::Item { .. } => {
+ parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
+ }
+ _ => None,
+ };
+ if let Some(top) = top_node {
+ if let Some(NodeOrToken::Node(error_node)) =
+ syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
+ {
+ if error_node.kind() == SyntaxKind::ERROR {
+ qualifier_ctx.unsafe_tok = error_node
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .find(|it| it.kind() == T![unsafe]);
+ qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
}
+ }
- if let PathKind::Item { .. } = path_ctx.kind {
- if qualifier_ctx.none() {
- if let Some(t) = top.first_token() {
- if let Some(prev) = t
- .prev_token()
- .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
- {
- if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
- // This was inferred to be an item position path, but it seems
- // to be part of some other broken node which leaked into an item
- // list
- return None;
- }
+ if let PathKind::Item { .. } = path_ctx.kind {
+ if qualifier_ctx.none() {
+ if let Some(t) = top.first_token() {
+ if let Some(prev) = t
+ .prev_token()
+ .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
+ {
+ if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
+ // This was inferred to be an item position path, but it seems
+ // to be part of some other broken node which leaked into an item
+ // list
+ return None;
}
}
}
}
}
}
- Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
}
+ Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
}
fn pattern_context_for(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index ae1a440d0..9d0044e55 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -183,6 +183,7 @@ pub fn completions(
CompletionAnalysis::String { original, expanded: Some(expanded) } => {
completions::extern_abi::complete_extern_abi(acc, ctx, expanded);
completions::format_string::format_string(acc, ctx, original, expanded);
+ completions::env_vars::complete_cargo_env_vars(acc, ctx, expanded);
}
CompletionAnalysis::UnexpandedAttrTT {
colon_prefix,
@@ -234,7 +235,12 @@ pub fn resolve_completion_edits(
);
let import = items_with_name
.filter_map(|candidate| {
- current_module.find_use_path_prefixed(db, candidate, config.insert_use.prefix_kind)
+ current_module.find_use_path_prefixed(
+ db,
+ candidate,
+ config.insert_use.prefix_kind,
+ config.prefer_no_std,
+ )
})
.find(|mod_path| mod_path.to_string() == full_import_path);
if let Some(import_path) = import {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index 946134b0f..86302cb06 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -117,7 +117,7 @@ pub(crate) fn render_field(
) -> CompletionItem {
let is_deprecated = ctx.is_deprecated(field);
let name = field.name(ctx.db());
- let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
let mut item = CompletionItem::new(
SymbolKind::Field,
ctx.source_range(),
@@ -283,8 +283,8 @@ fn render_resolution_path(
let name = local_name.to_smol_str();
let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
- if local_name.escaped().is_escaped() {
- item.insert_text(local_name.escaped().to_smol_str());
+ if local_name.is_escaped() {
+ item.insert_text(local_name.to_smol_str());
}
// Add `<>` for generic types
let type_path_no_ty_args = matches!(
@@ -306,7 +306,7 @@ fn render_resolution_path(
item.lookup_by(name.clone())
.label(SmolStr::from_iter([&name, "<…>"]))
.trigger_call_info()
- .insert_snippet(cap, format!("{}<$0>", local_name.escaped()));
+ .insert_snippet(cap, format!("{}<$0>", local_name));
}
}
}
@@ -323,9 +323,7 @@ fn render_resolution_path(
..CompletionRelevance::default()
});
- if let Some(ref_match) = compute_ref_match(completion, &ty) {
- item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
- }
+ path_ref_match(completion, path_ctx, &ty, &mut item);
};
item
}
@@ -342,7 +340,8 @@ fn render_resolution_simple_(
let ctx = ctx.import_to_add(import_to_add);
let kind = res_to_kind(resolution);
- let mut item = CompletionItem::new(kind, ctx.source_range(), local_name.to_smol_str());
+ let mut item =
+ CompletionItem::new(kind, ctx.source_range(), local_name.unescaped().to_smol_str());
item.set_relevance(ctx.completion_relevance())
.set_documentation(scope_def_docs(db, resolution))
.set_deprecated(scope_def_is_deprecated(&ctx, resolution));
@@ -452,6 +451,29 @@ fn compute_ref_match(
None
}
+fn path_ref_match(
+ completion: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ ty: &hir::Type,
+ item: &mut Builder,
+) {
+ if let Some(original_path) = &path_ctx.original_path {
+        // At least one char was typed by the user already; in that case, look for the original path
+ if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) {
+ if let Some(ref_match) = compute_ref_match(completion, ty) {
+ item.ref_match(ref_match, original_path.syntax().text_range().start());
+ }
+ }
+ } else {
+        // Completion was requested on an empty identifier; there is no path here yet.
+ // FIXME: This might create inconsistent completions where we show a ref match in macro inputs
+ // as long as nothing was typed yet
+ if let Some(ref_match) = compute_ref_match(completion, ty) {
+ item.ref_match(ref_match, completion.position.offset);
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use std::cmp;
@@ -564,6 +586,7 @@ fn main() { Foo::Fo$0 }
kind: SymbolKind(
Variant,
),
+ lookup: "Foo{}",
detail: "Foo { x: i32, y: i32 }",
},
]
@@ -590,6 +613,7 @@ fn main() { Foo::Fo$0 }
kind: SymbolKind(
Variant,
),
+ lookup: "Foo()",
detail: "Foo(i32, i32)",
},
]
@@ -706,7 +730,7 @@ fn main() { let _: m::Spam = S$0 }
kind: SymbolKind(
Variant,
),
- lookup: "Spam::Bar(…)",
+ lookup: "Spam::Bar()",
detail: "m::Spam::Bar(i32)",
relevance: CompletionRelevance {
exact_name_match: false,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
index a810eef18..93ea825e0 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
@@ -13,7 +13,7 @@ pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option
fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
let db = ctx.db();
let name = const_.name(db)?;
- let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
let detail = const_.display(db).to_string();
let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name.clone());
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
index 4b5535718..376120846 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -52,10 +52,10 @@ fn render(
let (call, escaped_call) = match &func_kind {
FuncKind::Method(_, Some(receiver)) => (
- format!("{}.{}", receiver, &name).into(),
- format!("{}.{}", receiver.escaped(), name.escaped()).into(),
+ format!("{}.{}", receiver.unescaped(), name.unescaped()).into(),
+ format!("{}.{}", receiver, name).into(),
),
- _ => (name.to_smol_str(), name.escaped().to_smol_str()),
+ _ => (name.unescaped().to_smol_str(), name.to_smol_str()),
};
let mut item = CompletionItem::new(
if func.self_param(db).is_some() {
@@ -79,24 +79,24 @@ fn render(
..ctx.completion_relevance()
});
- if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
- match func_kind {
- FuncKind::Function(path_ctx) => {
- item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
- }
- FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
- if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
+ match func_kind {
+ FuncKind::Function(path_ctx) => {
+ super::path_ref_match(completion, path_ctx, &ret_type, &mut item);
+ }
+ FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
+ if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
+ if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
item.ref_match(ref_match, original_expr.syntax().text_range().start());
}
}
- _ => (),
}
+ _ => (),
}
item.set_documentation(ctx.docs(func))
.set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
.detail(detail(db, func))
- .lookup_by(name.to_smol_str());
+ .lookup_by(name.unescaped().to_smol_str());
match ctx.completion.config.snippet_cap {
Some(cap) => {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
index 91a253f8f..0c791ac57 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
@@ -2,16 +2,15 @@
use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
use ide_db::SymbolKind;
-use syntax::AstNode;
use crate::{
context::{CompletionContext, PathCompletionCtx, PathKind},
item::{Builder, CompletionItem},
render::{
- compute_ref_match, compute_type_match,
+ compute_type_match,
variant::{
- format_literal_label, render_record_lit, render_tuple_lit, visible_fields,
- RenderedLiteral,
+ format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit,
+ visible_fields, RenderedLiteral,
},
RenderContext,
},
@@ -73,7 +72,7 @@ fn render(
None => (name.clone().into(), name.into(), false),
};
let (qualified_name, escaped_qualified_name) =
- (qualified_name.to_string(), qualified_name.escaped().to_string());
+ (qualified_name.unescaped().to_string(), qualified_name.to_string());
let snippet_cap = ctx.snippet_cap();
let mut rendered = match kind {
@@ -97,13 +96,20 @@ fn render(
if !should_add_parens {
kind = StructKind::Unit;
}
+ let label = format_literal_label(&qualified_name, kind);
+ let lookup = if qualified {
+ format_literal_lookup(&short_qualified_name.to_string(), kind)
+ } else {
+ format_literal_lookup(&qualified_name, kind)
+ };
let mut item = CompletionItem::new(
CompletionItemKind::SymbolKind(thing.symbol_kind()),
ctx.source_range(),
- format_literal_label(&qualified_name, kind),
+ label,
);
+ item.lookup_by(lookup);
item.detail(rendered.detail);
match snippet_cap {
@@ -111,9 +117,6 @@ fn render(
None => item.insert_text(rendered.literal),
};
- if qualified {
- item.lookup_by(format_literal_label(&short_qualified_name.to_string(), kind));
- }
item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx));
let ty = thing.ty(db);
@@ -121,9 +124,8 @@ fn render(
type_match: compute_type_match(ctx.completion, &ty),
..ctx.completion_relevance()
});
- if let Some(ref_match) = compute_ref_match(completion, &ty) {
- item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
- }
+
+ super::path_ref_match(completion, path_ctx, &ty, &mut item);
if let Some(import_to_add) = ctx.import_to_add {
item.add_import(import_to_add);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
index ca2269f13..eabd0bd17 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
@@ -46,7 +46,7 @@ fn render(
ctx.source_range()
};
- let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
let docs = ctx.docs(macro_);
let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default();
let is_fn_like = macro_.is_fn_like(completion.db);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
index 34a384f2f..c845ff21a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -8,7 +8,7 @@ use syntax::SmolStr;
use crate::{
context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
render::{
- variant::{format_literal_label, visible_fields},
+ variant::{format_literal_label, format_literal_lookup, visible_fields},
RenderContext,
},
CompletionItem, CompletionItemKind,
@@ -31,12 +31,13 @@ pub(crate) fn render_struct_pat(
}
let name = local_name.unwrap_or_else(|| strukt.name(ctx.db()));
- let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
let kind = strukt.kind(ctx.db());
let label = format_literal_label(name.as_str(), kind);
+ let lookup = format_literal_lookup(name.as_str(), kind);
let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
- Some(build_completion(ctx, label, pat, strukt))
+ Some(build_completion(ctx, label, lookup, pat, strukt))
}
pub(crate) fn render_variant_pat(
@@ -53,18 +54,21 @@ pub(crate) fn render_variant_pat(
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
let (name, escaped_name) = match path {
- Some(path) => (path.to_string().into(), path.escaped().to_string().into()),
+ Some(path) => (path.unescaped().to_string().into(), path.to_string().into()),
None => {
let name = local_name.unwrap_or_else(|| variant.name(ctx.db()));
- (name.to_smol_str(), name.escaped().to_smol_str())
+ (name.unescaped().to_smol_str(), name.to_smol_str())
}
};
- let (label, pat) = match path_ctx {
- Some(PathCompletionCtx { has_call_parens: true, .. }) => (name, escaped_name.to_string()),
+ let (label, lookup, pat) = match path_ctx {
+ Some(PathCompletionCtx { has_call_parens: true, .. }) => {
+ (name.clone(), name, escaped_name.to_string())
+ }
_ => {
let kind = variant.kind(ctx.db());
let label = format_literal_label(name.as_str(), kind);
+ let lookup = format_literal_lookup(name.as_str(), kind);
let pat = render_pat(
&ctx,
pattern_ctx,
@@ -73,16 +77,17 @@ pub(crate) fn render_variant_pat(
&visible_fields,
fields_omitted,
)?;
- (label, pat)
+ (label, lookup, pat)
}
};
- Some(build_completion(ctx, label, pat, variant))
+ Some(build_completion(ctx, label, lookup, pat, variant))
}
fn build_completion(
ctx: RenderContext<'_>,
label: SmolStr,
+ lookup: SmolStr,
pat: String,
def: impl HasAttrs + Copy,
) -> CompletionItem {
@@ -90,6 +95,7 @@ fn build_completion(
item.set_documentation(ctx.docs(def))
.set_deprecated(ctx.is_deprecated(def))
.detail(&pat)
+ .lookup_by(lookup)
.set_relevance(ctx.completion_relevance());
match ctx.snippet_cap() {
Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
@@ -146,7 +152,7 @@ fn render_record_as_pat(
format!(
"{name} {{ {}{} }}",
fields.enumerate().format_with(", ", |(idx, field), f| {
- f(&format_args!("{}${}", field.name(db).escaped(), idx + 1))
+ f(&format_args!("{}${}", field.name(db), idx + 1))
}),
if fields_omitted { ", .." } else { "" },
name = name
@@ -155,7 +161,7 @@ fn render_record_as_pat(
None => {
format!(
"{name} {{ {}{} }}",
- fields.map(|field| field.name(db).escaped().to_smol_str()).format(", "),
+ fields.map(|field| field.name(db).to_smol_str()).format(", "),
if fields_omitted { ", .." } else { "" },
name = name
)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
index f1b23c76e..de919429f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
@@ -32,11 +32,11 @@ fn render(
let name = type_alias.name(db);
let (name, escaped_name) = if with_eq {
(
+ SmolStr::from_iter([&name.unescaped().to_smol_str(), " = "]),
SmolStr::from_iter([&name.to_smol_str(), " = "]),
- SmolStr::from_iter([&name.escaped().to_smol_str(), " = "]),
)
} else {
- (name.to_smol_str(), name.escaped().to_smol_str())
+ (name.unescaped().to_smol_str(), name.to_smol_str())
};
let detail = type_alias.display(db).to_string();
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
index 9c9540a9b..54e97dd57 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
@@ -6,7 +6,7 @@ use itertools::Itertools;
use crate::{
render::{
- variant::{format_literal_label, visible_fields},
+ variant::{format_literal_label, format_literal_lookup, visible_fields},
RenderContext,
},
CompletionItem, CompletionItemKind,
@@ -21,16 +21,19 @@ pub(crate) fn render_union_literal(
let name = local_name.unwrap_or_else(|| un.name(ctx.db()));
let (qualified_name, escaped_qualified_name) = match path {
- Some(p) => (p.to_string(), p.escaped().to_string()),
- None => (name.to_string(), name.escaped().to_string()),
+ Some(p) => (p.unescaped().to_string(), p.to_string()),
+ None => (name.unescaped().to_string(), name.to_string()),
};
-
+ let label = format_literal_label(&name.to_smol_str(), StructKind::Record);
+ let lookup = format_literal_lookup(&name.to_smol_str(), StructKind::Record);
let mut item = CompletionItem::new(
CompletionItemKind::SymbolKind(SymbolKind::Union),
ctx.source_range(),
- format_literal_label(&name.to_smol_str(), StructKind::Record),
+ label,
);
+ item.lookup_by(lookup);
+
let fields = un.fields(ctx.db());
let (fields, fields_omitted) = visible_fields(ctx.completion, &fields, un)?;
@@ -42,15 +45,15 @@ pub(crate) fn render_union_literal(
format!(
"{} {{ ${{1|{}|}}: ${{2:()}} }}$0",
escaped_qualified_name,
- fields.iter().map(|field| field.name(ctx.db()).escaped().to_smol_str()).format(",")
+ fields.iter().map(|field| field.name(ctx.db()).to_smol_str()).format(",")
)
} else {
format!(
"{} {{ {} }}",
escaped_qualified_name,
- fields.iter().format_with(", ", |field, f| {
- f(&format_args!("{}: ()", field.name(ctx.db()).escaped()))
- })
+ fields
+ .iter()
+ .format_with(", ", |field, f| { f(&format_args!("{}: ()", field.name(ctx.db()))) })
)
};
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
index 003a0c11e..24e6abdc9 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
@@ -24,9 +24,9 @@ pub(crate) fn render_record_lit(
) -> RenderedLiteral {
let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| {
if snippet_cap.is_some() {
- f(&format_args!("{}: ${{{}:()}}", field.name(db).escaped(), idx + 1))
+ f(&format_args!("{}: ${{{}:()}}", field.name(db), idx + 1))
} else {
- f(&format_args!("{}: ()", field.name(db).escaped()))
+ f(&format_args!("{}: ()", field.name(db)))
}
});
@@ -94,3 +94,12 @@ pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr {
StructKind::Unit => name.into(),
}
}
+
+/// Format a struct, etc. literal option for lookup used in completion filtering.
+pub(crate) fn format_literal_lookup(name: &str, kind: StructKind) -> SmolStr {
+ match kind {
+ StructKind::Tuple => SmolStr::from_iter([name, "()"]),
+ StructKind::Record => SmolStr::from_iter([name, "{}"]),
+ StructKind::Unit => name.into(),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
index dc1039fa6..f3b8eae4f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
@@ -174,8 +174,12 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V
hir::PathResolution::Def(def) => def.into(),
_ => return None,
};
- let path =
- ctx.module.find_use_path_prefixed(ctx.db, item, ctx.config.insert_use.prefix_kind)?;
+ let path = ctx.module.find_use_path_prefixed(
+ ctx.db,
+ item,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ )?;
Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
};
let mut res = Vec::with_capacity(requires.len());
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index cf826648d..9e2beb9ee 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -66,6 +66,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_private_editable: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
+ prefer_no_std: false,
insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: PrefixKind::Plain,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index 925081ebf..8e26d889f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -1,7 +1,7 @@
//! Completion tests for expressions.
use expect_test::{expect, Expect};
-use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
@@ -670,3 +670,78 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn variant_with_struct() {
+ check_empty(
+ r#"
+pub struct YoloVariant {
+ pub f: usize
+}
+
+pub enum HH {
+ Yolo(YoloVariant),
+}
+
+fn brr() {
+ let t = HH::Yolo(Y$0);
+}
+"#,
+ expect![[r#"
+ en HH
+ fn brr() fn()
+ st YoloVariant
+ st YoloVariant {…} YoloVariant { f: usize }
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn return_unit_block() {
+ cov_mark::check!(return_unit_block);
+ check_edit("return", r#"fn f() { if true { $0 } }"#, r#"fn f() { if true { return; } }"#);
+}
+
+#[test]
+fn return_unit_no_block() {
+ cov_mark::check!(return_unit_no_block);
+ check_edit(
+ "return",
+ r#"fn f() { match () { () => $0 } }"#,
+ r#"fn f() { match () { () => return } }"#,
+ );
+}
+
+#[test]
+fn return_value_block() {
+ cov_mark::check!(return_value_block);
+ check_edit(
+ "return",
+ r#"fn f() -> i32 { if true { $0 } }"#,
+ r#"fn f() -> i32 { if true { return $0; } }"#,
+ );
+}
+
+#[test]
+fn return_value_no_block() {
+ cov_mark::check!(return_value_no_block);
+ check_edit(
+ "return",
+ r#"fn f() -> i32 { match () { () => $0 } }"#,
+ r#"fn f() -> i32 { match () { () => return $0 } }"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
index 0bba7f245..a63ef0068 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -159,7 +159,7 @@ pub mod some_module {
pub struct ThiiiiiirdStruct;
// contains all letters from the query, but not in the beginning, displayed second
pub struct AfterThirdStruct;
- // contains all letters from the query in the begginning, displayed first
+ // contains all letters from the query in the beginning, displayed first
pub struct ThirdStruct;
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
index 30ddbe2dc..db8bef664 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
@@ -467,7 +467,7 @@ fn foo() {
fn completes_enum_variant_pat() {
cov_mark::check!(enum_variant_pattern_path);
check_edit(
- "RecordVariant {…}",
+ "RecordVariant{}",
r#"
enum Enum {
RecordVariant { field: u32 }
@@ -714,3 +714,30 @@ impl Ty {
"#]],
);
}
+
+#[test]
+fn through_alias() {
+ check_empty(
+ r#"
+enum Enum<T> {
+ Unit,
+ Tuple(T),
+}
+
+type EnumAlias<T> = Enum<T>;
+
+fn f(x: EnumAlias<u8>) {
+ match x {
+ EnumAlias::$0 => (),
+ _ => (),
+ }
+
+}
+
+"#,
+ expect![[r#"
+ bn Tuple(…) Tuple($1)$0
+ bn Unit Unit$0
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
index f6accc68e..328faaa06 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -103,8 +103,9 @@ fn foo(f: Struct) {
}
#[test]
-fn functional_update() {
- // FIXME: This should filter out all completions that do not have the type `Foo`
+fn in_functional_update() {
+ cov_mark::check!(functional_update);
+
check(
r#"
//- minicore:default
@@ -116,13 +117,21 @@ impl Default for Foo {
fn main() {
let thing = 1;
let foo = Foo { foo1: 0, foo2: 0 };
- let foo2 = Foo { thing, $0 }
+ let foo2 = Foo { thing, ..$0 }
}
"#,
expect![[r#"
fd ..Default::default()
- fd foo1 u32
- fd foo2 u32
+ fn main() fn()
+ lc foo Foo
+ lc thing i32
+ md core
+ st Foo
+ st Foo {…} Foo { foo1: u32, foo2: u32 }
+ tt Default
+ bt u32
+ kw crate::
+ kw self::
"#]],
);
check(
@@ -136,14 +145,19 @@ impl Default for Foo {
fn main() {
let thing = 1;
let foo = Foo { foo1: 0, foo2: 0 };
- let foo2 = Foo { thing, .$0 }
+ let foo2 = Foo { thing, ..Default::$0 }
}
"#,
expect![[r#"
- fd ..Default::default()
- sn ..
+ fn default() (as Default) fn() -> Self
"#]],
);
+}
+
+#[test]
+fn functional_update_no_dot() {
+ cov_mark::check!(functional_update_field);
+ // FIXME: This should filter out all completions that do not have the type `Foo`
check(
r#"
//- minicore:default
@@ -155,23 +169,20 @@ impl Default for Foo {
fn main() {
let thing = 1;
let foo = Foo { foo1: 0, foo2: 0 };
- let foo2 = Foo { thing, ..$0 }
+ let foo2 = Foo { thing, $0 }
}
"#,
expect![[r#"
fd ..Default::default()
- fn main() fn()
- lc foo Foo
- lc thing i32
- md core
- st Foo
- st Foo {…} Foo { foo1: u32, foo2: u32 }
- tt Default
- bt u32
- kw crate::
- kw self::
+ fd foo1 u32
+ fd foo2 u32
"#]],
);
+}
+
+#[test]
+fn functional_update_one_dot() {
+ cov_mark::check!(functional_update_one_dot);
check(
r#"
//- minicore:default
@@ -183,11 +194,12 @@ impl Default for Foo {
fn main() {
let thing = 1;
let foo = Foo { foo1: 0, foo2: 0 };
- let foo2 = Foo { thing, ..Default::$0 }
+ let foo2 = Foo { thing, .$0 }
}
"#,
expect![[r#"
- fn default() (as Default) fn() -> Self
+ fd ..Default::default()
+ sn ..
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index a1b0bd6cb..cf0bcd5c9 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -15,11 +15,12 @@ tracing = "0.1.35"
rayon = "1.5.3"
fst = { version = "0.4.7", default-features = false }
rustc-hash = "1.1.0"
-once_cell = "1.12.0"
+once_cell = "1.15.0"
either = "1.7.0"
-itertools = "0.10.3"
+itertools = "0.10.5"
arrayvec = "0.7.2"
indexmap = "1.9.1"
+memchr = "2.5.0"
stdx = { path = "../stdx", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
index 7303ef8b7..7109c6fd1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
@@ -12,7 +12,7 @@ use crate::RootDatabase;
#[derive(Debug)]
pub struct ActiveParameter {
pub ty: Type,
- pub pat: Either<ast::SelfParam, ast::Pat>,
+ pub pat: Option<Either<ast::SelfParam, ast::Pat>>,
}
impl ActiveParameter {
@@ -27,12 +27,12 @@ impl ActiveParameter {
return None;
}
let (pat, ty) = params.swap_remove(idx);
- pat.map(|pat| ActiveParameter { ty, pat })
+ Some(ActiveParameter { ty, pat })
}
pub fn ident(&self) -> Option<ast::Name> {
- self.pat.as_ref().right().and_then(|param| match param {
- ast::Pat::IdentPat(ident) => ident.name(),
+ self.pat.as_ref().and_then(|param| match param {
+ Either::Right(ast::Pat::IdentPat(ident)) => ident.name(),
_ => None,
})
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index 98b0e9c94..b1ee9b58d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -20,7 +20,7 @@ impl RootDatabase {
pub fn apply_change(&mut self, change: Change) {
let _p = profile::span("RootDatabase::apply_change");
self.request_cancellation();
- tracing::info!("apply_change {:?}", change);
+ tracing::trace!("apply_change {:?}", change);
if let Some(roots) = &change.roots {
let mut local_roots = FxHashSet::default();
let mut library_roots = FxHashSet::default();
@@ -45,7 +45,7 @@ impl RootDatabase {
// |===
// | Editor | Action Name
//
- // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)**
+ // | VS Code | **rust-analyzer: Memory Usage (Clears Database)**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index aeaca00ec..6c13c0397 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -127,10 +127,12 @@ impl Definition {
}
}
+// FIXME: IdentClass as a name no longer fits
#[derive(Debug)]
pub enum IdentClass {
NameClass(NameClass),
NameRefClass(NameRefClass),
+ Operator(OperatorClass),
}
impl IdentClass {
@@ -147,6 +149,11 @@ impl IdentClass {
.map(IdentClass::NameClass)
.or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
},
+ ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
+ ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
+ ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
+ ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
+ ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
_ => None,
}
}
@@ -184,6 +191,33 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
+ IdentClass::Operator(
+ OperatorClass::Await(func)
+ | OperatorClass::Prefix(func)
+ | OperatorClass::Bin(func)
+ | OperatorClass::Index(func)
+ | OperatorClass::Try(func),
+ ) => res.push(Definition::Function(func)),
+ }
+ res
+ }
+
+ pub fn definitions_no_ops(self) -> ArrayVec<Definition, 2> {
+ let mut res = ArrayVec::new();
+ match self {
+ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+ res.push(it)
+ }
+ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+ res.push(Definition::Local(local_def));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+ res.push(Definition::Local(local_ref));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::Operator(_) => (),
}
res
}
@@ -332,6 +366,52 @@ impl NameClass {
}
}
+#[derive(Debug)]
+pub enum OperatorClass {
+ Await(Function),
+ Prefix(Function),
+ Index(Function),
+ Try(Function),
+ Bin(Function),
+}
+
+impl OperatorClass {
+ pub fn classify_await(
+ sema: &Semantics<'_, RootDatabase>,
+ await_expr: &ast::AwaitExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_await_to_poll(await_expr).map(OperatorClass::Await)
+ }
+
+ pub fn classify_prefix(
+ sema: &Semantics<'_, RootDatabase>,
+ prefix_expr: &ast::PrefixExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_prefix_expr(prefix_expr).map(OperatorClass::Prefix)
+ }
+
+ pub fn classify_try(
+ sema: &Semantics<'_, RootDatabase>,
+ try_expr: &ast::TryExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_try_expr(try_expr).map(OperatorClass::Try)
+ }
+
+ pub fn classify_index(
+ sema: &Semantics<'_, RootDatabase>,
+ index_expr: &ast::IndexExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_index_expr(index_expr).map(OperatorClass::Index)
+ }
+
+ pub fn classify_bin(
+ sema: &Semantics<'_, RootDatabase>,
+ bin_expr: &ast::BinExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_bin_expr(bin_expr).map(OperatorClass::Bin)
+ }
+}
+
/// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than
/// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a
/// reference most of the time, but there are a couple of annoying exceptions.
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index 26ef86155..40a6a3e89 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -212,18 +212,20 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefix_kind: PrefixKind,
+ prefer_no_std: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_imports");
- self.search_for(sema, Some(prefix_kind))
+ self.search_for(sema, Some(prefix_kind), prefer_no_std)
}
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths(
&self,
sema: &Semantics<'_, RootDatabase>,
+ prefer_no_std: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_relative_paths");
- self.search_for(sema, None)
+ self.search_for(sema, None, prefer_no_std)
}
pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
@@ -242,6 +244,7 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>,
+ prefer_no_std: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for");
@@ -252,6 +255,7 @@ impl ImportAssets {
item_for_path_search(sema.db, item)?,
&self.module_with_candidate,
prefixed,
+ prefer_no_std,
)
};
@@ -564,11 +568,12 @@ fn get_mod_path(
item_to_search: ItemInNs,
module_with_candidate: &Module,
prefixed: Option<PrefixKind>,
+ prefer_no_std: bool,
) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed {
- module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind)
+ module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind, prefer_no_std)
} else {
- module_with_candidate.find_use_path(db, item_to_search)
+ module_with_candidate.find_use_path(db, item_to_search, prefer_no_std)
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
index c14182279..9be1d3663 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -7,7 +7,10 @@ use std::cmp::Ordering;
use hir::Semantics;
use syntax::{
algo,
- ast::{self, make, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind},
+ ast::{
+ self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility,
+ PathSegmentKind,
+ },
ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
};
@@ -192,20 +195,24 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
insert_use_(scope, &path, cfg.group, use_item);
}
-pub fn remove_path_if_in_use_stmt(path: &ast::Path) {
+pub fn ast_to_remove_for_path_in_use_stmt(path: &ast::Path) -> Option<Box<dyn Removable>> {
// FIXME: improve this
if path.parent_path().is_some() {
- return;
+ return None;
}
- if let Some(use_tree) = path.syntax().parent().and_then(ast::UseTree::cast) {
- if use_tree.use_tree_list().is_some() || use_tree.star_token().is_some() {
- return;
- }
- if let Some(use_) = use_tree.syntax().parent().and_then(ast::Use::cast) {
- use_.remove();
- return;
- }
- use_tree.remove();
+ let use_tree = path.syntax().parent().and_then(ast::UseTree::cast)?;
+ if use_tree.use_tree_list().is_some() || use_tree.star_token().is_some() {
+ return None;
+ }
+ if let Some(use_) = use_tree.syntax().parent().and_then(ast::Use::cast) {
+ return Some(Box::new(use_));
+ }
+ Some(Box::new(use_tree))
+}
+
+pub fn remove_path_if_in_use_stmt(path: &ast::Path) {
+ if let Some(node) = ast_to_remove_for_path_in_use_stmt(path) {
+ node.remove();
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
index 7fb4b90e6..371d642c1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -225,7 +225,7 @@ fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering {
}
/// Compares two paths, if one ends earlier than the other the has_tl parameters decide which is
-/// greater as a a path that has a tree list should be greater, while one that just ends without
+/// greater as a path that has a tree list should be greater, while one that just ends without
/// a tree list should be considered less.
pub(super) fn use_tree_path_cmp(
a: &ast::Path,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 966bba616..e0bc0f89f 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -38,6 +38,7 @@ pub mod syntax_helpers {
pub mod node_ext;
pub mod insert_whitespace_into_node;
pub mod format_string;
+ pub mod format_string_exprs;
pub use parser::LexedStr;
}
@@ -52,6 +53,7 @@ use hir::{
db::{AstDatabase, DefDatabase, HirDatabase},
symbols::FileSymbolKind,
};
+use stdx::hash::NoHashHashSet;
use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
@@ -118,7 +120,7 @@ impl FileLoader for RootDatabase {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
- fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
index 68ad07ee8..75d49ff2f 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
@@ -2,7 +2,7 @@
//! representation.
use std::{iter, mem};
-use rustc_hash::FxHashMap;
+use stdx::hash::NoHashHashMap;
use syntax::{TextRange, TextSize};
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -10,7 +10,7 @@ pub struct LineIndex {
/// Offset the the beginning of each line, zero-based
pub(crate) newlines: Vec<TextSize>,
/// List of non-ASCII characters on each line
- pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
+ pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -55,7 +55,7 @@ impl Utf16Char {
impl LineIndex {
pub fn new(text: &str) -> LineIndex {
- let mut utf16_lines = FxHashMap::default();
+ let mut utf16_lines = NoHashHashMap::default();
let mut utf16_chars = Vec::new();
let mut newlines = vec![0.into()];
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index 40af9e6fe..12d873b4a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -173,6 +173,7 @@ impl<'a> Ctx<'a> {
let found_path = self.target_module.find_use_path(
self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref),
+ false,
)?;
match ast::make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty),
@@ -209,7 +210,7 @@ impl<'a> Ctx<'a> {
}
let found_path =
- self.target_module.find_use_path(self.source_scope.db.upcast(), def)?;
+ self.target_module.find_use_path(self.source_scope.db.upcast(), def, false)?;
let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 517fe3f24..49b81265e 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -82,7 +82,7 @@ impl Definition {
}
/// Textual range of the identifier which will change when renaming this
- /// `Definition`. Note that some definitions, like buitin types, can't be
+ /// `Definition`. Note that some definitions, like builtin types, can't be
/// renamed.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let res = match self {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index bd038cdaa..82b85f2fa 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -4,12 +4,14 @@
//! get a super-set of matches. Then, we we confirm each match using precise
//! name resolution.
-use std::{convert::TryInto, mem, sync::Arc};
+use std::{mem, sync::Arc};
use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
+use memchr::memmem::Finder;
use once_cell::unsync::Lazy;
-use rustc_hash::FxHashMap;
+use parser::SyntaxKind;
+use stdx::hash::NoHashHashMap;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use crate::{
@@ -20,7 +22,7 @@ use crate::{
#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
- pub references: FxHashMap<FileId, Vec<FileReference>>,
+ pub references: NoHashHashMap<FileId, Vec<FileReference>>,
}
impl UsageSearchResult {
@@ -45,7 +47,7 @@ impl UsageSearchResult {
impl IntoIterator for UsageSearchResult {
type Item = (FileId, Vec<FileReference>);
- type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
+ type IntoIter = <NoHashHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.references.into_iter()
@@ -67,6 +69,7 @@ pub enum ReferenceCategory {
// Create
Write,
Read,
+ Import,
// FIXME: Some day should be able to search in doc comments. Would probably
// need to switch from enum to bitflags then?
// DocComment
@@ -78,17 +81,17 @@ pub enum ReferenceCategory {
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
- entries: FxHashMap<FileId, Option<TextRange>>,
+ entries: NoHashHashMap<FileId, Option<TextRange>>,
}
impl SearchScope {
- fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
+ fn new(entries: NoHashHashMap<FileId, Option<TextRange>>) -> SearchScope {
SearchScope { entries }
}
/// Build a search scope spanning the entire crate graph of files.
fn crate_graph(db: &RootDatabase) -> SearchScope {
- let mut entries = FxHashMap::default();
+ let mut entries = NoHashHashMap::default();
let graph = db.crate_graph();
for krate in graph.iter() {
@@ -102,7 +105,7 @@ impl SearchScope {
/// Build a search scope spanning all the reverse dependencies of the given crate.
fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
- let mut entries = FxHashMap::default();
+ let mut entries = NoHashHashMap::default();
for rev_dep in of.transitive_reverse_dependencies(db) {
let root_file = rev_dep.root_file(db);
let source_root_id = db.file_source_root(root_file);
@@ -117,14 +120,12 @@ impl SearchScope {
let root_file = of.root_file(db);
let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id);
- SearchScope {
- entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
- }
+ SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
}
/// Build a search scope spanning the given module and all its submodules.
fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
- let mut entries = FxHashMap::default();
+ let mut entries = NoHashHashMap::default();
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
@@ -157,7 +158,7 @@ impl SearchScope {
/// Build an empty search scope.
pub fn empty() -> SearchScope {
- SearchScope::new(FxHashMap::default())
+ SearchScope::new(NoHashHashMap::default())
}
/// Build a empty search scope spanning the given file.
@@ -238,6 +239,7 @@ impl Definition {
DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
+ DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
};
return match def {
Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
@@ -402,21 +404,26 @@ impl<'a> FindUsages<'a> {
.or_else(|| ty.as_builtin().map(|builtin| builtin.name()))
})
};
- self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.to_smol_str())
+ // We need to unescape the name in case it is written without "r#" in earlier
+ // editions of Rust where it isn't a keyword.
+ self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.unescaped().to_smol_str())
}
};
let name = match &name {
Some(s) => s.as_str(),
None => return,
};
+ let finder = &Finder::new(name);
+ let include_self_kw_refs =
+ self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self")));
- // these can't be closures because rust infers the lifetimes wrong ...
+ // for<'a> |text: &'a str, name: &'a str, search_range: TextRange| -> impl Iterator<Item = TextSize> + 'a { ... }
fn match_indices<'a>(
text: &'a str,
- name: &'a str,
+ finder: &'a Finder<'a>,
search_range: TextRange,
) -> impl Iterator<Item = TextSize> + 'a {
- text.match_indices(name).filter_map(move |(idx, _)| {
+ finder.find_iter(text.as_bytes()).filter_map(move |idx| {
let offset: TextSize = idx.try_into().unwrap();
if !search_range.contains_inclusive(offset) {
return None;
@@ -425,6 +432,7 @@ impl<'a> FindUsages<'a> {
})
}
+ // for<'a> |scope: &'a SearchScope| -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a { ... }
fn scope_files<'a>(
sema: &'a Semantics<'_, RootDatabase>,
scope: &'a SearchScope,
@@ -448,7 +456,7 @@ impl<'a> FindUsages<'a> {
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
// Search for occurrences of the items name
- for offset in match_indices(&text, name, search_range) {
+ for offset in match_indices(&text, finder, search_range) {
for name in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@@ -460,8 +468,8 @@ impl<'a> FindUsages<'a> {
}
}
// Search for occurrences of the `Self` referring to our type
- if let Some(self_ty) = &self.include_self_kw_refs {
- for offset in match_indices(&text, "Self", search_range) {
+ if let Some((self_ty, finder)) = &include_self_kw_refs {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
return;
@@ -477,20 +485,22 @@ impl<'a> FindUsages<'a> {
let scope = search_scope
.intersection(&SearchScope::module_and_children(self.sema.db, module));
- let is_crate_root = module.is_crate_root(self.sema.db);
+ let is_crate_root =
+ module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
+ let finder = &Finder::new("super");
for (text, file_id, search_range) in scope_files(sema, &scope) {
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
- for offset in match_indices(&text, "super", search_range) {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_name_ref(&name_ref, sink) {
return;
}
}
}
- if is_crate_root {
- for offset in match_indices(&text, "crate", search_range) {
+ if let Some(finder) = &is_crate_root {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_name_ref(&name_ref, sink) {
return;
@@ -531,8 +541,9 @@ impl<'a> FindUsages<'a> {
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
+ let finder = &Finder::new("self");
- for offset in match_indices(&text, "self", search_range) {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_self_module_name_ref(&name_ref, sink) {
return;
@@ -577,7 +588,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
- category: None,
+ category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import),
};
sink(file_id, reference)
}
@@ -756,7 +767,7 @@ impl ReferenceCategory {
fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
// Only Locals and Fields have accesses for now.
if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
- return None;
+ return is_name_ref_in_import(r).then(|| ReferenceCategory::Import);
}
let mode = r.syntax().ancestors().find_map(|node| {
@@ -783,3 +794,12 @@ impl ReferenceCategory {
mode.or(Some(ReferenceCategory::Read))
}
}
+
+fn is_name_ref_in_import(name_ref: &ast::NameRef) -> bool {
+ name_ref
+ .syntax()
+ .parent()
+ .and_then(ast::PathSegment::cast)
+ .and_then(|it| it.parent_path().top_path().syntax().parent())
+ .map_or(false, |it| it.kind() == SyntaxKind::USE_TREE)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 8132c73ef..8e338061d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -3,16 +3,18 @@
//!
//! It can be viewed as a dual for `Change`.
-use std::{collections::hash_map::Entry, iter};
+use std::{collections::hash_map::Entry, iter, mem};
use base_db::{AnchoredPathBuf, FileId};
-use rustc_hash::FxHashMap;
-use stdx::never;
-use text_edit::TextEdit;
+use stdx::{hash::NoHashHashMap, never};
+use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
+use text_edit::{TextEdit, TextEditBuilder};
+
+use crate::SnippetCap;
#[derive(Default, Debug, Clone)]
pub struct SourceChange {
- pub source_file_edits: FxHashMap<FileId, TextEdit>,
+ pub source_file_edits: NoHashHashMap<FileId, TextEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool,
}
@@ -21,7 +23,7 @@ impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub fn from_edits(
- source_file_edits: FxHashMap<FileId, TextEdit>,
+ source_file_edits: NoHashHashMap<FileId, TextEdit>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@@ -75,12 +77,141 @@ impl Extend<FileSystemEdit> for SourceChange {
}
}
-impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
- fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
+impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
+ fn from(source_file_edits: NoHashHashMap<FileId, TextEdit>) -> SourceChange {
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
}
}
+pub struct SourceChangeBuilder {
+ pub edit: TextEditBuilder,
+ pub file_id: FileId,
+ pub source_change: SourceChange,
+ pub trigger_signature_help: bool,
+
+ /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
+ pub mutated_tree: Option<TreeMutator>,
+}
+
+pub struct TreeMutator {
+ immutable: SyntaxNode,
+ mutable_clone: SyntaxNode,
+}
+
+impl TreeMutator {
+ pub fn new(immutable: &SyntaxNode) -> TreeMutator {
+ let immutable = immutable.ancestors().last().unwrap();
+ let mutable_clone = immutable.clone_for_update();
+ TreeMutator { immutable, mutable_clone }
+ }
+
+ pub fn make_mut<N: AstNode>(&self, node: &N) -> N {
+ N::cast(self.make_syntax_mut(node.syntax())).unwrap()
+ }
+
+ pub fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
+ let ptr = SyntaxNodePtr::new(node);
+ ptr.to_node(&self.mutable_clone)
+ }
+}
+
+impl SourceChangeBuilder {
+ pub fn new(file_id: FileId) -> SourceChangeBuilder {
+ SourceChangeBuilder {
+ edit: TextEdit::builder(),
+ file_id,
+ source_change: SourceChange::default(),
+ trigger_signature_help: false,
+ mutated_tree: None,
+ }
+ }
+
+ pub fn edit_file(&mut self, file_id: FileId) {
+ self.commit();
+ self.file_id = file_id;
+ }
+
+ fn commit(&mut self) {
+ if let Some(tm) = self.mutated_tree.take() {
+ algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
+ }
+
+ let edit = mem::take(&mut self.edit).finish();
+ if !edit.is_empty() {
+ self.source_change.insert_source_edit(self.file_id, edit);
+ }
+ }
+
+ pub fn make_mut<N: AstNode>(&mut self, node: N) -> N {
+ self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
+ }
+ /// Returns a copy of the `node`, suitable for mutation.
+ ///
+ /// Syntax trees in rust-analyzer are typically immutable, and mutating
+ /// operations panic at runtime. However, it is possible to make a copy of
+ /// the tree and mutate the copy freely. Mutation is based on interior
+ /// mutability, and different nodes in the same tree see the same mutations.
+ ///
+ /// The typical pattern for an assist is to find specific nodes in the read
+    /// phase, and then get their mutable counterparts using `make_mut` in the
+ /// mutable state.
+ pub fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
+ self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
+ }
+
+    /// Removes the specified `range` of text.
+ pub fn delete(&mut self, range: TextRange) {
+ self.edit.delete(range)
+ }
+    /// Inserts the specified `text` at the given `offset`
+ pub fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
+ self.edit.insert(offset, text.into())
+ }
+    /// Inserts the specified `snippet` at the given `offset`
+ pub fn insert_snippet(
+ &mut self,
+ _cap: SnippetCap,
+ offset: TextSize,
+ snippet: impl Into<String>,
+ ) {
+ self.source_change.is_snippet = true;
+ self.insert(offset, snippet);
+ }
+ /// Replaces specified `range` of text with a given string.
+ pub fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
+ self.edit.replace(range, replace_with.into())
+ }
+ /// Replaces specified `range` of text with a given `snippet`.
+ pub fn replace_snippet(
+ &mut self,
+ _cap: SnippetCap,
+ range: TextRange,
+ snippet: impl Into<String>,
+ ) {
+ self.source_change.is_snippet = true;
+ self.replace(range, snippet);
+ }
+ pub fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
+ algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
+ }
+ pub fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
+ let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
+ self.source_change.push_file_system_edit(file_system_edit);
+ }
+ pub fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
+ let file_system_edit = FileSystemEdit::MoveFile { src, dst };
+ self.source_change.push_file_system_edit(file_system_edit);
+ }
+ pub fn trigger_signature_help(&mut self) {
+ self.trigger_signature_help = true;
+ }
+
+ pub fn finish(mut self) -> SourceChange {
+ self.commit();
+ mem::take(&mut self.source_change)
+ }
+}
+
#[derive(Debug, Clone)]
pub enum FileSystemEdit {
CreateFile { dst: AnchoredPathBuf, initial_contents: String },
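
A quick illustration of the read-then-mutate pattern the new `SourceChangeBuilder` enables (commentary on the patch, not part of it): nodes are found on the immutable tree, swapped for their `clone_for_update` twins via `make_mut`, mutated in place, and `finish()` diffs the trees into text edits. The function name, the `RecordField` node, and the elided mutation are assumptions for the sketch.

```rust
use ide_db::{
    base_db::FileId,
    source_change::{SourceChange, SourceChangeBuilder},
};
use syntax::ast;

// Hypothetical assist body: `field` was located during the read-only phase.
fn edit_field(file_id: FileId, field: ast::RecordField) -> SourceChange {
    let mut builder = SourceChangeBuilder::new(file_id);
    // Switch to the mutable twin of `field` inside a `clone_for_update` copy of the tree.
    let field = builder.make_mut(field);
    // ... mutate `field` here with the usual `ted`/`make` APIs ...
    let _ = field;
    // `finish()` runs `commit()`, which diffs the mutated clone against the
    // original tree and records the result as a `TextEdit` for `file_id`.
    builder.finish()
}
```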
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
index f48a57008..2d6927cee 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
@@ -1,7 +1,8 @@
//! Tools to work with format string literals for the `format_args!` family of macros.
+use crate::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::{
ast::{self, IsString},
- AstNode, AstToken, TextRange, TextSize,
+ TextRange, TextSize,
};
pub fn is_format_string(string: &ast::String) -> bool {
@@ -14,8 +15,7 @@ pub fn is_format_string(string: &ast::String) -> bool {
// This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
// strings. It still fails for `concat!("{", "}")`, but that is rare.
(|| {
- let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
- let name = macro_call.path()?.segment()?.name_ref()?;
+ let name = macro_call_for_string_token(string)?.path()?.segment()?.name_ref()?;
if !matches!(
name.text().as_str(),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
new file mode 100644
index 000000000..ac6c6e8fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
@@ -0,0 +1,267 @@
+//! Tools to work with expressions present in format string literals for the `format_args!` family of macros.
+//! Primarily meant for assists and completions.
+
+/// Enum for representing extracted format string args.
+/// Each arg is either an extracted expression (which includes identifiers),
+/// or a placeholder `{}`.
+#[derive(Debug, PartialEq, Eq)]
+pub enum Arg {
+ Placeholder,
+ Ident(String),
+ Expr(String),
+}
+
+/**
+ Adds placeholders like `$1` and `$2` in place of [`Arg::Placeholder`],
+ and unwraps the [`Arg::Ident`] and [`Arg::Expr`] variants.
+ ```rust
+ # use ide_db::syntax_helpers::format_string_exprs::*;
+ assert_eq!(with_placeholders(vec![Arg::Ident("ident".to_owned()), Arg::Placeholder, Arg::Expr("expr + 2".to_owned())]), vec!["ident".to_owned(), "$1".to_owned(), "expr + 2".to_owned()])
+ ```
+*/
+
+pub fn with_placeholders(args: Vec<Arg>) -> Vec<String> {
+ let mut placeholder_id = 1;
+ args.into_iter()
+ .map(move |a| match a {
+ Arg::Expr(s) | Arg::Ident(s) => s,
+ Arg::Placeholder => {
+ let s = format!("${placeholder_id}");
+ placeholder_id += 1;
+ s
+ }
+ })
+ .collect()
+}
+
+/**
+ Parser for a format-like string. It is more permissive about the string contents,
+ since we expect variable placeholders to be filled with expressions.
+
+ Built for completions and assists, and escapes `\` and `$` in the output.
+ (See the comments on `get_receiver_text()` for details.)
+ Splits a format string that may contain expressions, for example:
+ ```rust
+ assert_eq!(parse_format_exprs("{ident} {} {expr + 42} ").unwrap(), ("{} {} {}", vec![Arg::Ident("ident"), Arg::Placeholder, Arg::Expr("expr + 42")]));
+ ```
+*/
+pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
+ #[derive(Debug, Clone, Copy, PartialEq)]
+ enum State {
+ NotArg,
+ MaybeArg,
+ Expr,
+ Ident,
+ MaybeIncorrect,
+ FormatOpts,
+ }
+
+ let mut state = State::NotArg;
+ let mut current_expr = String::new();
+ let mut extracted_expressions = Vec::new();
+ let mut output = String::new();
+
+ // Count of open braces inside of an expression.
+    // We assume that the user knows what they're doing, so we treat it as a correct pattern, e.g.
+ // "{MyStruct { val_a: 0, val_b: 1 }}".
+ let mut inexpr_open_count = 0;
+
+ let mut chars = input.chars().peekable();
+ while let Some(chr) = chars.next() {
+ match (state, chr) {
+ (State::NotArg, '{') => {
+ output.push(chr);
+ state = State::MaybeArg;
+ }
+ (State::NotArg, '}') => {
+ output.push(chr);
+ state = State::MaybeIncorrect;
+ }
+ (State::NotArg, _) => {
+ if matches!(chr, '\\' | '$') {
+ output.push('\\');
+ }
+ output.push(chr);
+ }
+ (State::MaybeIncorrect, '}') => {
+ // It's okay, we met "}}".
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::MaybeIncorrect, _) => {
+ // Error in the string.
+ return Err(());
+ }
+ // Escaped braces `{{`
+ (State::MaybeArg, '{') => {
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::MaybeArg, '}') => {
+ // This is an empty sequence '{}'.
+ output.push(chr);
+ extracted_expressions.push(Arg::Placeholder);
+ state = State::NotArg;
+ }
+ (State::MaybeArg, _) => {
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+
+                // While Rust uses the Unicode XID_Start and XID_Continue sets for identifiers,
+                // this is probably the best we can do to avoid false positives
+ if chr.is_alphabetic() || chr == '_' {
+ state = State::Ident;
+ } else {
+ state = State::Expr;
+ }
+ }
+ (State::Ident | State::Expr, '}') => {
+ if inexpr_open_count == 0 {
+ output.push(chr);
+
+ if matches!(state, State::Expr) {
+ extracted_expressions.push(Arg::Expr(current_expr.trim().into()));
+ } else {
+ extracted_expressions.push(Arg::Ident(current_expr.trim().into()));
+ }
+
+ current_expr = String::new();
+ state = State::NotArg;
+ } else {
+                    // We're closing a brace that was opened earlier inside the expression.
+ current_expr.push(chr);
+ inexpr_open_count -= 1;
+ }
+ }
+ (State::Ident | State::Expr, ':') if matches!(chars.peek(), Some(':')) => {
+ // path separator
+ state = State::Expr;
+ current_expr.push_str("::");
+ chars.next();
+ }
+ (State::Ident | State::Expr, ':') => {
+ if inexpr_open_count == 0 {
+                    // We're outside of braces, so assume it's a format specifier, like "{Some(value):?}"
+ output.push(chr);
+
+ if matches!(state, State::Expr) {
+ extracted_expressions.push(Arg::Expr(current_expr.trim().into()));
+ } else {
+ extracted_expressions.push(Arg::Ident(current_expr.trim().into()));
+ }
+
+ current_expr = String::new();
+ state = State::FormatOpts;
+ } else {
+                    // We're inside a braced expression; assume it's a struct field name/value delimiter.
+ current_expr.push(chr);
+ }
+ }
+ (State::Ident | State::Expr, '{') => {
+ state = State::Expr;
+ current_expr.push(chr);
+ inexpr_open_count += 1;
+ }
+ (State::Ident | State::Expr, _) => {
+ if !(chr.is_alphanumeric() || chr == '_' || chr == '#') {
+ state = State::Expr;
+ }
+
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+ }
+ (State::FormatOpts, '}') => {
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::FormatOpts, _) => {
+ if matches!(chr, '\\' | '$') {
+ output.push('\\');
+ }
+ output.push(chr);
+ }
+ }
+ }
+
+ if state != State::NotArg {
+ return Err(());
+ }
+
+ Ok((output, extracted_expressions))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use expect_test::{expect, Expect};
+
+ fn check(input: &str, expect: &Expect) {
+ let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![]));
+ let outcome_repr = if !exprs.is_empty() {
+ format!("{}; {}", output, with_placeholders(exprs).join(", "))
+ } else {
+ output
+ };
+
+ expect.assert_eq(&outcome_repr);
+ }
+
+ #[test]
+ fn format_str_parser() {
+ let test_vector = &[
+ ("no expressions", expect![["no expressions"]]),
+ (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
+ ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]),
+ ("{expr:?}", expect![["{:?}; expr"]]),
+ ("{expr:1$}", expect![[r"{:1\$}; expr"]]),
+ ("{$0}", expect![[r"{}; \$0"]]),
+ ("{malformed", expect![["-"]]),
+ ("malformed}", expect![["-"]]),
+ ("{{correct", expect![["{{correct"]]),
+ ("correct}}", expect![["correct}}"]]),
+ ("{correct}}}", expect![["{}}}; correct"]]),
+ ("{correct}}}}}", expect![["{}}}}}; correct"]]),
+ ("{incorrect}}", expect![["-"]]),
+ ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]),
+ ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }}",
+ expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }:?}",
+ expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
+ ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
+ ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
+ ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
+ ];
+
+ for (input, output) in test_vector {
+ check(input, output)
+ }
+ }
+
+ #[test]
+ fn arg_type() {
+ assert_eq!(
+ parse_format_exprs("{_ident} {r#raw_ident} {expr.obj} {name {thing: 42} } {}")
+ .unwrap()
+ .1,
+ vec![
+ Arg::Ident("_ident".to_owned()),
+ Arg::Ident("r#raw_ident".to_owned()),
+ Arg::Expr("expr.obj".to_owned()),
+ Arg::Expr("name {thing: 42}".to_owned()),
+ Arg::Placeholder
+ ]
+ );
+ }
+}
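
As an aside (not part of the patch), the two helpers above are meant to be used together: `parse_format_exprs` splits a macro-ready template from the extracted arguments, and `with_placeholders` turns bare `{}` placeholders into snippet tab stops. A minimal sketch of a caller, with the function name being an assumption:

```rust
use ide_db::syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders};

// "{expr} {}" -> (template "{} {}", args ["expr", "$1"])
fn split_format_args(input: &str) -> Option<(String, Vec<String>)> {
    let (template, args) = parse_format_exprs(input).ok()?;
    Some((template, with_placeholders(args)))
}
```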
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
index f54ae6c92..8bc093a85 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
@@ -95,7 +95,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
AS_KW | DYN_KW | IMPL_KW | CONST_KW => {
mods.push(do_ws(after, tok));
}
- T![;] => {
+ T![;] if is_next(|it| it != R_CURLY, true) => {
if indent > 0 {
mods.push(do_indent(after, tok, indent));
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
index 84bde4d44..39710b8f1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -2,8 +2,8 @@
use itertools::Itertools;
use parser::T;
use syntax::{
- ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind},
- AstNode, Preorder, RustLanguage, WalkEvent,
+ ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind},
+ AstNode, AstToken, Preorder, RustLanguage, WalkEvent,
};
pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
@@ -315,7 +315,6 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
| ast::Expr::IndexExpr(_)
| ast::Expr::Literal(_)
| ast::Expr::MacroExpr(_)
- | ast::Expr::MacroStmts(_)
| ast::Expr::MethodCallExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::PathExpr(_)
@@ -458,3 +457,8 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
.collect();
Some(paths)
}
+
+pub fn macro_call_for_string_token(string: &ast::String) -> Option<MacroCall> {
+ let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
+ Some(macro_call)
+}
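
For illustration (not part of the patch), the new helper is typically chained with the macro path accessors, as the `format_string.rs` change above does. A small sketch, with the function name being an assumption:

```rust
use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::ast;

// Returns the name of the macro a string literal token appears in, if any,
// e.g. Some("format") for the literal inside `format!("…")`.
fn enclosing_macro_name(string: &ast::String) -> Option<String> {
    let name = macro_call_for_string_token(string)?.path()?.segment()?.name_ref()?;
    Some(name.text().as_str().to_owned())
}
```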
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index e221425ed..e1d146f4e 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -11,10 +11,9 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.3"
-
-
either = "1.7.0"
+itertools = "0.10.5"
+serde_json = "1.0.86"
profile = { path = "../profile", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
index d12594a4c..0c92e706b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -7,9 +7,10 @@ pub(crate) fn break_outside_of_loop(
ctx: &DiagnosticsContext<'_>,
d: &hir::BreakOutsideOfLoop,
) -> Diagnostic {
+ let construct = if d.is_break { "break" } else { "continue" };
Diagnostic::new(
"break-outside-of-loop",
- "break outside of loop",
+ format!("{construct} outside of loop"),
ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
)
}
@@ -19,11 +20,122 @@ mod tests {
use crate::tests::check_diagnostics;
#[test]
- fn break_outside_of_loop() {
+ fn outside_of_loop() {
check_diagnostics(
r#"
-fn foo() { break; }
- //^^^^^ error: break outside of loop
+fn foo() {
+ break;
+ //^^^^^ error: break outside of loop
+ break 'a;
+ //^^^^^^^^ error: break outside of loop
+ continue;
+ //^^^^^^^^ error: continue outside of loop
+ continue 'a;
+ //^^^^^^^^^^^ error: continue outside of loop
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_blocks_are_borders() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ 'a: loop {
+ try {
+ break;
+ //^^^^^ error: break outside of loop
+ break 'a;
+ //^^^^^^^^ error: break outside of loop
+ continue;
+ //^^^^^^^^ error: continue outside of loop
+ continue 'a;
+ //^^^^^^^^^^^ error: continue outside of loop
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn async_blocks_are_borders() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ 'a: loop {
+        async {
+ break;
+ //^^^^^ error: break outside of loop
+ break 'a;
+ //^^^^^^^^ error: break outside of loop
+ continue;
+ //^^^^^^^^ error: continue outside of loop
+ continue 'a;
+ //^^^^^^^^^^^ error: continue outside of loop
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closures_are_borders() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ 'a: loop {
+        || {
+ break;
+ //^^^^^ error: break outside of loop
+ break 'a;
+ //^^^^^^^^ error: break outside of loop
+ continue;
+ //^^^^^^^^ error: continue outside of loop
+ continue 'a;
+ //^^^^^^^^^^^ error: continue outside of loop
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn blocks_pass_through() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ 'a: loop {
+ {
+ break;
+ break 'a;
+ continue;
+ continue 'a;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn label_blocks() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ 'a: {
+ break;
+ //^^^^^ error: break outside of loop
+ break 'a;
+ continue;
+ //^^^^^^^^ error: continue outside of loop
+ continue 'a;
+ //^^^^^^^^^^^ error: continue outside of loop
+ }
+}
"#,
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 97ea5c456..f558b7256 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -43,7 +43,7 @@ mod tests {
use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig};
pub(crate) fn check(ra_fixture: &str) {
- let config = DiagnosticsConfig::default();
+ let config = DiagnosticsConfig::test_sample();
check_diagnostics_with_config(config, ra_fixture)
}
@@ -106,18 +106,17 @@ fn f() {
#[test]
fn inactive_assoc_item() {
- // FIXME these currently don't work, hence the *
check(
r#"
struct Foo;
impl Foo {
#[cfg(any())] pub fn f() {}
- //*************************** weak: code is inactive due to #[cfg] directives
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
}
trait Bar {
#[cfg(any())] pub fn f() {}
- //*************************** weak: code is inactive due to #[cfg] directives
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
}
"#,
);
@@ -141,4 +140,35 @@ trait Bar {
"#,
);
}
+
+ #[test]
+ fn inactive_fields_and_variants() {
+ check(
+ r#"
+enum Foo {
+ #[cfg(a)] Bar,
+//^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ Baz {
+ #[cfg(a)] baz: String,
+ //^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ },
+ Qux(#[cfg(a)] String),
+ //^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+}
+
+struct Baz {
+ #[cfg(a)] baz: String,
+//^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+}
+
+struct Qux(#[cfg(a)] String);
+ //^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+
+union FooBar {
+ #[cfg(a)] baz: u32,
+//^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
new file mode 100644
index 000000000..303429519
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -0,0 +1,312 @@
+//! This diagnostic provides an assist for creating a struct definition from a JSON
+//! example.
+
+use hir::{PathResolution, Semantics};
+use ide_db::{
+ base_db::FileId,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ source_change::SourceChangeBuilder,
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::{format_to, never};
+use syntax::{
+ ast::{self, make},
+ SyntaxKind, SyntaxNode,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, DiagnosticsConfig, Severity};
+
+#[derive(Default)]
+struct State {
+ result: String,
+ struct_counts: usize,
+ has_serialize: bool,
+ has_deserialize: bool,
+}
+
+impl State {
+ fn generate_new_name(&mut self) -> ast::Name {
+ self.struct_counts += 1;
+ make::name(&format!("Struct{}", self.struct_counts))
+ }
+
+ fn serde_derive(&self) -> String {
+ let mut v = vec![];
+ if self.has_serialize {
+ v.push("Serialize");
+ }
+ if self.has_deserialize {
+ v.push("Deserialize");
+ }
+ match v.as_slice() {
+ [] => "".to_string(),
+ [x] => format!("#[derive({x})]\n"),
+ [x, y] => format!("#[derive({x}, {y})]\n"),
+ _ => {
+ never!();
+ "".to_string()
+ }
+ }
+ }
+
+ fn build_struct(&mut self, value: &serde_json::Map<String, serde_json::Value>) -> ast::Type {
+ let name = self.generate_new_name();
+ let ty = make::ty(&name.to_string());
+ let strukt = make::struct_(
+ None,
+ name,
+ None,
+ make::record_field_list(value.iter().sorted_unstable_by_key(|x| x.0).map(
+ |(name, value)| make::record_field(None, make::name(name), self.type_of(value)),
+ ))
+ .into(),
+ );
+ format_to!(self.result, "{}{}\n", self.serde_derive(), strukt);
+ ty
+ }
+
+ fn type_of(&mut self, value: &serde_json::Value) -> ast::Type {
+ match value {
+ serde_json::Value::Null => make::ty_unit(),
+ serde_json::Value::Bool(_) => make::ty("bool"),
+ serde_json::Value::Number(it) => make::ty(if it.is_i64() { "i64" } else { "f64" }),
+ serde_json::Value::String(_) => make::ty("String"),
+ serde_json::Value::Array(it) => {
+ let ty = match it.iter().next() {
+ Some(x) => self.type_of(x),
+ None => make::ty_placeholder(),
+ };
+ make::ty(&format!("Vec<{ty}>"))
+ }
+ serde_json::Value::Object(x) => self.build_struct(x),
+ }
+ }
+}
+
+pub(crate) fn json_in_items(
+ sema: &Semantics<'_, RootDatabase>,
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ node: &SyntaxNode,
+ config: &DiagnosticsConfig,
+) {
+ (|| {
+ if node.kind() == SyntaxKind::ERROR
+ && node.first_token().map(|x| x.kind()) == Some(SyntaxKind::L_CURLY)
+ && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY)
+ {
+ let node_string = node.to_string();
+ if let Ok(it) = serde_json::from_str(&node_string) {
+ if let serde_json::Value::Object(it) = it {
+ let import_scope = ImportScope::find_insert_use_container(node, sema)?;
+ let range = node.text_range();
+ let mut edit = TextEdit::builder();
+ edit.delete(range);
+ let mut state = State::default();
+ let semantics_scope = sema.scope(node)?;
+ let scope_resolve =
+ |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
+ let scope_has = |it| scope_resolve(it).is_some();
+ let deserialize_resolved = scope_resolve("::serde::Deserialize");
+ let serialize_resolved = scope_resolve("::serde::Serialize");
+ state.has_deserialize = deserialize_resolved.is_some();
+ state.has_serialize = serialize_resolved.is_some();
+ state.build_struct(&it);
+ edit.insert(range.start(), state.result);
+ acc.push(
+ Diagnostic::new(
+ "json-is-not-rust",
+ "JSON syntax is not valid as a Rust item",
+ range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![{
+ let mut scb = SourceChangeBuilder::new(file_id);
+ let scope = match import_scope.clone() {
+ ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
+ };
+ let current_module = semantics_scope.module();
+ if !scope_has("Serialize") {
+ if let Some(PathResolution::Def(it)) = serialize_resolved {
+ if let Some(it) = current_module.find_use_path_prefixed(
+ sema.db,
+ it,
+ config.insert_use.prefix_kind,
+ config.prefer_no_std,
+ ) {
+ insert_use(
+ &scope,
+ mod_path_to_ast(&it),
+ &config.insert_use,
+ );
+ }
+ }
+ }
+ if !scope_has("Deserialize") {
+ if let Some(PathResolution::Def(it)) = deserialize_resolved {
+ if let Some(it) = current_module.find_use_path_prefixed(
+ sema.db,
+ it,
+ config.insert_use.prefix_kind,
+ config.prefer_no_std,
+ ) {
+ insert_use(
+ &scope,
+ mod_path_to_ast(&it),
+ &config.insert_use,
+ );
+ }
+ }
+ }
+ let mut sc = scb.finish();
+ sc.insert_source_edit(file_id, edit.finish());
+ fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
+ }])),
+ );
+ }
+ }
+ }
+ Some(())
+ })();
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_diagnostics_with_config, check_fix, check_no_fix},
+ DiagnosticsConfig,
+ };
+
+ #[test]
+ fn diagnostic_for_simple_case() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+ { "foo": "bar" }
+ // ^^^^^^^^^^^^^^^^ 💡 weak: JSON syntax is not valid as a Rust item
+"#,
+ );
+ }
+
+ #[test]
+ fn types_of_primitives() {
+ check_fix(
+ r#"
+ //- /lib.rs crate:lib deps:serde
+ use serde::Serialize;
+
+ fn some_garbage() {
+
+ }
+
+ {$0
+ "foo": "bar",
+ "bar": 2.3,
+ "baz": null,
+ "bay": 57,
+ "box": true
+ }
+ //- /serde.rs crate:serde
+
+ pub trait Serialize {
+ fn serialize() -> u8;
+ }
+ "#,
+ r#"
+ use serde::Serialize;
+
+ fn some_garbage() {
+
+ }
+
+ #[derive(Serialize)]
+ struct Struct1{ bar: f64, bay: i64, baz: (), r#box: bool, foo: String }
+
+ "#,
+ );
+ }
+
+ #[test]
+ fn nested_structs() {
+ check_fix(
+ r#"
+ {$0
+ "foo": "bar",
+ "bar": {
+ "kind": "Object",
+ "value": {}
+ }
+ }
+ "#,
+ r#"
+ struct Struct3{ }
+ struct Struct2{ kind: String, value: Struct3 }
+ struct Struct1{ bar: Struct2, foo: String }
+
+ "#,
+ );
+ }
+
+ #[test]
+ fn arrays() {
+ check_fix(
+ r#"
+ //- /lib.rs crate:lib deps:serde
+ {
+ "of_string": ["foo", "2", "x"], $0
+ "of_object": [{
+ "x": 10,
+ "y": 20
+ }, {
+ "x": 10,
+ "y": 20
+ }],
+ "nested": [[[2]]],
+ "empty": []
+ }
+ //- /serde.rs crate:serde
+
+ pub trait Serialize {
+ fn serialize() -> u8;
+ }
+ pub trait Deserialize {
+ fn deserialize() -> u8;
+ }
+ "#,
+ r#"
+ use serde::Serialize;
+ use serde::Deserialize;
+
+ #[derive(Serialize, Deserialize)]
+ struct Struct2{ x: i64, y: i64 }
+ #[derive(Serialize, Deserialize)]
+ struct Struct1{ empty: Vec<_>, nested: Vec<Vec<Vec<i64>>>, of_object: Vec<Struct2>, of_string: Vec<String> }
+
+ "#,
+ );
+ }
+
+ #[test]
+ fn no_emit_outside_of_item_position() {
+ check_no_fix(
+ r#"
+ fn foo() {
+ let json = {$0
+ "foo": "bar",
+ "bar": {
+ "kind": "Object",
+ "value": {}
+ }
+ };
+ }
+ "#,
+ );
+ }
+}
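
For reference (not part of the patch), the JSON-to-Rust mapping that `State::type_of` applies can be summarised as a small standalone function; the `"StructN"` arm stands in for the fresh struct that `build_struct` generates per JSON object:

```rust
use serde_json::Value;

// Simplified mirror of `State::type_of`, returning type names as strings.
fn json_type_name(value: &Value) -> String {
    match value {
        Value::Null => "()".to_owned(),
        Value::Bool(_) => "bool".to_owned(),
        Value::Number(n) => (if n.is_i64() { "i64" } else { "f64" }).to_owned(),
        Value::String(_) => "String".to_owned(),
        // The element type is taken from the first item; empty arrays fall back to `Vec<_>`.
        Value::Array(items) => match items.first() {
            Some(first) => format!("Vec<{}>", json_type_name(first)),
            None => "Vec<_>".to_owned(),
        },
        Value::Object(_) => "StructN".to_owned(),
    }
}
```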
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index d6a66dc15..43ff4ed5a 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -79,7 +79,7 @@ pub macro panic {
#[test]
fn include_macro_should_allow_empty_content() {
- let mut config = DiagnosticsConfig::default();
+ let mut config = DiagnosticsConfig::test_sample();
// FIXME: This is a false-positive, the file is actually linked in via
// `include!` macro
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index edb1fc091..7f140eb6a 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -124,6 +124,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let type_path = current_module?.find_use_path(
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
+ ctx.config.prefer_no_std,
)?;
use_trivial_constructor(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 9e66fbfb7..c24430ce6 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -750,7 +750,7 @@ fn main() {
enum Foo { A }
fn main() {
// FIXME: this should not bail out but current behavior is such as the old algorithm.
- // ExprValidator::validate_match(..) checks types of top level patterns incorrecly.
+ // ExprValidator::validate_match(..) checks types of top level patterns incorrectly.
match Foo::A {
ref _x => {}
Foo::A => {}
@@ -947,6 +947,50 @@ fn f() {
);
}
+ mod rust_unstable {
+ use super::*;
+
+ #[test]
+ fn rfc_1872_exhaustive_patterns() {
+ check_diagnostics_no_bails(
+ r"
+//- minicore: option, result
+#![feature(exhaustive_patterns)]
+enum Void {}
+fn test() {
+ match None::<!> { None => () }
+ match Result::<u8, !>::Ok(2) { Ok(_) => () }
+ match Result::<u8, Void>::Ok(2) { Ok(_) => () }
+ match (2, loop {}) {}
+ match Result::<!, !>::Ok(loop {}) {}
+ match (&loop {}) {} // https://github.com/rust-lang/rust/issues/50642#issuecomment-388234919
+ // ^^^^^^^^^^ error: missing match arm: type `&!` is non-empty
+}",
+ );
+ }
+
+ #[test]
+ fn rfc_1872_private_uninhabitedness() {
+ check_diagnostics_no_bails(
+ r"
+//- minicore: option
+//- /lib.rs crate:lib
+#![feature(exhaustive_patterns)]
+pub struct PrivatelyUninhabited { private_field: Void }
+enum Void {}
+fn test_local(x: Option<PrivatelyUninhabited>) {
+ match x {}
+} // ^ error: missing match arm: `None` not covered
+//- /main.rs crate:main deps:lib
+#![feature(exhaustive_patterns)]
+fn test(x: Option<lib::PrivatelyUninhabited>) {
+ match x {}
+ // ^ error: missing match arm: `None` and `Some(_)` not covered
+}",
+ );
+ }
+ }
+
mod false_negatives {
//! The implementation of match checking here is a work in progress. As we roll this out, we
//! prefer false negatives to false positives (ideally there would be no false positives). This
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index e032c578f..a80299106 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -68,7 +68,7 @@ fn missing_record_expr_field_fixes(
}
let new_field = make::record_field(
None,
- make::name(&record_expr_field.field_name()?.text()),
+ make::name(&record_expr_field.field_name()?.ident_token()?.text()),
make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
);
@@ -109,7 +109,7 @@ fn missing_record_expr_field_fixes(
#[cfg(test)]
mod tests {
- use crate::tests::{check_diagnostics, check_fix};
+ use crate::tests::{check_diagnostics, check_fix, check_no_fix};
#[test]
fn no_such_field_diagnostics() {
@@ -280,4 +280,18 @@ struct Foo {
"#,
)
}
+
+ #[test]
+ fn test_tuple_field_on_record_struct() {
+ check_no_fix(
+ r#"
+struct Struct {}
+fn main() {
+ Struct {
+ 0$0: 0
+ }
+}
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 6bf90e645..62c69f90b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -59,9 +59,6 @@ fn add_reference(
d: &hir::TypeMismatch,
acc: &mut Vec<Assist>,
) -> Option<()> {
- let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
- let expr_node = d.expr.value.to_node(&root);
-
let range = ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range;
let (_, mutability) = d.expected.as_reference()?;
@@ -72,7 +69,7 @@ fn add_reference(
let ampersands = format!("&{}", mutability.as_keyword_for_ref());
- let edit = TextEdit::insert(expr_node.syntax().text_range().start(), ampersands);
+ let edit = TextEdit::insert(range.start(), ampersands);
let source_change =
SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
acc.push(fix("add_reference_here", "Add reference here", source_change, range));
@@ -315,6 +312,34 @@ fn main() {
}
#[test]
+ fn test_add_reference_to_macro_call() {
+ check_fix(
+ r#"
+macro_rules! thousand {
+ () => {
+ 1000_u64
+ };
+}
+fn test(foo: &u64) {}
+fn main() {
+ test($0thousand!());
+}
+ "#,
+ r#"
+macro_rules! thousand {
+ () => {
+ 1000_u64
+ };
+}
+fn test(foo: &u64) {}
+fn main() {
+ test(&thousand!());
+}
+ "#,
+ );
+ }
+
+ #[test]
fn test_add_mutable_reference_to_let_stmt() {
check_fix(
r#"
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index 41abaa836..ae299f058 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -50,6 +50,7 @@ mod handlers {
pub(crate) mod field_shorthand;
pub(crate) mod useless_braces;
pub(crate) mod unlinked_file;
+ pub(crate) mod json_is_not_rust;
}
#[cfg(test)]
@@ -59,6 +60,7 @@ use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
base_db::{FileId, FileRange, SourceDatabase},
+ imports::insert_use::InsertUseConfig,
label::Label,
source_change::SourceChange,
FxHashSet, RootDatabase,
@@ -139,13 +141,39 @@ impl Default for ExprFillDefaultMode {
}
}
-#[derive(Default, Debug, Clone)]
+#[derive(Debug, Clone)]
pub struct DiagnosticsConfig {
pub proc_macros_enabled: bool,
pub proc_attr_macros_enabled: bool,
pub disable_experimental: bool,
pub disabled: FxHashSet<String>,
pub expr_fill_default: ExprFillDefaultMode,
+ // FIXME: We may want to include a whole `AssistConfig` here
+ pub insert_use: InsertUseConfig,
+ pub prefer_no_std: bool,
+}
+
+impl DiagnosticsConfig {
+ pub fn test_sample() -> Self {
+ use hir::PrefixKind;
+ use ide_db::imports::insert_use::ImportGranularity;
+
+ Self {
+ proc_macros_enabled: Default::default(),
+ proc_attr_macros_enabled: Default::default(),
+ disable_experimental: Default::default(),
+ disabled: Default::default(),
+ expr_fill_default: Default::default(),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Preserve,
+ enforce_granularity: false,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: false,
+ },
+ prefer_no_std: false,
+ }
+ }
}
struct DiagnosticsContext<'a> {
@@ -172,9 +200,12 @@ pub fn diagnostics(
}),
);
- for node in parse.tree().syntax().descendants() {
+ let parse = sema.parse(file_id);
+
+ for node in parse.syntax().descendants() {
handlers::useless_braces::useless_braces(&mut res, file_id, &node);
handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
+ handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config);
}
let module = sema.to_module_def(file_id);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index 7312bca32..729619cfd 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -37,7 +37,7 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
let after = trim_indent(ra_fixture_after);
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
- let mut conf = DiagnosticsConfig::default();
+ let mut conf = DiagnosticsConfig::test_sample();
conf.expr_fill_default = ExprFillDefaultMode::Default;
let diagnostic =
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
@@ -69,7 +69,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture);
let diagnostic = super::diagnostics(
&db,
- &DiagnosticsConfig::default(),
+ &DiagnosticsConfig::test_sample(),
&AssistResolveStrategy::All,
file_position.file_id,
)
@@ -82,7 +82,7 @@ pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
let diagnostics = super::diagnostics(
&db,
- &DiagnosticsConfig::default(),
+ &DiagnosticsConfig::test_sample(),
&AssistResolveStrategy::All,
file_id,
);
@@ -91,7 +91,7 @@ pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
- let mut config = DiagnosticsConfig::default();
+ let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
check_diagnostics_with_config(config, ra_fixture)
}
@@ -127,7 +127,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
#[test]
fn test_disabled_diagnostics() {
- let mut config = DiagnosticsConfig::default();
+ let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("unresolved-module".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
@@ -137,7 +137,7 @@ fn test_disabled_diagnostics() {
let diagnostics = super::diagnostics(
&db,
- &DiagnosticsConfig::default(),
+ &DiagnosticsConfig::test_sample(),
&AssistResolveStrategy::All,
file_id,
);
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
index d36dd02d4..4baf786c4 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -12,14 +12,14 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-
-itertools = "0.10.3"
+itertools = "0.10.5"
text-edit = { path = "../text-edit", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
ide-db = { path = "../ide-db", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
[dev-dependencies]
test-utils = { path = "../test-utils" }
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index a5e24daa9..d9834ee63 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -57,7 +57,7 @@
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Structural Search Replace**
+// | VS Code | **rust-analyzer: Structural Search Replace**
// |===
//
// Also available as an assist, by writing a comment containing the structural
@@ -86,11 +86,9 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
use crate::{errors::bail, matching::MatchFailureReason};
use hir::Semantics;
-use ide_db::{
- base_db::{FileId, FilePosition, FileRange},
- FxHashMap,
-};
+use ide_db::base_db::{FileId, FilePosition, FileRange};
use resolving::ResolvedRule;
+use stdx::hash::NoHashHashMap;
use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit;
@@ -170,9 +168,9 @@ impl<'db> MatchFinder<'db> {
}
/// Finds matches for all added rules and returns edits for all found matches.
- pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
+ pub fn edits(&self) -> NoHashHashMap<FileId, TextEdit> {
use ide_db::base_db::SourceDatabaseExt;
- let mut matches_by_file = FxHashMap::default();
+ let mut matches_by_file = NoHashHashMap::default();
for m in self.matches().matches {
matches_by_file
.entry(m.range.file_id)
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index e3a837ddc..57b5ab6ab 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -648,9 +648,10 @@ impl Match {
.module();
for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
- let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
- match_error!("Failed to render template path `{}` at match location")
- })?;
+ let mod_path =
+ module.find_use_path(sema.db, module_def, false).ok_or_else(|| {
+ match_error!("Failed to render template path `{}` at match location")
+ })?;
self.rendered_template_paths.insert(path.clone(), mod_path);
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
index 0e9771cd2..712459a7e 100644
--- a/src/tools/rust-analyzer/crates/ide/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -13,12 +13,12 @@ doctest = false
cov-mark = "2.0.0-pre.1"
crossbeam-channel = "0.5.5"
either = "1.7.0"
-itertools = "0.10.3"
+itertools = "0.10.5"
tracing = "0.1.35"
oorandom = "11.1.3"
-pulldown-cmark-to-cmark = "10.0.1"
+pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.1", default-features = false }
-url = "2.2.2"
+url = "2.3.1"
dot = "0.1.4"
stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
index 210c5c7fa..f994c284c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
@@ -8,13 +8,15 @@ use ide_db::{
use syntax::{ast::HasName, AstNode, TextRange};
use crate::{
- fn_references::find_all_methods,
+ annotations::fn_references::find_all_methods,
goto_implementation::goto_implementation,
references::find_all_refs,
runnables::{runnables, Runnable},
NavigationTarget, RunnableKind,
};
+mod fn_references;
+
// Feature: Annotations
//
// Provides user with annotations above items for looking up references or impl blocks
@@ -30,8 +32,8 @@ pub struct Annotation {
#[derive(Debug)]
pub enum AnnotationKind {
Runnable(Runnable),
- HasImpls { file_id: FileId, data: Option<Vec<NavigationTarget>> },
- HasReferences { file_id: FileId, data: Option<Vec<FileRange>> },
+ HasImpls { pos: FilePosition, data: Option<Vec<NavigationTarget>> },
+ HasReferences { pos: FilePosition, data: Option<Vec<FileRange>> },
}
pub struct AnnotationConfig {
@@ -41,6 +43,12 @@ pub struct AnnotationConfig {
pub annotate_references: bool,
pub annotate_method_references: bool,
pub annotate_enum_variant_references: bool,
+ pub location: AnnotationLocation,
+}
+
+pub enum AnnotationLocation {
+ AboveName,
+ AboveWholeItem,
}
pub(crate) fn annotations(
@@ -62,6 +70,16 @@ pub(crate) fn annotations(
}
}
+ let mk_ranges = |(range, focus): (_, Option<_>)| {
+ let cmd_target: TextRange = focus.unwrap_or(range);
+ let annotation_range = match config.location {
+ AnnotationLocation::AboveName => cmd_target,
+ AnnotationLocation::AboveWholeItem => range,
+ };
+ let target_pos = FilePosition { file_id, offset: cmd_target.start() };
+ (annotation_range, target_pos)
+ };
+
visit_file_defs(&Semantics::new(db), file_id, &mut |def| {
let range = match def {
Definition::Const(konst) if config.annotate_references => {
@@ -81,9 +99,13 @@ pub(crate) fn annotations(
})
.flatten()
.for_each(|range| {
+ let (annotation_range, target_position) = mk_ranges(range);
annotations.push(Annotation {
- range,
- kind: AnnotationKind::HasReferences { file_id, data: None },
+ range: annotation_range,
+ kind: AnnotationKind::HasReferences {
+ pos: target_position,
+ data: None,
+ },
})
})
}
@@ -108,15 +130,18 @@ pub(crate) fn annotations(
Some(range) => range,
None => return,
};
-
+ let (annotation_range, target_pos) = mk_ranges(range);
if config.annotate_impls && !matches!(def, Definition::Const(_)) {
- annotations
- .push(Annotation { range, kind: AnnotationKind::HasImpls { file_id, data: None } });
+ annotations.push(Annotation {
+ range: annotation_range,
+ kind: AnnotationKind::HasImpls { pos: target_pos, data: None },
+ });
}
+
if config.annotate_references {
annotations.push(Annotation {
- range,
- kind: AnnotationKind::HasReferences { file_id, data: None },
+ range: annotation_range,
+ kind: AnnotationKind::HasReferences { pos: target_pos, data: None },
});
}
@@ -124,10 +149,13 @@ pub(crate) fn annotations(
db: &RootDatabase,
node: InFile<T>,
source_file_id: FileId,
- ) -> Option<TextRange> {
+ ) -> Option<(TextRange, Option<TextRange>)> {
if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
if file_id == source_file_id.into() {
- return value.name().map(|it| it.syntax().text_range());
+ return Some((
+ value.syntax().text_range(),
+ value.name().map(|name| name.syntax().text_range()),
+ ));
}
}
None
@@ -135,12 +163,13 @@ pub(crate) fn annotations(
});
if config.annotate_method_references {
- annotations.extend(find_all_methods(db, file_id).into_iter().map(
- |FileRange { file_id, range }| Annotation {
- range,
- kind: AnnotationKind::HasReferences { file_id, data: None },
- },
- ));
+ annotations.extend(find_all_methods(db, file_id).into_iter().map(|range| {
+ let (annotation_range, target_range) = mk_ranges(range);
+ Annotation {
+ range: annotation_range,
+ kind: AnnotationKind::HasReferences { pos: target_range, data: None },
+ }
+ }));
}
annotations
@@ -148,18 +177,11 @@ pub(crate) fn annotations(
pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation {
match annotation.kind {
- AnnotationKind::HasImpls { file_id, ref mut data } => {
- *data =
- goto_implementation(db, FilePosition { file_id, offset: annotation.range.start() })
- .map(|range| range.info);
+ AnnotationKind::HasImpls { pos, ref mut data } => {
+ *data = goto_implementation(db, pos).map(|range| range.info);
}
- AnnotationKind::HasReferences { file_id, ref mut data } => {
- *data = find_all_refs(
- &Semantics::new(db),
- FilePosition { file_id, offset: annotation.range.start() },
- None,
- )
- .map(|result| {
+ AnnotationKind::HasReferences { pos, ref mut data } => {
+ *data = find_all_refs(&Semantics::new(db), pos, None).map(|result| {
result
.into_iter()
.flat_map(|res| res.references)
@@ -188,21 +210,23 @@ mod tests {
use crate::{fixture, Annotation, AnnotationConfig};
- fn check(ra_fixture: &str, expect: Expect) {
+ use super::AnnotationLocation;
+
+ const DEFAULT_CONFIG: AnnotationConfig = AnnotationConfig {
+ binary_target: true,
+ annotate_runnables: true,
+ annotate_impls: true,
+ annotate_references: true,
+ annotate_method_references: true,
+ annotate_enum_variant_references: true,
+ location: AnnotationLocation::AboveName,
+ };
+
+ fn check_with_config(ra_fixture: &str, expect: Expect, config: &AnnotationConfig) {
let (analysis, file_id) = fixture::file(ra_fixture);
let annotations: Vec<Annotation> = analysis
- .annotations(
- &AnnotationConfig {
- binary_target: true,
- annotate_runnables: true,
- annotate_impls: true,
- annotate_references: true,
- annotate_method_references: true,
- annotate_enum_variant_references: true,
- },
- file_id,
- )
+ .annotations(config, file_id)
.unwrap()
.into_iter()
.map(|annotation| analysis.resolve_annotation(annotation).unwrap())
@@ -211,6 +235,10 @@ mod tests {
expect.assert_debug_eq(&annotations);
}
+ fn check(ra_fixture: &str, expect: Expect) {
+ check_with_config(ra_fixture, expect, &DEFAULT_CONFIG);
+ }
+
#[test]
fn const_annotations() {
check(
@@ -247,9 +275,12 @@ fn main() {
Annotation {
range: 6..10,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
data: Some(
[
FileRange {
@@ -265,9 +296,12 @@ fn main() {
Annotation {
range: 30..36,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 30,
+ },
data: Some(
[],
),
@@ -276,9 +310,12 @@ fn main() {
Annotation {
range: 53..57,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 53,
+ },
data: Some(
[],
),
@@ -323,9 +360,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasImpls {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
data: Some(
[],
),
@@ -334,9 +374,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
data: Some(
[
FileRange {
@@ -352,9 +395,12 @@ fn main() {
Annotation {
range: 17..21,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 17,
+ },
data: Some(
[],
),
@@ -403,9 +449,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasImpls {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
data: Some(
[
NavigationTarget {
@@ -424,9 +473,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
data: Some(
[
FileRange {
@@ -448,9 +500,12 @@ fn main() {
Annotation {
range: 20..31,
kind: HasImpls {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 20,
+ },
data: Some(
[
NavigationTarget {
@@ -469,9 +524,12 @@ fn main() {
Annotation {
range: 20..31,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 20,
+ },
data: Some(
[
FileRange {
@@ -487,9 +545,12 @@ fn main() {
Annotation {
range: 69..73,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 69,
+ },
data: Some(
[],
),
@@ -530,9 +591,12 @@ fn main() {}
Annotation {
range: 3..7,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 3,
+ },
data: Some(
[],
),
@@ -581,9 +645,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasImpls {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
data: Some(
[
NavigationTarget {
@@ -602,9 +669,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
data: Some(
[
FileRange {
@@ -626,9 +696,12 @@ fn main() {
Annotation {
range: 33..44,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 33,
+ },
data: Some(
[
FileRange {
@@ -644,9 +717,12 @@ fn main() {
Annotation {
range: 61..65,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 61,
+ },
data: Some(
[],
),
@@ -740,9 +816,12 @@ mod tests {
Annotation {
range: 3..7,
kind: HasReferences {
- file_id: FileId(
- 0,
- ),
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 3,
+ },
data: Some(
[],
),
@@ -786,4 +865,48 @@ m!();
"#]],
);
}
+
+ #[test]
+ fn test_annotations_appear_above_whole_item_when_configured_to_do_so() {
+ check_with_config(
+ r#"
+/// This is a struct named Foo, obviously.
+#[derive(Clone)]
+struct Foo;
+"#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 0..71,
+ kind: HasImpls {
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 67,
+ },
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 0..71,
+ kind: HasReferences {
+ pos: FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 67,
+ },
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ &AnnotationConfig { location: AnnotationLocation::AboveWholeItem, ..DEFAULT_CONFIG },
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs
new file mode 100644
index 000000000..0cadf125f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs
@@ -0,0 +1,103 @@
+//! This module implements a search for methods and free functions in the specified file.
+//! We have to skip tests, so we cannot reuse the file_structure module.
+
+use hir::Semantics;
+use ide_assists::utils::test_related_attribute;
+use ide_db::RootDatabase;
+use syntax::{ast, ast::HasName, AstNode, SyntaxNode, TextRange};
+
+use crate::FileId;
+
+pub(super) fn find_all_methods(
+ db: &RootDatabase,
+ file_id: FileId,
+) -> Vec<(TextRange, Option<TextRange>)> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(file_id);
+ source_file.syntax().descendants().filter_map(|it| method_range(it)).collect()
+}
+
+fn method_range(item: SyntaxNode) -> Option<(TextRange, Option<TextRange>)> {
+ ast::Fn::cast(item).and_then(|fn_def| {
+ if test_related_attribute(&fn_def).is_some() {
+ None
+ } else {
+ Some((
+ fn_def.syntax().text_range(),
+ fn_def.name().map(|name| name.syntax().text_range()),
+ ))
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use syntax::TextRange;
+
+ use crate::fixture;
+ use crate::TextSize;
+ use std::ops::RangeInclusive;
+
+ #[test]
+ fn test_find_all_methods() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ fn private_fn() {$0}
+
+ pub fn pub_fn() {}
+
+ pub fn generic_fn<T>(arg: T) {}
+ "#,
+ );
+
+ let refs = super::find_all_methods(&analysis.db, pos.file_id);
+ check_result(&refs, &[3..=13, 27..=33, 47..=57]);
+ }
+
+ #[test]
+ fn test_find_trait_methods() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ trait Foo {
+ fn bar() {$0}
+ fn baz() {}
+ }
+ "#,
+ );
+
+ let refs = super::find_all_methods(&analysis.db, pos.file_id);
+ check_result(&refs, &[19..=22, 35..=38]);
+ }
+
+ #[test]
+ fn test_skip_tests() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ //- /lib.rs
+ #[test]
+ fn foo() {$0}
+
+ pub fn pub_fn() {}
+
+ mod tests {
+ #[test]
+ fn bar() {}
+ }
+ "#,
+ );
+
+ let refs = super::find_all_methods(&analysis.db, pos.file_id);
+ check_result(&refs, &[28..=34]);
+ }
+
+ fn check_result(refs: &[(TextRange, Option<TextRange>)], expected: &[RangeInclusive<u32>]) {
+ assert_eq!(refs.len(), expected.len());
+
+ for (i, &(full, focus)) in refs.iter().enumerate() {
+ let range = &expected[i];
+ let item = focus.unwrap_or(full);
+ assert_eq!(TextSize::from(*range.start()), item.start());
+ assert_eq!(TextSize::from(*range.end()), item.end());
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
index a18a6bea9..5a8cda8fb 100644
--- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -7,7 +7,7 @@ use ide_db::{
search::FileReference,
FxIndexMap, RootDatabase,
};
-use syntax::{ast, AstNode, SyntaxKind::NAME, TextRange};
+use syntax::{ast, AstNode, SyntaxKind::IDENT, TextRange};
use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
@@ -79,7 +79,7 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
let file = sema.parse(file_id);
let file = file.syntax();
let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
- NAME => 1,
+ IDENT => 1,
_ => 0,
})?;
let mut calls = CallLocations::default();
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 582e9fe7e..d96827326 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -184,10 +184,10 @@ pub(crate) fn resolve_doc_path_for_def(
Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
Definition::Field(it) => it.resolve_doc_path(db, link, ns),
+ Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
Definition::BuiltinAttr(_)
| Definition::ToolModule(_)
| Definition::BuiltinType(_)
- | Definition::SelfType(_)
| Definition::Local(_)
| Definition::GenericParam(_)
| Definition::Label(_)
@@ -232,8 +232,13 @@ pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option<DocComment
(match_ast! {
match doc_token {
ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
- ast::String(string) => doc_token.parent_ancestors().find_map(ast::Attr::cast)
- .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
+ ast::String(string) => {
+ doc_token.parent_ancestors().find_map(ast::Attr::cast).filter(|attr| attr.simple_name().as_deref() == Some("doc"))?;
+ if doc_token.parent_ancestors().find_map(ast::MacroCall::cast).filter(|mac| mac.path().and_then(|p| p.segment()?.name_ref()).as_ref().map(|n| n.text()).as_deref() == Some("include_str")).is_some() {
+ return None;
+ }
+ string.open_quote_text_range().map(|it| it.len())
+ },
_ => None,
}
}).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() })
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index efa8551a0..93252339c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -19,7 +19,7 @@ pub struct ExpandedMacro {
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Expand macro recursively**
+// | VS Code | **rust-analyzer: Expand macro recursively**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
@@ -32,7 +32,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
_ => 0,
})?;
- // due to how Rust Analyzer works internally, we need to special case derive attributes,
+ // due to how rust-analyzer works internally, we need to special case derive attributes,
// otherwise they might not get found, e.g. here with the cursor at $0 `#[attr]` would expand:
// ```
// #[attr]
diff --git a/src/tools/rust-analyzer/crates/ide/src/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/fn_references.rs
deleted file mode 100644
index 63fb322ce..000000000
--- a/src/tools/rust-analyzer/crates/ide/src/fn_references.rs
+++ /dev/null
@@ -1,94 +0,0 @@
-//! This module implements a methods and free functions search in the specified file.
-//! We have to skip tests, so cannot reuse file_structure module.
-
-use hir::Semantics;
-use ide_assists::utils::test_related_attribute;
-use ide_db::RootDatabase;
-use syntax::{ast, ast::HasName, AstNode, SyntaxNode};
-
-use crate::{FileId, FileRange};
-
-pub(crate) fn find_all_methods(db: &RootDatabase, file_id: FileId) -> Vec<FileRange> {
- let sema = Semantics::new(db);
- let source_file = sema.parse(file_id);
- source_file.syntax().descendants().filter_map(|it| method_range(it, file_id)).collect()
-}
-
-fn method_range(item: SyntaxNode, file_id: FileId) -> Option<FileRange> {
- ast::Fn::cast(item).and_then(|fn_def| {
- if test_related_attribute(&fn_def).is_some() {
- None
- } else {
- fn_def.name().map(|name| FileRange { file_id, range: name.syntax().text_range() })
- }
- })
-}
-
-#[cfg(test)]
-mod tests {
- use crate::fixture;
- use crate::{FileRange, TextSize};
- use std::ops::RangeInclusive;
-
- #[test]
- fn test_find_all_methods() {
- let (analysis, pos) = fixture::position(
- r#"
- fn private_fn() {$0}
-
- pub fn pub_fn() {}
-
- pub fn generic_fn<T>(arg: T) {}
- "#,
- );
-
- let refs = analysis.find_all_methods(pos.file_id).unwrap();
- check_result(&refs, &[3..=13, 27..=33, 47..=57]);
- }
-
- #[test]
- fn test_find_trait_methods() {
- let (analysis, pos) = fixture::position(
- r#"
- trait Foo {
- fn bar() {$0}
- fn baz() {}
- }
- "#,
- );
-
- let refs = analysis.find_all_methods(pos.file_id).unwrap();
- check_result(&refs, &[19..=22, 35..=38]);
- }
-
- #[test]
- fn test_skip_tests() {
- let (analysis, pos) = fixture::position(
- r#"
- //- /lib.rs
- #[test]
- fn foo() {$0}
-
- pub fn pub_fn() {}
-
- mod tests {
- #[test]
- fn bar() {}
- }
- "#,
- );
-
- let refs = analysis.find_all_methods(pos.file_id).unwrap();
- check_result(&refs, &[28..=34]);
- }
-
- fn check_result(refs: &[FileRange], expected: &[RangeInclusive<u32>]) {
- assert_eq!(refs.len(), expected.len());
-
- for (i, item) in refs.iter().enumerate() {
- let range = &expected[i];
- assert_eq!(TextSize::from(*range.start()), item.range.start());
- assert_eq!(TextSize::from(*range.end()), item.range.end());
- }
- }
-}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index d9c97751c..d0be1b3f4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -1,4 +1,4 @@
-use std::{convert::TryInto, mem::discriminant};
+use std::mem::discriminant;
use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
use hir::{AsAssocItem, AssocItem, Semantics};
@@ -39,15 +39,23 @@ pub(crate) fn goto_definition(
| T![super]
| T![crate]
| T![Self]
- | COMMENT => 2,
+ | COMMENT => 4,
+ // index and prefix ops
+ T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
+ kind if kind.is_keyword() => 2,
+ T!['('] | T![')'] => 2,
kind if kind.is_trivia() => 0,
_ => 1,
})?;
if let Some(doc_comment) = token_as_doc_comment(&original_token) {
- return doc_comment.get_definition_with_descend_at(sema, position.offset, |def, _, _| {
- let nav = def.try_to_nav(db)?;
- Some(RangeInfo::new(original_token.text_range(), vec![nav]))
- });
+ return doc_comment.get_definition_with_descend_at(
+ sema,
+ position.offset,
+ |def, _, link_range| {
+ let nav = def.try_to_nav(db)?;
+ Some(RangeInfo::new(link_range, vec![nav]))
+ },
+ );
}
let navs = sema
.descend_into_macros(original_token.clone())
@@ -91,6 +99,14 @@ fn try_lookup_include_path(
if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
return None;
}
+
+ // Ignore non-built-in macros to account for shadowing
+ if let Some(it) = sema.resolve_macro_call(&macro_call) {
+ if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) {
+ return None;
+ }
+ }
+
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).len().try_into().ok()?;
Some(NavigationTarget {
@@ -152,9 +168,6 @@ mod tests {
fn check(ra_fixture: &str) {
let (analysis, position, expected) = fixture::annotations(ra_fixture);
let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
- if navs.is_empty() {
- panic!("unresolved reference")
- }
let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
let navs = navs
@@ -1344,6 +1357,10 @@ fn f(e: Enum) {
check(
r#"
//- /main.rs
+
+#[rustc_builtin_macro]
+macro_rules! include_str {}
+
fn main() {
let str = include_str!("foo.txt$0");
}
@@ -1353,6 +1370,42 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn goto_doc_include_str() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {}
+
+#[doc = include_str!("docs.md$0")]
+struct Item;
+
+//- /docs.md
+// docs
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_shadow_include() {
+ check(
+ r#"
+//- /main.rs
+macro_rules! include {
+ ("included.rs") => {}
+}
+
+include!("included.rs$0");
+
+//- /included.rs
+// empty
+"#,
+ );
+ }
+
#[cfg(test)]
mod goto_impl_of_trait_fn {
use super::check;
@@ -1631,4 +1684,154 @@ foo!(bar$0);
"#,
);
}
+
+ #[test]
+ fn goto_await_poll() {
+ check(
+ r#"
+//- minicore: future
+
+struct MyFut;
+
+impl core::future::Future for MyFut {
+ type Output = ();
+
+ fn poll(
+ //^^^^
+ self: std::pin::Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>
+ ) -> std::task::Poll<Self::Output>
+ {
+ ()
+ }
+}
+
+fn f() {
+ MyFut.await$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_await_into_future_poll() {
+ check(
+ r#"
+//- minicore: future
+
+struct Futurable;
+
+impl core::future::IntoFuture for Futurable {
+ type IntoFuture = MyFut;
+}
+
+struct MyFut;
+
+impl core::future::Future for MyFut {
+ type Output = ();
+
+ fn poll(
+ //^^^^
+ self: std::pin::Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>
+ ) -> std::task::Poll<Self::Output>
+ {
+ ()
+ }
+}
+
+fn f() {
+ Futurable.await$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_try_op() {
+ check(
+ r#"
+//- minicore: try
+
+struct Struct;
+
+impl core::ops::Try for Struct {
+ fn branch(
+ //^^^^^^
+ self
+ ) {}
+}
+
+fn f() {
+ Struct?$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_index_op() {
+ check(
+ r#"
+//- minicore: index
+
+struct Struct;
+
+impl core::ops::Index<usize> for Struct {
+ fn index(
+ //^^^^^
+ self
+ ) {}
+}
+
+fn f() {
+ Struct[0]$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_prefix_op() {
+ check(
+ r#"
+//- minicore: deref
+
+struct Struct;
+
+impl core::ops::Deref for Struct {
+ fn deref(
+ //^^^^^
+ self
+ ) {}
+}
+
+fn f() {
+ $0*Struct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_bin_op() {
+ check(
+ r#"
+//- minicore: add
+
+struct Struct;
+
+impl core::ops::Add for Struct {
+ fn add(
+ //^^^
+ self
+ ) {}
+}
+
+fn f() {
+ Struct +$0 Struct;
+}
+"#,
+ );
+ }
}
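
The new rankings and tests above let goto-definition resolve operator tokens to the trait methods they desugar to: `Future::poll` for `.await`, `Try::branch` for `?`, `Index::index`, `Deref::deref`, `Add::add`, and so on. As a plain-Rust reminder of that desugaring (independent of rust-analyzer), the operator form and the explicit trait call are the same thing:

```rust
use std::ops::Add;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Meters(f64);

impl Add for Meters {
    type Output = Meters;
    // `a + b` is sugar for `Add::add(a, b)`; this method is what
    // goto-definition on the `+` token now navigates to.
    fn add(self, rhs: Meters) -> Meters {
        Meters(self.0 + rhs.0)
    }
}

fn main() {
    let (a, b) = (Meters(1.5), Meters(2.5));
    assert_eq!(a + b, Add::add(a, b));
}
```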
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index 04b51c839..b3f711b6b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -30,7 +30,7 @@ pub(crate) fn goto_implementation(
let original_token =
pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
- IDENT | T![self] => 1,
+ IDENT | T![self] | INT_NUMBER => 1,
_ => 0,
})?;
let range = original_token.text_range();
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index f2d7029ea..540a11583 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -333,7 +333,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
sema.descend_into_macros(token)
.into_iter()
- .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+ .filter_map(|token| IdentClass::classify_token(sema, &token))
+ .map(IdentClass::definitions_no_ops)
.flatten()
.collect()
}
@@ -376,6 +377,7 @@ mod tests {
match it {
ReferenceCategory::Read => "read",
ReferenceCategory::Write => "write",
+ ReferenceCategory::Import => "import",
}
.to_string()
}),
@@ -422,12 +424,12 @@ struct Foo;
check(
r#"
use crate$0;
- //^^^^^
+ //^^^^^ import
use self;
- //^^^^
+ //^^^^ import
mod __ {
use super;
- //^^^^^
+ //^^^^^ import
}
"#,
);
@@ -435,7 +437,7 @@ mod __ {
r#"
//- /main.rs crate:main deps:lib
use lib$0;
- //^^^
+ //^^^ import
//- /lib.rs crate:lib
"#,
);
@@ -449,7 +451,7 @@ use lib$0;
mod foo;
//- /foo.rs
use self$0;
- // ^^^^
+ // ^^^^ import
"#,
);
}
@@ -1374,4 +1376,20 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn test_assoc_type_highlighting() {
+ check(
+ r#"
+trait Trait {
+ type Output;
+ // ^^^^^^
+}
+impl Trait for () {
+ type Output$0 = ();
+ // ^^^^^^
+}
+"#,
+ );
+ }
}
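
The behavioural change in `find_defs` is the switch to `definitions_no_ops`, so operator tokens no longer contribute highlight targets; the surrounding iterator reshuffle is shape-preserving. A tiny standalone check of that iterator equivalence, on unrelated example data:

```rust
fn main() {
    let tokens = ["1", "x", "2", "3"];

    // Fused form: classify and expand inside a single filter_map ...
    let fused: Vec<Vec<i32>> = tokens
        .iter()
        .filter_map(|t| t.parse::<i32>().ok().map(|n| vec![n, n * 10]))
        .collect();

    // ... split form, as in the hunk above: filter_map, then map.
    let split: Vec<Vec<i32>> = tokens
        .iter()
        .filter_map(|t| t.parse::<i32>().ok())
        .map(|n| vec![n, n * 10])
        .collect();

    assert_eq!(fused, split);
}
```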
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 59c97f2dc..3687b597f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -9,7 +9,7 @@ use either::Either;
use hir::{HasSource, Semantics};
use ide_db::{
base_db::FileRange,
- defs::{Definition, IdentClass},
+ defs::{Definition, IdentClass, OperatorClass},
famous_defs::FamousDefs,
helpers::pick_best_token,
FxIndexSet, RootDatabase,
@@ -27,6 +27,7 @@ use crate::{
pub struct HoverConfig {
pub links_in_hover: bool,
pub documentation: Option<HoverDocFormat>,
+ pub keywords: bool,
}
impl HoverConfig {
@@ -101,7 +102,10 @@ pub(crate) fn hover(
let offset = range.start();
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
- IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 3,
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4,
+ // index and prefix ops
+ T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
+ kind if kind.is_keyword() => 2,
T!['('] | T![')'] => 2,
kind if kind.is_trivia() => 0,
_ => 1,
@@ -116,6 +120,8 @@ pub(crate) fn hover(
}
let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
+    // Prefer descending into the same token kind in attribute expansions; in normal macros,
+    // text equivalency is more important
let descended = if in_attr {
[sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
} else {
@@ -136,6 +142,11 @@ pub(crate) fn hover(
.filter_map(|token| {
let node = token.parent()?;
let class = IdentClass::classify_token(sema, token)?;
+ if let IdentClass::Operator(OperatorClass::Await(_)) = class {
+ // It's better for us to fall back to the keyword hover here,
+ // rendering poll is very confusing
+ return None;
+ }
Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
})
.flatten()
@@ -232,10 +243,12 @@ fn hover_type_fallback(
token: &SyntaxToken,
original_token: &SyntaxToken,
) -> Option<RangeInfo<HoverResult>> {
- let node = token
- .parent_ancestors()
- .take_while(|it| !ast::Item::can_cast(it.kind()))
- .find(|n| ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()))?;
+ let node =
+ token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| {
+ ast::Expr::can_cast(n.kind())
+ || ast::Pat::can_cast(n.kind())
+ || ast::Type::can_cast(n.kind())
+ })?;
let expr_or_pat = match_ast! {
match node {
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 6c50a4e6a..d109c0769 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -2,12 +2,13 @@
use std::fmt::Display;
use either::Either;
-use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HirDisplay, Semantics, TypeInfo};
+use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo};
use ide_db::{
base_db::SourceDatabase,
defs::Definition,
famous_defs::FamousDefs,
generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
+ syntax_helpers::insert_whitespace_into_node,
RootDatabase,
};
use itertools::Itertools;
@@ -230,7 +231,7 @@ pub(super) fn keyword(
config: &HoverConfig,
token: &SyntaxToken,
) -> Option<HoverResult> {
- if !token.kind().is_keyword() || !config.documentation.is_some() {
+ if !token.kind().is_keyword() || !config.documentation.is_some() || !config.keywords {
return None;
}
let parent = token.parent()?;
@@ -345,15 +346,38 @@ pub(super) fn definition(
Definition::Module(it) => label_and_docs(db, it),
Definition::Function(it) => label_and_docs(db, it),
Definition::Adt(it) => label_and_docs(db, it),
- Definition::Variant(it) => label_and_docs(db, it),
+ Definition::Variant(it) => label_value_and_docs(db, it, |&it| {
+ if !it.parent_enum(db).is_data_carrying(db) {
+ match it.eval(db) {
+ Ok(x) => Some(format!("{}", x)),
+ Err(_) => it.value(db).map(|x| format!("{:?}", x)),
+ }
+ } else {
+ None
+ }
+ }),
Definition::Const(it) => label_value_and_docs(db, it, |it| {
let body = it.eval(db);
match body {
Ok(x) => Some(format!("{}", x)),
- Err(_) => it.value(db).map(|x| format!("{}", x)),
+ Err(_) => {
+ let source = it.source(db)?;
+ let mut body = source.value.body()?.syntax().clone();
+ if source.file_id.is_macro() {
+ body = insert_whitespace_into_node::insert_ws_into(body);
+ }
+ Some(body.to_string())
+ }
+ }
+ }),
+ Definition::Static(it) => label_value_and_docs(db, it, |it| {
+ let source = it.source(db)?;
+ let mut body = source.value.body()?.syntax().clone();
+ if source.file_id.is_macro() {
+ body = insert_whitespace_into_node::insert_ws_into(body);
}
+ Some(body.to_string())
}),
- Definition::Static(it) => label_value_and_docs(db, it, |it| it.value(db)),
Definition::Trait(it) => label_and_docs(db, it),
Definition::TypeAlias(it) => label_and_docs(db, it),
Definition::BuiltinType(it) => {
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index 867d1f54d..eb997e6fe 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -8,7 +8,11 @@ fn check_hover_no_result(ra_fixture: &str) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ &HoverConfig {
+ links_in_hover: true,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap();
@@ -20,7 +24,11 @@ fn check(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ &HoverConfig {
+ links_in_hover: true,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
@@ -37,7 +45,11 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ &HoverConfig {
+ links_in_hover: false,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
@@ -54,7 +66,11 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::PlainText) },
+ &HoverConfig {
+ links_in_hover: true,
+ documentation: Some(HoverDocFormat::PlainText),
+ keywords: true,
+ },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
@@ -71,7 +87,11 @@ fn check_actions(ra_fixture: &str, expect: Expect) {
let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ &HoverConfig {
+ links_in_hover: true,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ },
FileRange { file_id, range: position.range_or_empty() },
)
.unwrap()
@@ -83,7 +103,11 @@ fn check_hover_range(ra_fixture: &str, expect: Expect) {
let (analysis, range) = fixture::range(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ &HoverConfig {
+ links_in_hover: false,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ },
range,
)
.unwrap()
@@ -95,7 +119,11 @@ fn check_hover_range_no_results(ra_fixture: &str) {
let (analysis, range) = fixture::range(ra_fixture);
let hover = analysis
.hover(
- &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ &HoverConfig {
+ links_in_hover: false,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ },
range,
)
.unwrap();
@@ -670,6 +698,7 @@ fn hover_enum_variant() {
check(
r#"
enum Option<T> {
+ Some(T)
/// The None variant
Non$0e
}
@@ -3500,6 +3529,112 @@ impl<const LEN: usize> Foo<LEN$0> {}
}
#[test]
+fn hover_const_eval_variant() {
+ // show hex for <10
+ check(
+ r#"
+#[repr(u8)]
+enum E {
+ /// This is a doc
+ A$0 = 1 << 3,
+}
+"#,
+ expect![[r#"
+ *A*
+
+ ```rust
+ test::E
+ ```
+
+ ```rust
+ A = 8
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show hex for >10
+ check(
+ r#"
+#[repr(u8)]
+enum E {
+ /// This is a doc
+ A$0 = (1 << 3) + (1 << 2),
+}
+"#,
+ expect![[r#"
+ *A*
+
+ ```rust
+ test::E
+ ```
+
+ ```rust
+ A = 12 (0xC)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // enums in const eval
+ check(
+ r#"
+#[repr(u8)]
+enum E {
+ A = 1,
+ /// This is a doc
+ B$0 = E::A as u8 + 1,
+}
+"#,
+ expect![[r#"
+ *B*
+
+ ```rust
+ test::E
+ ```
+
+ ```rust
+ B = 2
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // unspecified variant should increment by one
+ check(
+ r#"
+#[repr(u8)]
+enum E {
+ A = 4,
+ /// This is a doc
+ B$0,
+}
+"#,
+ expect![[r#"
+ *B*
+
+ ```rust
+ test::E
+ ```
+
+ ```rust
+ B = 5
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
fn hover_const_eval() {
// show hex for <10
check(
@@ -3795,6 +3930,35 @@ fn foo() {
This is a doc
"#]],
);
+ check(
+ r#"
+enum E {
+ /// This is a doc
+ A = 3,
+}
+fn foo(e: E) {
+ match e {
+ E::A$0 => (),
+ _ => ()
+ }
+}
+"#,
+ expect![[r#"
+ *A*
+
+ ```rust
+ test::E
+ ```
+
+ ```rust
+ A = 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
}
#[test]
@@ -5051,3 +5215,95 @@ fn f() {
```"#]],
);
}
+
+#[test]
+fn hover_deref() {
+ check(
+ r#"
+//- minicore: deref
+
+struct Struct(usize);
+
+impl core::ops::Deref for Struct {
+ type Target = usize;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn f() {
+ $0*Struct(0);
+}
+"#,
+ expect![[r#"
+ ***
+
+ ```rust
+ test::Struct
+ ```
+
+ ```rust
+ fn deref(&self) -> &Self::Target
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn static_const_macro_expanded_body() {
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ pub const V: i8 = {
+ let e = 123;
+            f(e) // Prevent const eval from evaluating this constant; we want to print the body's code.
+ };
+ };
+}
+m!();
+fn main() { $0V; }
+"#,
+ expect![[r#"
+ *V*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub const V: i8 = {
+ let e = 123;
+ f(e)
+ }
+ ```
+ "#]],
+ );
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ pub static V: i8 = {
+ let e = 123;
+ };
+ };
+}
+m!();
+fn main() { $0V; }
+"#,
+ expect![[r#"
+ *V*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub static V: i8 = {
+ let e = 123;
+ }
+ ```
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index 5aae669aa..34d8bf67a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -1,3 +1,5 @@
+use std::fmt;
+
use either::Either;
use hir::{known, Callable, HasVisibility, HirDisplay, Mutability, Semantics, TypeInfo};
use ide_db::{
@@ -69,7 +71,7 @@ pub enum InlayKind {
pub struct InlayHint {
pub range: TextRange,
pub kind: InlayKind,
- pub label: String,
+ pub label: InlayHintLabel,
pub tooltip: Option<InlayTooltip>,
}
@@ -80,6 +82,83 @@ pub enum InlayTooltip {
HoverOffset(FileId, TextSize),
}
+pub struct InlayHintLabel {
+ pub parts: Vec<InlayHintLabelPart>,
+}
+
+impl InlayHintLabel {
+ pub fn as_simple_str(&self) -> Option<&str> {
+ match &*self.parts {
+ [part] => part.as_simple_str(),
+ _ => None,
+ }
+ }
+
+ pub fn prepend_str(&mut self, s: &str) {
+ match &mut *self.parts {
+ [part, ..] if part.as_simple_str().is_some() => part.text = format!("{s}{}", part.text),
+ _ => self.parts.insert(0, InlayHintLabelPart { text: s.into(), linked_location: None }),
+ }
+ }
+
+ pub fn append_str(&mut self, s: &str) {
+ match &mut *self.parts {
+ [.., part] if part.as_simple_str().is_some() => part.text.push_str(s),
+ _ => self.parts.push(InlayHintLabelPart { text: s.into(), linked_location: None }),
+ }
+ }
+}
+
+impl From<String> for InlayHintLabel {
+ fn from(s: String) -> Self {
+ Self { parts: vec![InlayHintLabelPart { text: s, linked_location: None }] }
+ }
+}
+
+impl fmt::Display for InlayHintLabel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.parts.iter().map(|part| &part.text).format(""))
+ }
+}
+
+impl fmt::Debug for InlayHintLabel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(&self.parts).finish()
+ }
+}
+
+pub struct InlayHintLabelPart {
+ pub text: String,
+ /// Source location represented by this label part. The client will use this to fetch the part's
+ /// hover tooltip, and Ctrl+Clicking the label part will navigate to the definition the location
+ /// refers to (not necessarily the location itself).
+ /// When setting this, no tooltip must be set on the containing hint, or VS Code will display
+ /// them both.
+ pub linked_location: Option<FileRange>,
+}
+
+impl InlayHintLabelPart {
+ pub fn as_simple_str(&self) -> Option<&str> {
+ match self {
+ Self { text, linked_location: None } => Some(text),
+ _ => None,
+ }
+ }
+}
+
+impl fmt::Debug for InlayHintLabelPart {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.as_simple_str() {
+ Some(string) => string.fmt(f),
+ None => f
+ .debug_struct("InlayHintLabelPart")
+ .field("text", &self.text)
+ .field("linked_location", &self.linked_location)
+ .finish(),
+ }
+ }
+}
+
// Feature: Inlay Hints
//
// rust-analyzer shows additional information inline with the source code.
@@ -97,12 +176,6 @@ pub enum InlayTooltip {
// * elided lifetimes
// * compiler inserted reborrows
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **Rust Analyzer: Toggle inlay hints*
-// |===
-//
// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
pub(crate) fn inlay_hints(
db: &RootDatabase,
@@ -192,10 +265,10 @@ fn closing_brace_hints(
) -> Option<()> {
let min_lines = config.closing_brace_hints_min_lines?;
- let name = |it: ast::Name| it.syntax().text_range().start();
+ let name = |it: ast::Name| it.syntax().text_range();
let mut closing_token;
- let (label, name_offset) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
+ let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
closing_token = item_list.r_curly_token()?;
let parent = item_list.syntax().parent()?;
@@ -205,11 +278,11 @@ fn closing_brace_hints(
let imp = sema.to_def(&imp)?;
let ty = imp.self_ty(sema.db);
let trait_ = imp.trait_(sema.db);
-
- (match trait_ {
+ let hint_text = match trait_ {
Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)),
None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)),
- }, None)
+ };
+ (hint_text, None)
},
ast::Trait(tr) => {
(format!("trait {}", tr.name()?), tr.name().map(name))
@@ -253,7 +326,7 @@ fn closing_brace_hints(
(
format!("{}!", mac.path()?),
- mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range().start()),
+ mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range()),
)
} else {
return None;
@@ -278,11 +351,12 @@ fn closing_brace_hints(
return None;
}
+ let linked_location = name_range.map(|range| FileRange { file_id, range });
acc.push(InlayHint {
range: closing_token.text_range(),
kind: InlayKind::ClosingBraceHint,
- label,
- tooltip: name_offset.map(|it| InlayTooltip::HoverOffset(file_id, it)),
+ label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] },
+ tooltip: None, // provided by label part location
});
None
@@ -311,7 +385,7 @@ fn implicit_static_hints(
acc.push(InlayHint {
range: t.text_range(),
kind: InlayKind::LifetimeHint,
- label: "'static".to_owned(),
+ label: "'static".to_owned().into(),
tooltip: Some(InlayTooltip::String("Elided static lifetime".into())),
});
}
@@ -329,10 +403,10 @@ fn fn_lifetime_fn_hints(
return None;
}
- let mk_lt_hint = |t: SyntaxToken, label| InlayHint {
+ let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint {
range: t.text_range(),
kind: InlayKind::LifetimeHint,
- label,
+ label: label.into(),
tooltip: Some(InlayTooltip::String("Elided lifetime".into())),
};
@@ -486,7 +560,8 @@ fn fn_lifetime_fn_hints(
"{}{}",
allocated_lifetimes.iter().format(", "),
if is_empty { "" } else { ", " }
- ),
+ )
+ .into(),
tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
});
}
@@ -535,7 +610,8 @@ fn closure_ret_hints(
range: param_list.syntax().text_range(),
kind: InlayKind::ClosureReturnTypeHint,
label: hint_iterator(sema, &famous_defs, config, &ty)
- .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()),
+ .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string())
+ .into(),
tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())),
});
Some(())
@@ -562,7 +638,7 @@ fn reborrow_hints(
acc.push(InlayHint {
range: expr.syntax().text_range(),
kind: InlayKind::ImplicitReborrowHint,
- label: label.to_string(),
+ label: label.to_string().into(),
tooltip: Some(InlayTooltip::String("Compiler inserted reborrow".into())),
});
Some(())
@@ -620,9 +696,9 @@ fn chaining_hints(
acc.push(InlayHint {
range: expr.syntax().text_range(),
kind: InlayKind::ChainingHint,
- label: hint_iterator(sema, &famous_defs, config, &ty).unwrap_or_else(|| {
- ty.display_truncated(sema.db, config.max_length).to_string()
- }),
+ label: hint_iterator(sema, &famous_defs, config, &ty)
+ .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string())
+ .into(),
tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())),
});
}
@@ -674,7 +750,7 @@ fn param_name_hints(
InlayHint {
range,
kind: InlayKind::ParameterHint,
- label: param_name,
+ label: param_name.into(),
tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())),
}
});
@@ -705,7 +781,7 @@ fn binding_mode_hints(
acc.push(InlayHint {
range,
kind: InlayKind::BindingModeHint,
- label: r.to_string(),
+ label: r.to_string().into(),
tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
});
});
@@ -720,7 +796,7 @@ fn binding_mode_hints(
acc.push(InlayHint {
range,
kind: InlayKind::BindingModeHint,
- label: bm.to_string(),
+ label: bm.to_string().into(),
tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
});
}
@@ -772,7 +848,7 @@ fn bind_pat_hints(
None => pat.syntax().text_range(),
},
kind: InlayKind::TypeHint,
- label,
+ label: label.into(),
tooltip: pat
.name()
.map(|it| it.syntax().text_range())
@@ -1606,6 +1682,74 @@ fn main() {
}
#[test]
+ fn iterator_hint_regression_issue_12674() {
+ // Ensure we don't crash while solving the projection type of iterators.
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+//- minicore: iterators
+struct S<T>(T);
+impl<T> S<T> {
+ fn iter(&self) -> Iter<'_, T> { loop {} }
+}
+struct Iter<'a, T: 'a>(&'a T);
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+ fn next(&mut self) -> Option<Self::Item> { loop {} }
+}
+struct Container<'a> {
+ elements: S<&'a str>,
+}
+struct SliceIter<'a, T>(&'a T);
+impl<'a, T> Iterator for SliceIter<'a, T> {
+ type Item = &'a T;
+ fn next(&mut self) -> Option<Self::Item> { loop {} }
+}
+
+fn main(a: SliceIter<'_, Container>) {
+ a
+ .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v))))
+ .map(|e| e);
+}
+ "#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 484..554,
+ kind: ChainingHint,
+ label: [
+ "impl Iterator<Item = impl Iterator<Item = &&str>>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 484..554,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 484..485,
+ kind: ChainingHint,
+ label: [
+ "SliceIter<Container>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 484..485,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
fn infer_call_method_return_associated_types_with_generic() {
check_types(
r#"
@@ -1880,7 +2024,14 @@ impl<T> Vec<T> {
}
impl<T> IntoIterator for Vec<T> {
- type Item=T;
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+}
+
+struct IntoIter<T> {}
+
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
}
fn main() {
@@ -1910,7 +2061,7 @@ impl<T> Vec<T> {
pub struct Box<T> {}
trait Display {}
-trait Sync {}
+auto trait Sync {}
fn main() {
// The block expression wrapping disables the constructor hint hiding logic
@@ -2223,7 +2374,9 @@ fn main() {
InlayHint {
range: 147..172,
kind: ChainingHint,
- label: "B",
+ label: [
+ "B",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2236,7 +2389,9 @@ fn main() {
InlayHint {
range: 147..154,
kind: ChainingHint,
- label: "A",
+ label: [
+ "A",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2294,7 +2449,9 @@ fn main() {
InlayHint {
range: 143..190,
kind: ChainingHint,
- label: "C",
+ label: [
+ "C",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2307,7 +2464,9 @@ fn main() {
InlayHint {
range: 143..179,
kind: ChainingHint,
- label: "B",
+ label: [
+ "B",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2350,7 +2509,9 @@ fn main() {
InlayHint {
range: 246..283,
kind: ChainingHint,
- label: "B<X<i32, bool>>",
+ label: [
+ "B<X<i32, bool>>",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2363,7 +2524,9 @@ fn main() {
InlayHint {
range: 246..265,
kind: ChainingHint,
- label: "A<X<i32, bool>>",
+ label: [
+ "A<X<i32, bool>>",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2408,7 +2571,9 @@ fn main() {
InlayHint {
range: 174..241,
kind: ChainingHint,
- label: "impl Iterator<Item = ()>",
+ label: [
+ "impl Iterator<Item = ()>",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2421,7 +2586,9 @@ fn main() {
InlayHint {
range: 174..224,
kind: ChainingHint,
- label: "impl Iterator<Item = ()>",
+ label: [
+ "impl Iterator<Item = ()>",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2434,7 +2601,9 @@ fn main() {
InlayHint {
range: 174..206,
kind: ChainingHint,
- label: "impl Iterator<Item = ()>",
+ label: [
+ "impl Iterator<Item = ()>",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2447,7 +2616,9 @@ fn main() {
InlayHint {
range: 174..189,
kind: ChainingHint,
- label: "&mut MyIter",
+ label: [
+ "&mut MyIter",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2489,7 +2660,9 @@ fn main() {
InlayHint {
range: 124..130,
kind: TypeHint,
- label: "Struct",
+ label: [
+ "Struct",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2502,7 +2675,9 @@ fn main() {
InlayHint {
range: 145..185,
kind: ChainingHint,
- label: "Struct",
+ label: [
+ "Struct",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2515,7 +2690,9 @@ fn main() {
InlayHint {
range: 145..168,
kind: ChainingHint,
- label: "Struct",
+ label: [
+ "Struct",
+ ],
tooltip: Some(
HoverRanged(
FileId(
@@ -2528,7 +2705,9 @@ fn main() {
InlayHint {
range: 222..228,
kind: ParameterHint,
- label: "self",
+ label: [
+ "self",
+ ],
tooltip: Some(
HoverOffset(
FileId(
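
To make the shape of the new label type easier to see in isolation, here is a condensed, self-contained copy of `InlayHintLabel`'s core behaviour; the types are renamed and `FileRange` is stubbed out as `()`, so this is a sketch rather than the real API:

```rust
struct Part {
    text: String,
    linked_location: Option<()>,
}

struct Label {
    parts: Vec<Part>,
}

impl Label {
    // A label is a "simple" string only when it is a single part with no
    // linked location attached.
    fn as_simple_str(&self) -> Option<&str> {
        match &*self.parts {
            [part] if part.linked_location.is_none() => Some(part.text.as_str()),
            _ => None,
        }
    }

    fn prepend_str(&mut self, s: &str) {
        match &mut *self.parts {
            // A plain leading part just grows in place...
            [part, ..] if part.linked_location.is_none() => {
                part.text = format!("{s}{}", part.text)
            }
            // ...otherwise the prefix becomes its own, unlinked part.
            _ => self.parts.insert(0, Part { text: s.into(), linked_location: None }),
        }
    }
}

fn main() {
    let mut label =
        Label { parts: vec![Part { text: "Vec<u32>".into(), linked_location: None }] };
    label.prepend_str(": ");
    assert_eq!(label.as_simple_str(), Some(": Vec<u32>"));

    // A part that carries a linked location is no longer a plain string label.
    label.parts[0].linked_location = Some(());
    assert_eq!(label.as_simple_str(), None);
}
```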
diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
index 08621adde..edc48e84d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
@@ -28,7 +28,7 @@ pub struct JoinLinesConfig {
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Join lines**
+// | VS Code | **rust-analyzer: Join lines**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[]
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index dd108fa79..416817ca0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -31,7 +31,6 @@ mod highlight_related;
mod expand_macro;
mod extend_selection;
mod file_structure;
-mod fn_references;
mod folding_ranges;
mod goto_declaration;
mod goto_definition;
@@ -74,7 +73,7 @@ use syntax::SourceFile;
use crate::navigation_target::{ToNav, TryToNav};
pub use crate::{
- annotations::{Annotation, AnnotationConfig, AnnotationKind},
+ annotations::{Annotation, AnnotationConfig, AnnotationKind, AnnotationLocation},
call_hierarchy::CallItem,
expand_macro::ExpandedMacro,
file_structure::{StructureNode, StructureNodeKind},
@@ -82,12 +81,12 @@ pub use crate::{
highlight_related::{HighlightRelatedConfig, HighlightedRange},
hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult},
inlay_hints::{
- ClosureReturnTypeHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip,
- LifetimeElisionHints, ReborrowHints,
+ ClosureReturnTypeHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
+ InlayTooltip, LifetimeElisionHints, ReborrowHints,
},
join_lines::JoinLinesConfig,
markup::Markup,
- moniker::{MonikerKind, MonikerResult, PackageInformation},
+ moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
move_item::Direction,
navigation_target::NavigationTarget,
prime_caches::ParallelPrimeCachesProgress,
@@ -98,7 +97,7 @@ pub use crate::{
static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
syntax_highlighting::{
tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
- HlRange,
+ HighlightConfig, HlRange,
},
};
pub use hir::{Documentation, Semantics};
@@ -236,7 +235,7 @@ impl Analysis {
Env::default(),
Ok(Vec::new()),
false,
- CrateOrigin::CratesIo { repo: None },
+ CrateOrigin::CratesIo { repo: None, name: None },
);
change.change_file(file_id, Some(Arc::new(text)));
change.set_crate_graph(crate_graph);
@@ -429,11 +428,6 @@ impl Analysis {
self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
}
- /// Finds all methods and free functions for the file. Does not return tests!
- pub fn find_all_methods(&self, file_id: FileId) -> Cancellable<Vec<FileRange>> {
- self.with_db(|db| fn_references::find_all_methods(db, file_id))
- }
-
/// Returns a short text describing element at position.
pub fn hover(
&self,
@@ -488,8 +482,18 @@ impl Analysis {
}
    /// Returns crates this file belongs to.
- pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
- self.with_db(|db| parent_module::crate_for(db, file_id))
+ pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| parent_module::crates_for(db, file_id))
+ }
+
+    /// Returns the transitive reverse dependencies of the given crate.
+ pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect())
+ }
+
+    /// Returns crates this file *might* belong to.
+ pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
}
/// Returns the edition of the given crate.
@@ -517,8 +521,12 @@ impl Analysis {
}
/// Computes syntax highlighting for the given file
- pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
- self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
+ pub fn highlight(
+ &self,
+ highlight_config: HighlightConfig,
+ file_id: FileId,
+ ) -> Cancellable<Vec<HlRange>> {
+ self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
}
/// Computes all ranges to highlight for a given item in a file.
@@ -533,9 +541,13 @@ impl Analysis {
}
/// Computes syntax highlighting for the given file range.
- pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
+ pub fn highlight_range(
+ &self,
+ highlight_config: HighlightConfig,
+ frange: FileRange,
+ ) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| {
- syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
+ syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
})
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
index da70cecdd..6e8a6d020 100644
--- a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
@@ -12,7 +12,7 @@ use syntax::{
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Find matching brace**
+// | VS Code | **rust-analyzer: Find matching brace**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[]
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index 6bab9fa1e..852a8fd83 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -14,16 +14,38 @@ use syntax::{AstNode, SyntaxKind::*, T};
use crate::{doc_links::token_as_doc_comment, RangeInfo};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum MonikerDescriptorKind {
+ Namespace,
+ Type,
+ Term,
+ Method,
+ TypeParameter,
+ Parameter,
+ Macro,
+ Meta,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MonikerDescriptor {
+ pub name: Name,
+ pub desc: MonikerDescriptorKind,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MonikerIdentifier {
- crate_name: String,
- path: Vec<Name>,
+ pub crate_name: String,
+ pub description: Vec<MonikerDescriptor>,
}
impl ToString for MonikerIdentifier {
fn to_string(&self) -> String {
match self {
- MonikerIdentifier { path, crate_name } => {
- format!("{}::{}", crate_name, path.iter().map(|x| x.to_string()).join("::"))
+ MonikerIdentifier { description, crate_name } => {
+ format!(
+ "{}::{}",
+ crate_name,
+ description.iter().map(|x| x.name.to_string()).join("::")
+ )
}
}
}
@@ -42,6 +64,12 @@ pub struct MonikerResult {
pub package_information: PackageInformation,
}
+impl MonikerResult {
+ pub fn from_def(db: &RootDatabase, def: Definition, from_crate: Crate) -> Option<Self> {
+ def_to_moniker(db, def, from_crate)
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PackageInformation {
pub name: String,
@@ -90,7 +118,7 @@ pub(crate) fn moniker(
.descend_into_macros(original_token.clone())
.into_iter()
.filter_map(|token| {
- IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+ IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
})
})
@@ -105,13 +133,23 @@ pub(crate) fn def_to_moniker(
def: Definition,
from_crate: Crate,
) -> Option<MonikerResult> {
- if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) {
+ if matches!(
+ def,
+ Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_)
+ | Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ ) {
return None;
}
+
let module = def.module(db)?;
let krate = module.krate();
- let mut path = vec![];
- path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db)));
+ let mut description = vec![];
+ description.extend(module.path_to_root(db).into_iter().filter_map(|x| {
+ Some(MonikerDescriptor { name: x.name(db)?, desc: MonikerDescriptorKind::Namespace })
+ }));
// Handle associated items within a trait
if let Some(assoc) = def.as_assoc_item(db) {
@@ -120,38 +158,109 @@ pub(crate) fn def_to_moniker(
AssocItemContainer::Trait(trait_) => {
// Because different traits can have functions with the same name,
// we have to include the trait name as part of the moniker for uniqueness.
- path.push(trait_.name(db));
+ description.push(MonikerDescriptor {
+ name: trait_.name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
}
AssocItemContainer::Impl(impl_) => {
// Because a struct can implement multiple traits, for implementations
// we add both the struct name and the trait name to the path
if let Some(adt) = impl_.self_ty(db).as_adt() {
- path.push(adt.name(db));
+ description.push(MonikerDescriptor {
+ name: adt.name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
}
if let Some(trait_) = impl_.trait_(db) {
- path.push(trait_.name(db));
+ description.push(MonikerDescriptor {
+ name: trait_.name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
}
}
}
}
if let Definition::Field(it) = def {
- path.push(it.parent_def(db).name(db));
+ description.push(MonikerDescriptor {
+ name: it.parent_def(db).name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
}
- path.push(def.name(db)?);
+ let name_desc = match def {
+        // These are handled by the top-level guard (for performance).
+ Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_)
+ | Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_) => return None,
+
+ Definition::Local(local) => {
+ if !local.is_param(db) {
+ return None;
+ }
+
+ MonikerDescriptor { name: local.name(db), desc: MonikerDescriptorKind::Parameter }
+ }
+ Definition::Macro(m) => {
+ MonikerDescriptor { name: m.name(db), desc: MonikerDescriptorKind::Macro }
+ }
+ Definition::Function(f) => {
+ MonikerDescriptor { name: f.name(db), desc: MonikerDescriptorKind::Method }
+ }
+ Definition::Variant(v) => {
+ MonikerDescriptor { name: v.name(db), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::Const(c) => {
+ MonikerDescriptor { name: c.name(db)?, desc: MonikerDescriptorKind::Term }
+ }
+ Definition::Trait(trait_) => {
+ MonikerDescriptor { name: trait_.name(db), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::TypeAlias(ta) => {
+ MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::TypeParameter }
+ }
+ Definition::Module(m) => {
+ MonikerDescriptor { name: m.name(db)?, desc: MonikerDescriptorKind::Namespace }
+ }
+ Definition::BuiltinType(b) => {
+ MonikerDescriptor { name: b.name(), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::SelfType(imp) => MonikerDescriptor {
+ name: imp.self_ty(db).as_adt()?.name(db),
+ desc: MonikerDescriptorKind::Type,
+ },
+ Definition::Field(it) => {
+ MonikerDescriptor { name: it.name(db), desc: MonikerDescriptorKind::Term }
+ }
+ Definition::Adt(adt) => {
+ MonikerDescriptor { name: adt.name(db), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::Static(s) => {
+ MonikerDescriptor { name: s.name(db), desc: MonikerDescriptorKind::Meta }
+ }
+ };
+
+ description.push(name_desc);
+
Some(MonikerResult {
identifier: MonikerIdentifier {
crate_name: krate.display_name(db)?.crate_name().to_string(),
- path,
+ description,
},
kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
package_information: {
- let name = krate.display_name(db)?.to_string();
- let (repo, version) = match krate.origin(db) {
- CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?),
+ let (name, repo, version) = match krate.origin(db) {
+ CrateOrigin::CratesIo { repo, name } => (
+ name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()),
+ repo?,
+ krate.version(db)?,
+ ),
CrateOrigin::Lang(lang) => (
+ krate.display_name(db)?.canonical_name().to_string(),
"https://github.com/rust-lang/rust/".to_string(),
match lang {
LangCrateOrigin::Other => {
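
A standalone sketch of how the reworked `MonikerIdentifier` flattens into a display string: the crate name first, then every descriptor name joined with `::`. The descriptor kinds are reduced to plain strings here for illustration:

```rust
#[allow(dead_code)]
struct Descriptor {
    name: String,
    desc: &'static str, // e.g. "Namespace", "Type", "Method"
}

fn moniker_string(crate_name: &str, description: &[Descriptor]) -> String {
    let path: Vec<&str> = description.iter().map(|d| d.name.as_str()).collect();
    format!("{}::{}", crate_name, path.join("::"))
}

fn main() {
    let description = vec![
        Descriptor { name: "collections".into(), desc: "Namespace" },
        Descriptor { name: "HashMap".into(), desc: "Type" },
        Descriptor { name: "insert".into(), desc: "Method" },
    ];
    assert_eq!(
        moniker_string("std", &description),
        "std::collections::HashMap::insert"
    );
}
```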
diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
index 02e9fb8b5..ffc4bdd7d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
@@ -19,8 +19,8 @@ pub enum Direction {
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Move item up**
-// | VS Code | **Rust Analyzer: Move item down**
+// | VS Code | **rust-analyzer: Move item up**
+// | VS Code | **rust-analyzer: Move item down**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[]
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
index 9b1f48044..506f9452c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -1,6 +1,6 @@
-use hir::Semantics;
+use hir::{db::DefDatabase, Semantics};
use ide_db::{
- base_db::{CrateId, FileId, FilePosition},
+ base_db::{CrateId, FileId, FileLoader, FilePosition},
RootDatabase,
};
use itertools::Itertools;
@@ -18,7 +18,7 @@ use crate::NavigationTarget;
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Locate parent module**
+// | VS Code | **rust-analyzer: Locate parent module**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]
@@ -55,9 +55,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
}
/// Returns `Vec` for the same reason as `parent_module`
-pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
- let sema = Semantics::new(db);
- sema.to_module_defs(file_id).map(|module| module.krate().into()).unique().collect()
+pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
+ db.relevant_crates(file_id)
+ .iter()
+ .copied()
+ .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
+ .sorted()
+ .collect()
}
#[cfg(test)]
@@ -147,7 +151,7 @@ $0
mod foo;
"#,
);
- assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1);
+ assert_eq!(analysis.crates_for(file_id).unwrap().len(), 1);
}
#[test]
@@ -162,6 +166,6 @@ mod baz;
mod baz;
"#,
);
- assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2);
+ assert_eq!(analysis.crates_for(file_id).unwrap().len(), 2);
}
}
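
The new `crates_for` starts from `relevant_crates` (a source-root based over-approximation) and keeps only the crates whose crate-level def map actually has a module for the file, returned in a stable order. A rough standalone analogue of that filtering, using plain maps instead of the salsa queries:

```rust
use std::collections::BTreeMap;

// "relevant" lists the crates whose source roots could contain the file;
// "modules" records which files each crate's module tree actually includes.
fn crates_for(file: &str, relevant: &[&str], modules: &BTreeMap<&str, Vec<&str>>) -> Vec<String> {
    let mut out: Vec<String> = relevant
        .iter()
        .copied()
        .filter(|krate| modules.get(krate).map_or(false, |files| files.contains(&file)))
        .map(str::to_string)
        .collect();
    out.sort();
    out
}

fn main() {
    let mut modules = BTreeMap::new();
    modules.insert("foo", vec!["lib.rs", "bar.rs"]);
    modules.insert("baz", vec!["main.rs"]);
    assert_eq!(crates_for("bar.rs", &["foo", "baz"], &modules), ["foo"]);
}
```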
diff --git a/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
index 296270036..87b3ef380 100644
--- a/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
@@ -12,8 +12,9 @@ use ide_db::{
salsa::{Database, ParallelDatabase, Snapshot},
Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
},
- FxHashSet, FxIndexMap,
+ FxIndexMap,
};
+use stdx::hash::NoHashHashSet;
use crate::RootDatabase;
@@ -141,7 +142,7 @@ pub(crate) fn parallel_prime_caches(
}
}
-fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
+fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> NoHashHashSet<CrateId> {
// We're only interested in the workspace crates and the `ImportMap`s of their direct
// dependencies, though in practice the latter also compute the `DefMap`s.
// We don't prime transitive dependencies because they're generally not visible in
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 1a6beec18..e942413c1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -14,8 +14,9 @@ use ide_db::{
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},
search::{ReferenceCategory, SearchScope, UsageSearchResult},
- FxHashMap, RootDatabase,
+ RootDatabase,
};
+use stdx::hash::NoHashHashMap;
use syntax::{
algo::find_node_at_offset,
ast::{self, HasName},
@@ -29,7 +30,7 @@ use crate::{FilePosition, NavigationTarget, TryToNav};
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
pub declaration: Option<Declaration>,
- pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
+ pub references: NoHashHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
}
#[derive(Debug, Clone)]
@@ -741,7 +742,7 @@ pub struct Foo {
expect![[r#"
foo Module FileId(0) 0..8 4..7
- FileId(0) 14..17
+ FileId(0) 14..17 Import
"#]],
);
}
@@ -759,7 +760,7 @@ use self$0;
expect![[r#"
foo Module FileId(0) 0..8 4..7
- FileId(1) 4..8
+ FileId(1) 4..8 Import
"#]],
);
}
@@ -774,7 +775,7 @@ use self$0;
expect![[r#"
Module FileId(0) 0..10
- FileId(0) 4..8
+ FileId(0) 4..8 Import
"#]],
);
}
@@ -802,7 +803,7 @@ pub(super) struct Foo$0 {
expect![[r#"
Foo Struct FileId(2) 0..41 18..21
- FileId(1) 20..23
+ FileId(1) 20..23 Import
FileId(1) 47..50
"#]],
);
@@ -965,7 +966,7 @@ fn g() { f(); }
expect![[r#"
f Function FileId(0) 22..31 25..26
- FileId(1) 11..12
+ FileId(1) 11..12 Import
FileId(1) 24..25
"#]],
);
@@ -1423,9 +1424,9 @@ pub use level1::Foo;
expect![[r#"
Foo Struct FileId(0) 0..15 11..14
- FileId(1) 16..19
- FileId(2) 16..19
- FileId(3) 16..19
+ FileId(1) 16..19 Import
+ FileId(2) 16..19 Import
+ FileId(3) 16..19 Import
"#]],
);
}
@@ -1453,7 +1454,7 @@ lib::foo!();
expect![[r#"
foo Macro FileId(1) 0..61 29..32
- FileId(0) 46..49
+ FileId(0) 46..49 Import
FileId(2) 0..3
FileId(3) 5..8
"#]],
@@ -1616,7 +1617,7 @@ struct Foo;
expect![[r#"
derive_identity Derive FileId(2) 1..107 45..60
- FileId(0) 17..31
+ FileId(0) 17..31 Import
FileId(0) 56..70
"#]],
);
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index bec770ed9..0181c6b8e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -116,7 +116,7 @@ impl Runnable {
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Run**
+// | VS Code | **rust-analyzer: Run**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
@@ -202,7 +202,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Peek related tests**
+// | VS Code | **rust-analyzer: Peek related tests**
// |===
pub(crate) fn related_tests(
db: &RootDatabase,
@@ -373,11 +373,13 @@ pub(crate) fn runnable_impl(
let adt_name = ty.as_adt()?.name(sema.db);
let mut ty_args = ty.type_arguments().peekable();
let params = if ty_args.peek().is_some() {
- format!("<{}>", ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db))))
+ format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty.display(sema.db))))
} else {
String::new()
};
- let test_id = TestId::Path(format!("{}{}", adt_name, params));
+ let mut test_id = format!("{}{}", adt_name, params);
+ test_id.retain(|c| c != ' ');
+ let test_id = TestId::Path(test_id);
Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg })
}
@@ -441,10 +443,11 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
format_to!(
path,
"<{}>",
- ty_args.format_with(", ", |ty, cb| cb(&ty.display(db)))
+ ty_args.format_with(",", |ty, cb| cb(&ty.display(db)))
);
}
format_to!(path, "::{}", def_name);
+ path.retain(|c| c != ' ');
return Some(path);
}
}
@@ -2067,13 +2070,23 @@ mod tests {
$0
struct Foo<T, U>;
+/// ```
+/// ```
impl<T, U> Foo<T, U> {
/// ```rust
/// ````
fn t() {}
}
+
+/// ```
+/// ```
+impl Foo<Foo<(), ()>, ()> {
+ /// ```
+ /// ```
+ fn t() {}
+}
"#,
- &[DocTest],
+ &[DocTest, DocTest, DocTest, DocTest],
expect![[r#"
[
Runnable {
@@ -2082,12 +2095,64 @@ impl<T, U> Foo<T, U> {
file_id: FileId(
0,
),
- full_range: 47..85,
+ full_range: 20..103,
+ focus_range: 47..56,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Foo<T,U>",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 63..101,
+ name: "t",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Foo<T,U>::t",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 105..188,
+ focus_range: 126..146,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Foo<Foo<(),()>,()>",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 153..186,
name: "t",
},
kind: DocTest {
test_id: Path(
- "Foo<T, U>::t",
+ "Foo<Foo<(),()>,()>::t",
),
},
cfg: None,
@@ -2160,4 +2225,190 @@ macro_rules! foo {
"#]],
);
}
+
+ #[test]
+ fn test_paths_with_raw_ident() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod r#mod {
+ #[test]
+ fn r#fn() {}
+
+ /// ```
+ /// ```
+ fn r#for() {}
+
+ /// ```
+ /// ```
+ struct r#struct<r#type>(r#type);
+
+ /// ```
+ /// ```
+ impl<r#type> r#struct<r#type> {
+ /// ```
+ /// ```
+ fn r#fn() {}
+ }
+
+ enum r#enum {}
+ impl r#struct<r#enum> {
+ /// ```
+ /// ```
+ fn r#fn() {}
+ }
+
+ trait r#trait {}
+
+ /// ```
+ /// ```
+ impl<T> r#trait for r#struct<T> {}
+}
+"#,
+ &[TestMod, Test, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..461,
+ focus_range: 5..10,
+ name: "r#mod",
+ kind: Module,
+ description: "mod r#mod",
+ },
+ kind: TestMod {
+ path: "r#mod",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..41,
+ focus_range: 32..36,
+ name: "r#fn",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "r#mod::r#fn",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 47..84,
+ name: "r#for",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "r#mod::r#for",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 90..146,
+ name: "r#struct",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "r#mod::r#struct",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 152..266,
+ focus_range: 189..205,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "r#struct<r#type>",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 216..260,
+ name: "r#fn",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "r#mod::r#struct<r#type>::r#fn",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 323..367,
+ name: "r#fn",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "r#mod::r#struct<r#enum>::r#fn",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 401..459,
+ focus_range: 445..456,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "r#struct<T>",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ )
+ }
}
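
The runnables change joins generic arguments with `,` and strips remaining spaces from doctest paths, presumably so generated test names such as `Foo<Foo<(),()>,()>::t` survive being passed around as test filters. A standalone check of the resulting ids, matching the expectations in the tests above:

```rust
fn doctest_path(adt: &str, args: &[&str], method: Option<&str>) -> String {
    let mut path = if args.is_empty() {
        adt.to_string()
    } else {
        format!("{}<{}>", adt, args.join(","))
    };
    if let Some(m) = method {
        path.push_str("::");
        path.push_str(m);
    }
    // Drop any spaces carried in by pretty-printed type arguments.
    path.retain(|c| c != ' ');
    path
}

fn main() {
    assert_eq!(doctest_path("Foo", &["T", "U"], Some("t")), "Foo<T,U>::t");
    assert_eq!(
        doctest_path("Foo", &["Foo<(), ()>", "()"], Some("t")),
        "Foo<Foo<(),()>,()>::t"
    );
}
```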
diff --git a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
index 15cb89dcc..2d8662764 100644
--- a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
@@ -12,7 +12,7 @@ use ide_db::{
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Shuffle Crate Graph**
+// | VS Code | **rust-analyzer: Shuffle Crate Graph**
// |===
pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
let crate_graph = db.crate_graph();
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index d74b64041..27ad1a948 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -130,8 +130,11 @@ impl StaticIndex<'_> {
syntax::NodeOrToken::Node(_) => None,
syntax::NodeOrToken::Token(x) => Some(x),
});
- let hover_config =
- HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+ let hover_config = HoverConfig {
+ links_in_hover: true,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ };
let tokens = tokens.filter(|token| {
matches!(
token.kind(),
@@ -204,12 +207,10 @@ impl StaticIndex<'_> {
fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
for token in sema.descend_into_macros(token) {
- let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() {
return Some(x);
- } else {
- continue;
- };
+ }
}
None
}
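
Note on the `definitions` → `definitions_no_ops` switch above: the static index presumably should not emit an entry every time an operator token merely resolves to a trait method. A purely illustrative sketch of where the two would diverge (the claim that `+` classifies to the `Add` impl is an assumption about `IdentClass`, not something stated in this hunk):

    // Hypothetical example: with plain `definitions`, the `+` token below could
    // classify to the `core::ops::Add` implementation and show up in the static
    // index; `definitions_no_ops` is assumed to skip such operator resolutions.
    fn sum(a: u32, b: u32) -> u32 {
        a + b // the token `get_definition` would be asked about
    }
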
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
index 3191870eb..20810c25b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/status.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -1,4 +1,4 @@
-use std::{fmt, iter::FromIterator, sync::Arc};
+use std::{fmt, sync::Arc};
use hir::{ExpandResult, MacroFile};
use ide_db::base_db::{
@@ -29,7 +29,7 @@ fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Status**
+// | VS Code | **rust-analyzer: Status**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[]
pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
@@ -45,7 +45,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
if let Some(file_id) = file_id {
format_to!(buf, "\nFile info:\n");
- let crates = crate::parent_module::crate_for(db, file_id);
+ let crates = crate::parent_module::crates_for(db, file_id);
if crates.is_empty() {
format_to!(buf, "Does not belong to any crate");
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 3fb49b45d..50371d620 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -14,7 +14,7 @@ mod html;
mod tests;
use hir::{Name, Semantics};
-use ide_db::{FxHashMap, RootDatabase};
+use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{
ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
};
@@ -24,7 +24,7 @@ use crate::{
escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
macro_::MacroHighlighter, tags::Highlight,
},
- FileId, HlMod, HlTag,
+ FileId, HlMod, HlOperator, HlPunct, HlTag,
};
pub(crate) use html::highlight_as_html;
@@ -36,6 +36,26 @@ pub struct HlRange {
pub binding_hash: Option<u64>,
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct HighlightConfig {
+ /// Whether to highlight strings
+ pub strings: bool,
+ /// Whether to highlight punctuation
+ pub punctuation: bool,
+ /// Whether to specialize punctuation highlights
+ pub specialize_punctuation: bool,
+ /// Whether to highlight operators
+ pub operator: bool,
+ /// Whether to specialize operator highlights
+ pub specialize_operator: bool,
+ /// Whether to inject highlights into doc comments
+ pub inject_doc_comment: bool,
+ /// Whether to highlight the macro call bang
+ pub macro_bang: bool,
+ /// Whether to highlight unresolved things by their syntax
+ pub syntactic_name_ref_highlighting: bool,
+}
+
// Feature: Semantic Syntax Highlighting
//
// rust-analyzer highlights the code semantically.
@@ -155,9 +175,9 @@ pub struct HlRange {
// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
pub(crate) fn highlight(
db: &RootDatabase,
+ config: HighlightConfig,
file_id: FileId,
range_to_highlight: Option<TextRange>,
- syntactic_name_ref_highlighting: bool,
) -> Vec<HlRange> {
let _p = profile::span("highlight");
let sema = Semantics::new(db);
@@ -183,26 +203,18 @@ pub(crate) fn highlight(
Some(it) => it.krate(),
None => return hl.to_vec(),
};
- traverse(
- &mut hl,
- &sema,
- file_id,
- &root,
- krate,
- range_to_highlight,
- syntactic_name_ref_highlighting,
- );
+ traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
hl.to_vec()
}
fn traverse(
hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>,
+ config: HighlightConfig,
file_id: FileId,
root: &SyntaxNode,
krate: hir::Crate,
range_to_highlight: TextRange,
- syntactic_name_ref_highlighting: bool,
) {
let is_unlinked = sema.to_module_def(file_id).is_none();
let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
@@ -323,9 +335,11 @@ fn traverse(
Enter(it) => it,
Leave(NodeOrToken::Token(_)) => continue,
Leave(NodeOrToken::Node(node)) => {
- // Doc comment highlighting injection, we do this when leaving the node
- // so that we overwrite the highlighting of the doc comment itself.
- inject::doc_comment(hl, sema, file_id, &node);
+ if config.inject_doc_comment {
+ // Doc comment highlighting injection, we do this when leaving the node
+ // so that we overwrite the highlighting of the doc comment itself.
+ inject::doc_comment(hl, sema, config, file_id, &node);
+ }
continue;
}
};
@@ -400,7 +414,8 @@ fn traverse(
let string_to_highlight = ast::String::cast(descended_token.clone());
if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
if string.is_raw() {
- if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() {
+ if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
+ {
continue;
}
}
@@ -421,7 +436,7 @@ fn traverse(
sema,
krate,
&mut bindings_shadow_count,
- syntactic_name_ref_highlighting,
+ config.syntactic_name_ref_highlighting,
name_like,
),
NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
@@ -439,6 +454,29 @@ fn traverse(
// something unresolvable. FIXME: There should be a way to prevent that
continue;
}
+
+ // apply config filtering
+ match &mut highlight.tag {
+ HlTag::StringLiteral if !config.strings => continue,
+ // If punctuation is disabled, make the macro bang part of the macro call again.
+ tag @ HlTag::Punctuation(HlPunct::MacroBang) => {
+ if !config.macro_bang {
+ *tag = HlTag::Symbol(SymbolKind::Macro);
+ } else if !config.specialize_punctuation {
+ *tag = HlTag::Punctuation(HlPunct::Other);
+ }
+ }
+ HlTag::Punctuation(_) if !config.punctuation => continue,
+ tag @ HlTag::Punctuation(_) if !config.specialize_punctuation => {
+ *tag = HlTag::Punctuation(HlPunct::Other);
+ }
+ HlTag::Operator(_) if !config.operator && highlight.mods.is_empty() => continue,
+ tag @ HlTag::Operator(_) if !config.specialize_operator => {
+ *tag = HlTag::Operator(HlOperator::Other);
+ }
+ _ => (),
+ }
+
if inside_attribute {
highlight |= HlMod::Attribute
}
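
Taken together, the new `HighlightConfig` plus the filtering match above replace the old single `syntactic_name_ref_highlighting` flag. A minimal sketch, from inside the `ide` crate (since `highlight` is `pub(crate)`), of how a caller would now drive it; the field values mirror the defaults used by `html.rs` below, while the function name and surrounding setup are assumed:

    use crate::syntax_highlighting::{highlight, HighlightConfig};
    use crate::{FileId, HlRange, RootDatabase};

    fn highlight_whole_file(db: &RootDatabase, file_id: FileId) -> Vec<HlRange> {
        let config = HighlightConfig {
            strings: true,
            punctuation: true,
            specialize_punctuation: true,
            operator: true,
            specialize_operator: true,
            inject_doc_comment: true,
            macro_bang: true,
            syntactic_name_ref_highlighting: false,
        };
        // `None` asks for the whole file rather than a sub-range.
        highlight(db, config, file_id, None)
    }
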
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 9395e914c..e7d0a8be7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -87,9 +87,9 @@ fn punctuation(
let parent = token.parent();
let parent_kind = parent.as_ref().map_or(EOF, SyntaxNode::kind);
match (kind, parent_kind) {
- (T![?], _) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
+ (T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
(T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
- (T![&], _) => {
+ (T![&], REF_EXPR) => {
let h = HlTag::Operator(HlOperator::Other).into();
let is_unsafe = parent
.and_then(ast::RefExpr::cast)
@@ -100,7 +100,9 @@ fn punctuation(
h
}
}
- (T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] | T![.], _) => HlOperator::Other.into(),
+ (T![::] | T![->] | T![=>] | T![..] | T![..=] | T![=] | T![@] | T![.], _) => {
+ HlOperator::Other.into()
+ }
(T![!], MACRO_CALL | MACRO_RULES) => HlPunct::MacroBang.into(),
(T![!], NEVER_TYPE) => HlTag::BuiltinType.into(),
(T![!], PREFIX_EXPR) => HlOperator::Logical.into(),
@@ -129,7 +131,7 @@ fn punctuation(
(T![+=] | T![-=] | T![*=] | T![/=] | T![%=], BIN_EXPR) => {
Highlight::from(HlOperator::Arithmetic) | HlMod::Mutable
}
- (T![|] | T![&] | T![!] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(),
+ (T![|] | T![&] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(),
(T![|=] | T![&=] | T![^=] | T![>>=] | T![<<=], BIN_EXPR) => {
Highlight::from(HlOperator::Bitwise) | HlMod::Mutable
}
@@ -137,7 +139,6 @@ fn punctuation(
(T![>] | T![<] | T![==] | T![>=] | T![<=] | T![!=], BIN_EXPR) => {
HlOperator::Comparison.into()
}
- (_, PREFIX_EXPR | BIN_EXPR | RANGE_EXPR | RANGE_PAT | REST_PAT) => HlOperator::Other.into(),
(_, ATTR) => HlTag::AttributeBracket.into(),
(kind, _) => match kind {
T!['['] | T![']'] => HlPunct::Bracket,
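
The effect of the tightened `?` and `&` arms (and of dropping the catch-all for PREFIX_EXPR/BIN_EXPR/RANGE_* parents) is easiest to see on concrete tokens; the outcomes noted below match the fixture updates further down, but the snippet itself is purely illustrative:

    fn demo(opt: Option<i32>) -> i32 {
        // `&` in a REF_EXPR keeps its operator highlight; `&` anywhere else
        // (e.g. the `&self` parameters in the HTML fixtures below) now falls
        // through to plain punctuation.
        let borrowed = &opt;
        let _range = 1..=3;   // `..=` is now matched explicitly as an operator
        borrowed.unwrap_or(0) // `?` would only be control flow inside a TRY_EXPR
    }
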
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 9777c014c..e91fd7f12 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -5,7 +5,10 @@ use oorandom::Rand32;
use stdx::format_to;
use syntax::AstNode;
-use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
+use crate::{
+ syntax_highlighting::{highlight, HighlightConfig},
+ FileId, RootDatabase,
+};
pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
let parse = db.parse(file_id);
@@ -20,7 +23,21 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
)
}
- let hl_ranges = highlight(db, file_id, None, false);
+ let hl_ranges = highlight(
+ db,
+ HighlightConfig {
+ strings: true,
+ punctuation: true,
+ specialize_punctuation: true,
+ specialize_operator: true,
+ operator: true,
+ inject_doc_comment: true,
+ macro_bang: true,
+ syntactic_name_ref_highlighting: false,
+ },
+ file_id,
+ None,
+ );
let text = parse.tree().syntax().to_string();
let mut buf = String::new();
buf.push_str(STYLE);
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index f376f9fda..9139528c7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -15,13 +15,14 @@ use syntax::{
use crate::{
doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
- syntax_highlighting::{highlights::Highlights, injector::Injector},
+ syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig},
Analysis, HlMod, HlRange, HlTag, RootDatabase,
};
pub(super) fn ra_fixture(
hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>,
+ config: HighlightConfig,
literal: &ast::String,
expanded: &ast::String,
) -> Option<()> {
@@ -63,7 +64,13 @@ pub(super) fn ra_fixture(
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
- for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
+ for mut hl_range in analysis
+ .highlight(
+ HighlightConfig { syntactic_name_ref_highlighting: false, ..config },
+ tmp_file_id,
+ )
+ .unwrap()
+ {
for range in inj.map_range_up(hl_range.range) {
if let Some(range) = literal.map_range_up(range) {
hl_range.range = range;
@@ -86,6 +93,7 @@ const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
pub(super) fn doc_comment(
hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>,
+ config: HighlightConfig,
src_file_id: FileId,
node: &SyntaxNode,
) {
@@ -206,7 +214,14 @@ pub(super) fn doc_comment(
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
- if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) {
+ if let Ok(ranges) = analysis.with_db(|db| {
+ super::highlight(
+ db,
+ HighlightConfig { syntactic_name_ref_highlighting: true, ..config },
+ tmp_file_id,
+ None,
+ )
+ }) {
for HlRange { range, highlight, binding_hash } in ranges {
for range in inj.map_range_up(range) {
hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
index 5262770f3..3949f1189 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -199,7 +199,7 @@ impl fmt::Display for HlTag {
}
impl HlMod {
- const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[
+ const ALL: &'static [HlMod; 19] = &[
HlMod::Associated,
HlMod::Async,
HlMod::Attribute,
@@ -296,7 +296,7 @@ impl Highlight {
Highlight { tag, mods: HlMods::default() }
}
pub fn is_empty(&self) -> bool {
- self.tag == HlTag::None && self.mods == HlMods::default()
+ self.tag == HlTag::None && self.mods.is_empty()
}
}
@@ -330,6 +330,10 @@ impl ops::BitOr<HlMod> for Highlight {
}
impl HlMods {
+ pub fn is_empty(&self) -> bool {
+ self.0 == 0
+ }
+
pub fn contains(self, m: HlMod) -> bool {
self.0 & m.mask() == m.mask()
}
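
`HlMods` is evidently a bitset keyed by the `HlMod` discriminants, so `is_empty` is just a zero check and `contains` a mask test. A standalone sketch of the same pattern (the types here are hypothetical stand-ins, not the real `HlMods`/`HlMod`; the mask-per-variant layout is an assumption based on `m.mask()` above):

    #[derive(Clone, Copy, PartialEq, Eq, Default)]
    struct Mods(u32);

    #[derive(Clone, Copy)]
    enum Mod { Associated = 0, Async = 1, Attribute = 2 }

    impl Mod {
        fn mask(self) -> u32 { 1 << self as u32 }
    }

    impl Mods {
        fn is_empty(self) -> bool { self.0 == 0 }
        fn contains(self, m: Mod) -> bool { self.0 & m.mask() == m.mask() }
        fn insert(&mut self, m: Mod) { self.0 |= m.mask(); }
    }

    fn main() {
        let mut mods = Mods::default();
        assert!(mods.is_empty());
        mods.insert(Mod::Async);
        assert!(mods.contains(Mod::Async) && !mods.is_empty());
    }

The `ALL` length change from the computed `HlMod::Unsafe as u8 as usize + 1` to a literal 19 fits the same reading: the computed form presumably only held while `Unsafe` happened to be the last variant.
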
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
index e07fd3925..9ed65fbc8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
@@ -48,15 +48,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">impl</span> <span class="struct">foo</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
- <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">is_not_static</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">t</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration static trait">t_is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference trait">t_is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">t_is_not_static</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="trait">t</span> <span class="keyword">for</span> <span class="struct">foo</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static trait">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
- <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference trait">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference trait">is_not_static</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index a747b4bc1..18045f1f5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -56,7 +56,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
<span class="brace">}</span>
-<span class="comment documentation">/// This is an impl with a code block.</span>
+<span class="comment documentation">/// This is an impl of </span><span class="struct documentation injected intra_doc_link">[`Foo`]</span><span class="comment documentation"> with a code block.</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
@@ -125,7 +125,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">/// ```sh</span>
<span class="comment documentation">/// echo 1</span>
<span class="comment documentation">/// ```</span>
- <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
<span class="bool_literal">true</span>
<span class="brace">}</span>
<span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index a97802cbb..9f2b1926b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -61,11 +61,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">Bar</span> <span class="brace">{</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="trait">Bar</span> <span class="keyword">for</span> <span class="struct">Foo</span> <span class="brace">{</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
<span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
<span class="brace">}</span>
<span class="brace">}</span>
@@ -75,11 +75,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="value_param">f</span><span class="operator">.</span><span class="function associated consuming">baz</span><span class="parenthesis">(</span><span class="self_keyword consuming mutable">self</span><span class="parenthesis">)</span>
<span class="brace">}</span>
- <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
<span class="brace">}</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
<span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
<span class="brace">}</span>
<span class="brace">}</span>
@@ -96,11 +96,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="value_param">f</span><span class="operator">.</span><span class="function associated">baz</span><span class="parenthesis">(</span><span class="self_keyword">self</span><span class="parenthesis">)</span>
<span class="brace">}</span>
- <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
<span class="brace">}</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
<span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
<span class="brace">}</span>
<span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
index ced7d22f0..abcd80c28 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -42,7 +42,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="operator">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="punctuation">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r#"</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
index 2d85fc8c9..f98e0b1cd 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -45,8 +45,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<pre><code>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="comma">,</span> <span class="lifetime declaration">'b</span><span class="comma">,</span> <span class="lifetime declaration">'c</span><span class="angle">&gt;</span> <span class="keyword">where</span> <span class="lifetime">'a</span><span class="colon">:</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="colon">:</span> <span class="lifetime">'static</span> <span class="brace">{</span>
- <span class="field declaration">field</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'a</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
- <span class="field declaration">field2</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'static</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="field declaration">field</span><span class="colon">:</span> <span class="punctuation">&</span><span class="lifetime">'a</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="field declaration">field2</span><span class="colon">:</span> <span class="punctuation">&</span><span class="lifetime">'static</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
<span class="brace">}</span>
<span class="keyword">impl</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="angle">&gt;</span> <span class="struct">Foo</span><span class="angle">&lt;</span><span class="lifetime">'_</span><span class="comma">,</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="angle">&gt;</span>
<span class="keyword">where</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index c627bc9b0..a626cda3f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -62,16 +62,16 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
- <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Use `panic_str` instead of `panic_display::&lt;&str&gt;` for non_fmt_panic lint.</span>
- <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Special-case the single-argument case for const_panic.</span>
- <span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="operator">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
+ <span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="punctuation">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="colon">:</span>expr<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_fmt<span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 0716bae75..1992bdc6a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -49,7 +49,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">unsafe_deref</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
- <span class="punctuation">*</span><span class="parenthesis">(</span><span class="operator">&</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="keyword">as</span> <span class="punctuation">*</span><span class="keyword">const</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ <span class="punctuation">*</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="keyword">as</span> <span class="punctuation">*</span><span class="keyword">const</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
<span class="brace">}</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable unsafe">MUT_GLOBAL</span><span class="colon">:</span> <span class="struct">Struct</span> <span class="operator">=</span> <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
@@ -63,7 +63,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">struct</span> <span class="struct declaration">Struct</span> <span class="brace">{</span> <span class="field declaration">field</span><span class="colon">:</span> <span class="builtin_type">i32</span> <span class="brace">}</span>
<span class="keyword">impl</span> <span class="struct">Struct</span> <span class="brace">{</span>
- <span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function associated declaration reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function associated declaration reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">repr</span><span class="parenthesis attribute">(</span><span class="none attribute">packed</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
@@ -78,11 +78,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">fn</span> <span class="function declaration">unsafe_trait_bound</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="colon">:</span> <span class="trait">UnsafeTrait</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="type_param">T</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">DoTheAutoref</span> <span class="brace">{</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="trait">DoTheAutoref</span> <span class="keyword">for</span> <span class="builtin_type">u16</span> <span class="brace">{</span>
- <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 99be7c664..46cc667fc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -4,7 +4,18 @@ use expect_test::{expect_file, ExpectFile};
use ide_db::SymbolKind;
use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
-use crate::{fixture, FileRange, HlTag, TextRange};
+use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange};
+
+const HL_CONFIG: HighlightConfig = HighlightConfig {
+ strings: true,
+ punctuation: true,
+ specialize_punctuation: true,
+ specialize_operator: true,
+ operator: true,
+ inject_doc_comment: true,
+ macro_bang: true,
+ syntactic_name_ref_highlighting: false,
+};
#[test]
fn attributes() {
@@ -613,7 +624,7 @@ struct Foo {
bar: bool,
}
-/// This is an impl with a code block.
+/// This is an impl of [`Foo`] with a code block.
///
/// ```
/// fn foo() {
@@ -958,7 +969,7 @@ pub struct Struct;
#[test]
#[cfg_attr(
- all(unix, not(target_pointer_width = "64")),
+ not(all(unix, target_pointer_width = "64")),
ignore = "depends on `DefaultHasher` outputs"
)]
fn test_rainbow_highlighting() {
@@ -996,7 +1007,10 @@ struct Foo {
// The "x"
let highlights = &analysis
- .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
+ .highlight_range(
+ HL_CONFIG,
+ FileRange { file_id, range: TextRange::at(45.into(), 1.into()) },
+ )
.unwrap();
assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
@@ -1011,7 +1025,7 @@ macro_rules! test {}
}"#
.trim(),
);
- let _ = analysis.highlight(file_id).unwrap();
+ let _ = analysis.highlight(HL_CONFIG, file_id).unwrap();
}
/// Highlights the code given by the `ra_fixture` argument, renders the
@@ -1035,7 +1049,7 @@ fn benchmark_syntax_highlighting_long_struct() {
let hash = {
let _pt = bench("syntax highlighting long struct");
analysis
- .highlight(file_id)
+ .highlight(HL_CONFIG, file_id)
.unwrap()
.iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
@@ -1061,7 +1075,7 @@ fn syntax_highlighting_not_quadratic() {
let time = Instant::now();
let hash = analysis
- .highlight(file_id)
+ .highlight(HL_CONFIG, file_id)
.unwrap()
.iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
@@ -1086,7 +1100,7 @@ fn benchmark_syntax_highlighting_parser() {
let hash = {
let _pt = bench("syntax highlighting parser");
analysis
- .highlight(file_id)
+ .highlight(HL_CONFIG, file_id)
.unwrap()
.iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
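
The flipped `cfg_attr` condition means `test_rainbow_highlighting` is now ignored everywhere except 64-bit unix, rather than only on non-64-bit unix targets; the attribute pattern, for reference (a hypothetical test of the same shape as the one in the hunk):

    #[test]
    #[cfg_attr(
        not(all(unix, target_pointer_width = "64")),
        ignore = "depends on `DefaultHasher` outputs"
    )]
    fn hash_dependent() {
        // Runs normally only on 64-bit unix; marked `ignore` on all other targets.
    }
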
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
index 9003e7cd3..4256fea0f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
@@ -12,7 +12,7 @@ use syntax::{
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Show Syntax Tree**
+// | VS Code | **rust-analyzer: Show Syntax Tree**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065586-068bdb80-91b1-11eb-9507-fee67f9f45a0.gif[]
pub(crate) fn syntax_tree(
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
index 51291a645..17a1e385b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
@@ -3,8 +3,9 @@ use std::sync::Arc;
use dot::{Id, LabelText};
use ide_db::{
base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
- FxHashSet, RootDatabase,
+ RootDatabase,
};
+use stdx::hash::NoHashHashSet;
// Feature: View Crate Graph
//
@@ -16,7 +17,7 @@ use ide_db::{
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: View Crate Graph**
+// | VS Code | **rust-analyzer: View Crate Graph**
// |===
pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
let crate_graph = db.crate_graph();
@@ -41,7 +42,7 @@ pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,
struct DotCrateGraph {
graph: Arc<CrateGraph>,
- crates_to_render: FxHashSet<CrateId>,
+ crates_to_render: NoHashHashSet<CrateId>,
}
type Edge<'a> = (CrateId, &'a Dependency);
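
Swapping `FxHashSet<CrateId>` for `stdx::hash::NoHashHashSet<CrateId>` presumably works because `CrateId` is already a small unique integer, so the key can serve as its own hash. A self-contained sketch of that general technique (the hasher below is hypothetical and not stdx's actual implementation):

    use std::collections::HashSet;
    use std::hash::{BuildHasherDefault, Hasher};

    /// Identity hasher: passes integer keys through instead of hashing them.
    #[derive(Default)]
    struct NoHash(u64);

    impl Hasher for NoHash {
        fn finish(&self) -> u64 { self.0 }
        fn write(&mut self, _bytes: &[u8]) {
            unimplemented!("only integer keys are supported")
        }
        fn write_u32(&mut self, n: u32) { self.0 = n as u64; }
        fn write_u64(&mut self, n: u64) { self.0 = n; }
    }

    type NoHashHashSet<T> = HashSet<T, BuildHasherDefault<NoHash>>;

    fn main() {
        let mut crates: NoHashHashSet<u32> = NoHashHashSet::default();
        crates.insert(0);
        crates.insert(1);
        assert!(crates.contains(&1) && !crates.contains(&2));
    }
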
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
index 7312afe53..d2bbbf6d2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
@@ -1,4 +1,4 @@
-use hir::{Function, Semantics};
+use hir::{DefWithBody, Semantics};
use ide_db::base_db::FilePosition;
use ide_db::RootDatabase;
use syntax::{algo::find_node_at_offset, ast, AstNode};
@@ -8,7 +8,7 @@ use syntax::{algo::find_node_at_offset, ast, AstNode};
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: View Hir**
+// | VS Code | **rust-analyzer: View Hir**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
@@ -19,8 +19,12 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let function = find_node_at_offset::<ast::Fn>(source_file.syntax(), position.offset)?;
-
- let function: Function = sema.to_def(&function)?;
- Some(function.debug_hir(db))
+ let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let def: DefWithBody = match item {
+ ast::Item::Fn(it) => sema.to_def(&it)?.into(),
+ ast::Item::Const(it) => sema.to_def(&it)?.into(),
+ ast::Item::Static(it) => sema.to_def(&it)?.into(),
+ _ => return None,
+ };
+ Some(def.debug_hir(db))
}
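
With `Function` replaced by `DefWithBody`, "View Hir" now also responds when the cursor is inside const and static initializers, not just inside fn bodies; roughly the positions sketched below (illustrative source only, not part of the patch):

    // Placing the cursor in any of these bodies now yields HIR output:
    fn compute() -> i32 { 1 + 1 }     // fn bodies worked before this change
    const LIMIT: i32 = 10 * 10;       // const initializers are now handled
    static GREETING: &str = "hello";  // static initializers are now handled
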
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
index 3dc03085d..9c1f93356 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -9,7 +9,7 @@ use ide_db::RootDatabase;
// |===
// | Editor | Action Name
//
-// | VS Code | **Rust Analyzer: Debug ItemTree**
+// | VS Code | **rust-analyzer: Debug ItemTree**
// |===
pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
db.file_item_tree(file_id.into()).pretty_print()