Diffstat: src/tools/rust-analyzer/crates/ide-db
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/Cargo.toml                                          |   5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs                             |   8
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs                                 |   4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/defs.rs                                         |  80
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs                        |  13
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs                           |  31
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs                        |   2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/lib.rs                                          |   4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/line_index.rs                                   |   6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs                               |   3
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/rename.rs                                       |   2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/search.rs                                       |  74
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/source_change.rs                                | 147
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs                 |   6
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs           | 267
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs   |   2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs                      |  10
17 files changed, 591 insertions(+), 73 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index a1b0bd6cb..cf0bcd5c9 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -15,11 +15,12 @@ tracing = "0.1.35"
rayon = "1.5.3"
fst = { version = "0.4.7", default-features = false }
rustc-hash = "1.1.0"
-once_cell = "1.12.0"
+once_cell = "1.15.0"
either = "1.7.0"
-itertools = "0.10.3"
+itertools = "0.10.5"
arrayvec = "0.7.2"
indexmap = "1.9.1"
+memchr = "2.5.0"
stdx = { path = "../stdx", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
index 7303ef8b7..7109c6fd1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
@@ -12,7 +12,7 @@ use crate::RootDatabase;
#[derive(Debug)]
pub struct ActiveParameter {
pub ty: Type,
- pub pat: Either<ast::SelfParam, ast::Pat>,
+ pub pat: Option<Either<ast::SelfParam, ast::Pat>>,
}
impl ActiveParameter {
@@ -27,12 +27,12 @@ impl ActiveParameter {
return None;
}
let (pat, ty) = params.swap_remove(idx);
- pat.map(|pat| ActiveParameter { ty, pat })
+ Some(ActiveParameter { ty, pat })
}
pub fn ident(&self) -> Option<ast::Name> {
- self.pat.as_ref().right().and_then(|param| match param {
- ast::Pat::IdentPat(ident) => ident.name(),
+ self.pat.as_ref().and_then(|param| match param {
+ Either::Right(ast::Pat::IdentPat(ident)) => ident.name(),
_ => None,
})
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index 98b0e9c94..b1ee9b58d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -20,7 +20,7 @@ impl RootDatabase {
pub fn apply_change(&mut self, change: Change) {
let _p = profile::span("RootDatabase::apply_change");
self.request_cancellation();
- tracing::info!("apply_change {:?}", change);
+ tracing::trace!("apply_change {:?}", change);
if let Some(roots) = &change.roots {
let mut local_roots = FxHashSet::default();
let mut library_roots = FxHashSet::default();
@@ -45,7 +45,7 @@ impl RootDatabase {
// |===
// | Editor | Action Name
//
- // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)**
+ // | VS Code | **rust-analyzer: Memory Usage (Clears Database)**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index aeaca00ec..6c13c0397 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -127,10 +127,12 @@ impl Definition {
}
}
+// FIXME: IdentClass as a name no longer fits
#[derive(Debug)]
pub enum IdentClass {
NameClass(NameClass),
NameRefClass(NameRefClass),
+ Operator(OperatorClass),
}
impl IdentClass {
@@ -147,6 +149,11 @@ impl IdentClass {
.map(IdentClass::NameClass)
.or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
},
+ ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
+ ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
+ ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
+ ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
+ ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
_ => None,
}
}
@@ -184,6 +191,33 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
+ IdentClass::Operator(
+ OperatorClass::Await(func)
+ | OperatorClass::Prefix(func)
+ | OperatorClass::Bin(func)
+ | OperatorClass::Index(func)
+ | OperatorClass::Try(func),
+ ) => res.push(Definition::Function(func)),
+ }
+ res
+ }
+
+ pub fn definitions_no_ops(self) -> ArrayVec<Definition, 2> {
+ let mut res = ArrayVec::new();
+ match self {
+ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+ res.push(it)
+ }
+ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+ res.push(Definition::Local(local_def));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+ res.push(Definition::Local(local_ref));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::Operator(_) => (),
}
res
}
@@ -332,6 +366,52 @@ impl NameClass {
}
}
+#[derive(Debug)]
+pub enum OperatorClass {
+ Await(Function),
+ Prefix(Function),
+ Index(Function),
+ Try(Function),
+ Bin(Function),
+}
+
+impl OperatorClass {
+ pub fn classify_await(
+ sema: &Semantics<'_, RootDatabase>,
+ await_expr: &ast::AwaitExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_await_to_poll(await_expr).map(OperatorClass::Await)
+ }
+
+ pub fn classify_prefix(
+ sema: &Semantics<'_, RootDatabase>,
+ prefix_expr: &ast::PrefixExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_prefix_expr(prefix_expr).map(OperatorClass::Prefix)
+ }
+
+ pub fn classify_try(
+ sema: &Semantics<'_, RootDatabase>,
+ try_expr: &ast::TryExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_try_expr(try_expr).map(OperatorClass::Try)
+ }
+
+ pub fn classify_index(
+ sema: &Semantics<'_, RootDatabase>,
+ index_expr: &ast::IndexExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_index_expr(index_expr).map(OperatorClass::Index)
+ }
+
+ pub fn classify_bin(
+ sema: &Semantics<'_, RootDatabase>,
+ bin_expr: &ast::BinExpr,
+ ) -> Option<OperatorClass> {
+ sema.resolve_bin_expr(bin_expr).map(OperatorClass::Bin)
+ }
+}
+
/// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than
/// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a
/// reference most of the time, but there are a couple of annoying exceptions.
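The new `OperatorClass` arm lets operator tokens (`.await`, `?`, unary/binary operators, indexing) be classified to the trait method they desugar to. A rough consumer-side sketch of how an ide feature might use this, assuming an `IdentClass::classify_node` entry point as in this file (the `operator_target` helper itself is hypothetical):

```rust
use hir::{Function, Semantics};
use ide_db::{
    defs::{IdentClass, OperatorClass},
    RootDatabase,
};
use syntax::SyntaxNode;

/// Hypothetical helper: resolve an operator under the cursor to the trait
/// method it desugars to (e.g. `.await` -> `Future::poll`, `a[i]` -> `Index::index`).
fn operator_target(sema: &Semantics<'_, RootDatabase>, node: &SyntaxNode) -> Option<Function> {
    match IdentClass::classify_node(sema, node)? {
        IdentClass::Operator(
            OperatorClass::Await(func)
            | OperatorClass::Prefix(func)
            | OperatorClass::Bin(func)
            | OperatorClass::Index(func)
            | OperatorClass::Try(func),
        ) => Some(func),
        _ => None,
    }
}
```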
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index 26ef86155..40a6a3e89 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -212,18 +212,20 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefix_kind: PrefixKind,
+ prefer_no_std: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_imports");
- self.search_for(sema, Some(prefix_kind))
+ self.search_for(sema, Some(prefix_kind), prefer_no_std)
}
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths(
&self,
sema: &Semantics<'_, RootDatabase>,
+ prefer_no_std: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_relative_paths");
- self.search_for(sema, None)
+ self.search_for(sema, None, prefer_no_std)
}
pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
@@ -242,6 +244,7 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>,
+ prefer_no_std: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for");
@@ -252,6 +255,7 @@ impl ImportAssets {
item_for_path_search(sema.db, item)?,
&self.module_with_candidate,
prefixed,
+ prefer_no_std,
)
};
@@ -564,11 +568,12 @@ fn get_mod_path(
item_to_search: ItemInNs,
module_with_candidate: &Module,
prefixed: Option<PrefixKind>,
+ prefer_no_std: bool,
) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed {
- module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind)
+ module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind, prefer_no_std)
} else {
- module_with_candidate.find_use_path(db, item_to_search)
+ module_with_candidate.find_use_path(db, item_to_search, prefer_no_std)
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
index c14182279..9be1d3663 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -7,7 +7,10 @@ use std::cmp::Ordering;
use hir::Semantics;
use syntax::{
algo,
- ast::{self, make, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind},
+ ast::{
+ self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility,
+ PathSegmentKind,
+ },
ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
};
@@ -192,20 +195,24 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
insert_use_(scope, &path, cfg.group, use_item);
}
-pub fn remove_path_if_in_use_stmt(path: &ast::Path) {
+pub fn ast_to_remove_for_path_in_use_stmt(path: &ast::Path) -> Option<Box<dyn Removable>> {
// FIXME: improve this
if path.parent_path().is_some() {
- return;
+ return None;
}
- if let Some(use_tree) = path.syntax().parent().and_then(ast::UseTree::cast) {
- if use_tree.use_tree_list().is_some() || use_tree.star_token().is_some() {
- return;
- }
- if let Some(use_) = use_tree.syntax().parent().and_then(ast::Use::cast) {
- use_.remove();
- return;
- }
- use_tree.remove();
+ let use_tree = path.syntax().parent().and_then(ast::UseTree::cast)?;
+ if use_tree.use_tree_list().is_some() || use_tree.star_token().is_some() {
+ return None;
+ }
+ if let Some(use_) = use_tree.syntax().parent().and_then(ast::Use::cast) {
+ return Some(Box::new(use_));
+ }
+ Some(Box::new(use_tree))
+}
+
+pub fn remove_path_if_in_use_stmt(path: &ast::Path) {
+ if let Some(node) = ast_to_remove_for_path_in_use_stmt(path) {
+ node.remove();
}
}
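Splitting removal into a lookup (`ast_to_remove_for_path_in_use_stmt`) and a later `remove()` call lets callers gather everything to delete before mutating the tree. A minimal sketch of that two-phase pattern; the `remove_uses` caller is hypothetical, the imports mirror the ones in the diff:

```rust
use ide_db::imports::insert_use::ast_to_remove_for_path_in_use_stmt;
use syntax::ast::{self, edit_in_place::Removable};

/// Hypothetical caller: collect removable use-tree nodes first, then apply all
/// removals, so the read phase never observes a partially edited tree.
fn remove_uses(paths: impl Iterator<Item = ast::Path>) {
    let to_remove: Vec<Box<dyn Removable>> =
        paths.filter_map(|path| ast_to_remove_for_path_in_use_stmt(&path)).collect();
    for node in to_remove {
        node.remove();
    }
}
```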
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
index 7fb4b90e6..371d642c1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -225,7 +225,7 @@ fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering {
}
/// Compares two paths, if one ends earlier than the other the has_tl parameters decide which is
-/// greater as a a path that has a tree list should be greater, while one that just ends without
+/// greater as a path that has a tree list should be greater, while one that just ends without
/// a tree list should be considered less.
pub(super) fn use_tree_path_cmp(
a: &ast::Path,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 966bba616..e0bc0f89f 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -38,6 +38,7 @@ pub mod syntax_helpers {
pub mod node_ext;
pub mod insert_whitespace_into_node;
pub mod format_string;
+ pub mod format_string_exprs;
pub use parser::LexedStr;
}
@@ -52,6 +53,7 @@ use hir::{
db::{AstDatabase, DefDatabase, HirDatabase},
symbols::FileSymbolKind,
};
+use stdx::hash::NoHashHashSet;
use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
@@ -118,7 +120,7 @@ impl FileLoader for RootDatabase {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
- fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
index 68ad07ee8..75d49ff2f 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
@@ -2,7 +2,7 @@
//! representation.
use std::{iter, mem};
-use rustc_hash::FxHashMap;
+use stdx::hash::NoHashHashMap;
use syntax::{TextRange, TextSize};
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -10,7 +10,7 @@ pub struct LineIndex {
/// Offset to the beginning of each line, zero-based
pub(crate) newlines: Vec<TextSize>,
/// List of non-ASCII characters on each line
- pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
+ pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -55,7 +55,7 @@ impl Utf16Char {
impl LineIndex {
pub fn new(text: &str) -> LineIndex {
- let mut utf16_lines = FxHashMap::default();
+ let mut utf16_lines = NoHashHashMap::default();
let mut utf16_chars = Vec::new();
let mut newlines = vec![0.into()];
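`NoHashHashMap` (from `stdx::hash`) skips hashing for keys that are already small integers, such as line numbers here or `FileId`s elsewhere in this commit. A self-contained sketch of what such an identity-hashing map can look like; this is an illustration, not the actual `stdx` implementation:

```rust
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, Hasher};

/// Illustrative identity hasher: the integer key is used as the hash value.
#[derive(Default)]
struct NoHashHasher(u64);

impl Hasher for NoHashHasher {
    fn finish(&self) -> u64 {
        self.0
    }
    fn write(&mut self, _bytes: &[u8]) {
        unimplemented!("only fixed-width integer keys are supported")
    }
    fn write_u32(&mut self, n: u32) {
        self.0 = u64::from(n);
    }
}

type NoHashHashMap<K, V> = HashMap<K, V, BuildHasherDefault<NoHashHasher>>;

fn main() {
    // Same shape as `utf16_lines`: line number -> data for that line.
    let mut utf16_lines: NoHashHashMap<u32, Vec<char>> = NoHashHashMap::default();
    utf16_lines.insert(3, vec!['é']);
    assert_eq!(utf16_lines[&3], vec!['é']);
}
```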
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index 40af9e6fe..12d873b4a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -173,6 +173,7 @@ impl<'a> Ctx<'a> {
let found_path = self.target_module.find_use_path(
self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref),
+ false,
)?;
match ast::make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty),
@@ -209,7 +210,7 @@ impl<'a> Ctx<'a> {
}
let found_path =
- self.target_module.find_use_path(self.source_scope.db.upcast(), def)?;
+ self.target_module.find_use_path(self.source_scope.db.upcast(), def, false)?;
let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 517fe3f24..49b81265e 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -82,7 +82,7 @@ impl Definition {
}
/// Textual range of the identifier which will change when renaming this
- /// `Definition`. Note that some definitions, like buitin types, can't be
+ /// `Definition`. Note that some definitions, like builtin types, can't be
/// renamed.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let res = match self {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index bd038cdaa..82b85f2fa 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -4,12 +4,14 @@
//! get a super-set of matches. Then, we confirm each match using precise
//! name resolution.
-use std::{convert::TryInto, mem, sync::Arc};
+use std::{mem, sync::Arc};
use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
+use memchr::memmem::Finder;
use once_cell::unsync::Lazy;
-use rustc_hash::FxHashMap;
+use parser::SyntaxKind;
+use stdx::hash::NoHashHashMap;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use crate::{
@@ -20,7 +22,7 @@ use crate::{
#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
- pub references: FxHashMap<FileId, Vec<FileReference>>,
+ pub references: NoHashHashMap<FileId, Vec<FileReference>>,
}
impl UsageSearchResult {
@@ -45,7 +47,7 @@ impl UsageSearchResult {
impl IntoIterator for UsageSearchResult {
type Item = (FileId, Vec<FileReference>);
- type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
+ type IntoIter = <NoHashHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.references.into_iter()
@@ -67,6 +69,7 @@ pub enum ReferenceCategory {
// Create
Write,
Read,
+ Import,
// FIXME: Some day should be able to search in doc comments. Would probably
// need to switch from enum to bitflags then?
// DocComment
@@ -78,17 +81,17 @@ pub enum ReferenceCategory {
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
- entries: FxHashMap<FileId, Option<TextRange>>,
+ entries: NoHashHashMap<FileId, Option<TextRange>>,
}
impl SearchScope {
- fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
+ fn new(entries: NoHashHashMap<FileId, Option<TextRange>>) -> SearchScope {
SearchScope { entries }
}
/// Build a search scope spanning the entire crate graph of files.
fn crate_graph(db: &RootDatabase) -> SearchScope {
- let mut entries = FxHashMap::default();
+ let mut entries = NoHashHashMap::default();
let graph = db.crate_graph();
for krate in graph.iter() {
@@ -102,7 +105,7 @@ impl SearchScope {
/// Build a search scope spanning all the reverse dependencies of the given crate.
fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
- let mut entries = FxHashMap::default();
+ let mut entries = NoHashHashMap::default();
for rev_dep in of.transitive_reverse_dependencies(db) {
let root_file = rev_dep.root_file(db);
let source_root_id = db.file_source_root(root_file);
@@ -117,14 +120,12 @@ impl SearchScope {
let root_file = of.root_file(db);
let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id);
- SearchScope {
- entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
- }
+ SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
}
/// Build a search scope spanning the given module and all its submodules.
fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
- let mut entries = FxHashMap::default();
+ let mut entries = NoHashHashMap::default();
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
@@ -157,7 +158,7 @@ impl SearchScope {
/// Build an empty search scope.
pub fn empty() -> SearchScope {
- SearchScope::new(FxHashMap::default())
+ SearchScope::new(NoHashHashMap::default())
}
/// Build an empty search scope spanning the given file.
@@ -238,6 +239,7 @@ impl Definition {
DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
+ DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
};
return match def {
Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
@@ -402,21 +404,26 @@ impl<'a> FindUsages<'a> {
.or_else(|| ty.as_builtin().map(|builtin| builtin.name()))
})
};
- self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.to_smol_str())
+ // We need to unescape the name in case it is written without "r#" in earlier
+ // editions of Rust where it isn't a keyword.
+ self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.unescaped().to_smol_str())
}
};
let name = match &name {
Some(s) => s.as_str(),
None => return,
};
+ let finder = &Finder::new(name);
+ let include_self_kw_refs =
+ self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self")));
- // these can't be closures because rust infers the lifetimes wrong ...
+ // for<'a> |text: &'a str, name: &'a str, search_range: TextRange| -> impl Iterator<Item = TextSize> + 'a { ... }
fn match_indices<'a>(
text: &'a str,
- name: &'a str,
+ finder: &'a Finder<'a>,
search_range: TextRange,
) -> impl Iterator<Item = TextSize> + 'a {
- text.match_indices(name).filter_map(move |(idx, _)| {
+ finder.find_iter(text.as_bytes()).filter_map(move |idx| {
let offset: TextSize = idx.try_into().unwrap();
if !search_range.contains_inclusive(offset) {
return None;
@@ -425,6 +432,7 @@ impl<'a> FindUsages<'a> {
})
}
+ // for<'a> |scope: &'a SearchScope| -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a { ... }
fn scope_files<'a>(
sema: &'a Semantics<'_, RootDatabase>,
scope: &'a SearchScope,
@@ -448,7 +456,7 @@ impl<'a> FindUsages<'a> {
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
// Search for occurrences of the items name
- for offset in match_indices(&text, name, search_range) {
+ for offset in match_indices(&text, finder, search_range) {
for name in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@@ -460,8 +468,8 @@ impl<'a> FindUsages<'a> {
}
}
// Search for occurrences of the `Self` referring to our type
- if let Some(self_ty) = &self.include_self_kw_refs {
- for offset in match_indices(&text, "Self", search_range) {
+ if let Some((self_ty, finder)) = &include_self_kw_refs {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
return;
@@ -477,20 +485,22 @@ impl<'a> FindUsages<'a> {
let scope = search_scope
.intersection(&SearchScope::module_and_children(self.sema.db, module));
- let is_crate_root = module.is_crate_root(self.sema.db);
+ let is_crate_root =
+ module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
+ let finder = &Finder::new("super");
for (text, file_id, search_range) in scope_files(sema, &scope) {
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
- for offset in match_indices(&text, "super", search_range) {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_name_ref(&name_ref, sink) {
return;
}
}
}
- if is_crate_root {
- for offset in match_indices(&text, "crate", search_range) {
+ if let Some(finder) = &is_crate_root {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_name_ref(&name_ref, sink) {
return;
@@ -531,8 +541,9 @@ impl<'a> FindUsages<'a> {
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
+ let finder = &Finder::new("self");
- for offset in match_indices(&text, "self", search_range) {
+ for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
if self.found_self_module_name_ref(&name_ref, sink) {
return;
@@ -577,7 +588,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
- category: None,
+ category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import),
};
sink(file_id, reference)
}
@@ -756,7 +767,7 @@ impl ReferenceCategory {
fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
// Only Locals and Fields have accesses for now.
if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
- return None;
+ return is_name_ref_in_import(r).then(|| ReferenceCategory::Import);
}
let mode = r.syntax().ancestors().find_map(|node| {
@@ -783,3 +794,12 @@ impl ReferenceCategory {
mode.or(Some(ReferenceCategory::Read))
}
}
+
+fn is_name_ref_in_import(name_ref: &ast::NameRef) -> bool {
+ name_ref
+ .syntax()
+ .parent()
+ .and_then(ast::PathSegment::cast)
+ .and_then(|it| it.parent_path().top_path().syntax().parent())
+ .map_or(false, |it| it.kind() == SyntaxKind::USE_TREE)
+}
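The text pre-search now builds a `memchr::memmem::Finder` once per needle and reuses it across every file in the scope, instead of calling `str::match_indices` with the needle each time. A minimal standalone sketch of that pattern (only the `memchr` crate is needed; the sample file contents are made up):

```rust
use memchr::memmem::Finder;

fn main() {
    // Build the searcher once; it precomputes data for the needle.
    let finder = Finder::new("super");
    let files = ["use super::foo;", "fn main() {}", "mod a { use super::*; }"];
    for (idx, text) in files.iter().enumerate() {
        // `find_iter` yields the byte offset of every match in the haystack.
        for offset in finder.find_iter(text.as_bytes()) {
            println!("file {idx}: `super` at byte offset {offset}");
        }
    }
}
```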
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 8132c73ef..8e338061d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -3,16 +3,18 @@
//!
//! It can be viewed as a dual for `Change`.
-use std::{collections::hash_map::Entry, iter};
+use std::{collections::hash_map::Entry, iter, mem};
use base_db::{AnchoredPathBuf, FileId};
-use rustc_hash::FxHashMap;
-use stdx::never;
-use text_edit::TextEdit;
+use stdx::{hash::NoHashHashMap, never};
+use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
+use text_edit::{TextEdit, TextEditBuilder};
+
+use crate::SnippetCap;
#[derive(Default, Debug, Clone)]
pub struct SourceChange {
- pub source_file_edits: FxHashMap<FileId, TextEdit>,
+ pub source_file_edits: NoHashHashMap<FileId, TextEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool,
}
@@ -21,7 +23,7 @@ impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub fn from_edits(
- source_file_edits: FxHashMap<FileId, TextEdit>,
+ source_file_edits: NoHashHashMap<FileId, TextEdit>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@@ -75,12 +77,141 @@ impl Extend<FileSystemEdit> for SourceChange {
}
}
-impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
- fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
+impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
+ fn from(source_file_edits: NoHashHashMap<FileId, TextEdit>) -> SourceChange {
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
}
}
+pub struct SourceChangeBuilder {
+ pub edit: TextEditBuilder,
+ pub file_id: FileId,
+ pub source_change: SourceChange,
+ pub trigger_signature_help: bool,
+
+ /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
+ pub mutated_tree: Option<TreeMutator>,
+}
+
+pub struct TreeMutator {
+ immutable: SyntaxNode,
+ mutable_clone: SyntaxNode,
+}
+
+impl TreeMutator {
+ pub fn new(immutable: &SyntaxNode) -> TreeMutator {
+ let immutable = immutable.ancestors().last().unwrap();
+ let mutable_clone = immutable.clone_for_update();
+ TreeMutator { immutable, mutable_clone }
+ }
+
+ pub fn make_mut<N: AstNode>(&self, node: &N) -> N {
+ N::cast(self.make_syntax_mut(node.syntax())).unwrap()
+ }
+
+ pub fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
+ let ptr = SyntaxNodePtr::new(node);
+ ptr.to_node(&self.mutable_clone)
+ }
+}
+
+impl SourceChangeBuilder {
+ pub fn new(file_id: FileId) -> SourceChangeBuilder {
+ SourceChangeBuilder {
+ edit: TextEdit::builder(),
+ file_id,
+ source_change: SourceChange::default(),
+ trigger_signature_help: false,
+ mutated_tree: None,
+ }
+ }
+
+ pub fn edit_file(&mut self, file_id: FileId) {
+ self.commit();
+ self.file_id = file_id;
+ }
+
+ fn commit(&mut self) {
+ if let Some(tm) = self.mutated_tree.take() {
+ algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
+ }
+
+ let edit = mem::take(&mut self.edit).finish();
+ if !edit.is_empty() {
+ self.source_change.insert_source_edit(self.file_id, edit);
+ }
+ }
+
+ pub fn make_mut<N: AstNode>(&mut self, node: N) -> N {
+ self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
+ }
+ /// Returns a copy of the `node`, suitable for mutation.
+ ///
+ /// Syntax trees in rust-analyzer are typically immutable, and mutating
+ /// operations panic at runtime. However, it is possible to make a copy of
+ /// the tree and mutate the copy freely. Mutation is based on interior
+ /// mutability, and different nodes in the same tree see the same mutations.
+ ///
+ /// The typical pattern for an assist is to find specific nodes in the read
+ /// phase, and then get their mutable counterparts using `make_mut` in the
+ /// mutable state.
+ pub fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
+ self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
+ }
+
+ /// Remove specified `range` of text.
+ pub fn delete(&mut self, range: TextRange) {
+ self.edit.delete(range)
+ }
+ /// Insert specified `text` at the given `offset`
+ pub fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
+ self.edit.insert(offset, text.into())
+ }
+ /// Insert specified `snippet` at the given `offset`
+ pub fn insert_snippet(
+ &mut self,
+ _cap: SnippetCap,
+ offset: TextSize,
+ snippet: impl Into<String>,
+ ) {
+ self.source_change.is_snippet = true;
+ self.insert(offset, snippet);
+ }
+ /// Replaces specified `range` of text with a given string.
+ pub fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
+ self.edit.replace(range, replace_with.into())
+ }
+ /// Replaces specified `range` of text with a given `snippet`.
+ pub fn replace_snippet(
+ &mut self,
+ _cap: SnippetCap,
+ range: TextRange,
+ snippet: impl Into<String>,
+ ) {
+ self.source_change.is_snippet = true;
+ self.replace(range, snippet);
+ }
+ pub fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
+ algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
+ }
+ pub fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
+ let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
+ self.source_change.push_file_system_edit(file_system_edit);
+ }
+ pub fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
+ let file_system_edit = FileSystemEdit::MoveFile { src, dst };
+ self.source_change.push_file_system_edit(file_system_edit);
+ }
+ pub fn trigger_signature_help(&mut self) {
+ self.trigger_signature_help = true;
+ }
+
+ pub fn finish(mut self) -> SourceChange {
+ self.commit();
+ mem::take(&mut self.source_change)
+ }
+}
+
#[derive(Debug, Clone)]
pub enum FileSystemEdit {
CreateFile { dst: AnchoredPathBuf, initial_contents: String },
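A rough sketch of how an assist might drive the new `SourceChangeBuilder`; the `example_change` helper is hypothetical, `file_id` and `strukt` are assumed to come from the surrounding analysis, and the in-place mutation step is elided:

```rust
use base_db::FileId;
use ide_db::source_change::{SourceChange, SourceChangeBuilder};
use syntax::{ast, TextSize};

fn example_change(file_id: FileId, strukt: ast::Struct) -> SourceChange {
    let mut builder = SourceChangeBuilder::new(file_id);
    // Plain text edits accumulate directly in `builder.edit`.
    builder.insert(TextSize::from(0), "//! touched by an assist\n");
    // Structured edits go through a `clone_for_update` copy of the tree; on
    // `finish`/`commit` the copy is diffed against the immutable original and
    // folded into the same text edit.
    let _strukt = builder.make_mut(strukt);
    // ... mutate `_strukt` in place here via `ted`/`edit_in_place` helpers ...
    builder.finish()
}
```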
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
index f48a57008..2d6927cee 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
@@ -1,7 +1,8 @@
//! Tools to work with format string literals for the `format_args!` family of macros.
+use crate::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::{
ast::{self, IsString},
- AstNode, AstToken, TextRange, TextSize,
+ TextRange, TextSize,
};
pub fn is_format_string(string: &ast::String) -> bool {
@@ -14,8 +15,7 @@ pub fn is_format_string(string: &ast::String) -> bool {
// This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
// strings. It still fails for `concat!("{", "}")`, but that is rare.
(|| {
- let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
- let name = macro_call.path()?.segment()?.name_ref()?;
+ let name = macro_call_for_string_token(string)?.path()?.segment()?.name_ref()?;
if !matches!(
name.text().as_str(),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
new file mode 100644
index 000000000..ac6c6e8fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
@@ -0,0 +1,267 @@
+//! Tools to work with expressions present in format string literals for the `format_args!` family of macros.
+//! Primarily meant for assists and completions.
+
+/// Enum for representing extracted format string args.
+/// Can either be extracted expressions (which includes identifiers),
+/// or placeholders `{}`.
+#[derive(Debug, PartialEq, Eq)]
+pub enum Arg {
+ Placeholder,
+ Ident(String),
+ Expr(String),
+}
+
+/**
+ Adds placeholders like `$1` and `$2` in place of [`Arg::Placeholder`],
+ and unwraps the [`Arg::Ident`] and [`Arg::Expr`] enums.
+ ```rust
+ # use ide_db::syntax_helpers::format_string_exprs::*;
+ assert_eq!(with_placeholders(vec![Arg::Ident("ident".to_owned()), Arg::Placeholder, Arg::Expr("expr + 2".to_owned())]), vec!["ident".to_owned(), "$1".to_owned(), "expr + 2".to_owned()])
+ ```
+*/
+
+pub fn with_placeholders(args: Vec<Arg>) -> Vec<String> {
+ let mut placeholder_id = 1;
+ args.into_iter()
+ .map(move |a| match a {
+ Arg::Expr(s) | Arg::Ident(s) => s,
+ Arg::Placeholder => {
+ let s = format!("${placeholder_id}");
+ placeholder_id += 1;
+ s
+ }
+ })
+ .collect()
+}
+
+/**
+ Parser for a format-like string. It is more permissive about string contents,
+ as we expect variable placeholders to be filled with expressions.
+
+ Built for completions and assists, and escapes `\` and `$` in output.
+ (See the comments on `get_receiver_text()` for detail.)
+ Splits a format string that may contain expressions
+ like
+ ```rust
+ # use ide_db::syntax_helpers::format_string_exprs::*;
+ assert_eq!(parse_format_exprs("{ident} {} {expr + 42} ").unwrap(), ("{} {} {} ".to_owned(), vec![Arg::Ident("ident".to_owned()), Arg::Placeholder, Arg::Expr("expr + 42".to_owned())]));
+ ```
+*/
+pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
+ #[derive(Debug, Clone, Copy, PartialEq)]
+ enum State {
+ NotArg,
+ MaybeArg,
+ Expr,
+ Ident,
+ MaybeIncorrect,
+ FormatOpts,
+ }
+
+ let mut state = State::NotArg;
+ let mut current_expr = String::new();
+ let mut extracted_expressions = Vec::new();
+ let mut output = String::new();
+
+ // Count of open braces inside of an expression.
+ // We assume that user knows what they're doing, thus we treat it like a correct pattern, e.g.
+ // "{MyStruct { val_a: 0, val_b: 1 }}".
+ let mut inexpr_open_count = 0;
+
+ let mut chars = input.chars().peekable();
+ while let Some(chr) = chars.next() {
+ match (state, chr) {
+ (State::NotArg, '{') => {
+ output.push(chr);
+ state = State::MaybeArg;
+ }
+ (State::NotArg, '}') => {
+ output.push(chr);
+ state = State::MaybeIncorrect;
+ }
+ (State::NotArg, _) => {
+ if matches!(chr, '\\' | '$') {
+ output.push('\\');
+ }
+ output.push(chr);
+ }
+ (State::MaybeIncorrect, '}') => {
+ // It's okay, we met "}}".
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::MaybeIncorrect, _) => {
+ // Error in the string.
+ return Err(());
+ }
+ // Escaped braces `{{`
+ (State::MaybeArg, '{') => {
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::MaybeArg, '}') => {
+ // This is an empty sequence '{}'.
+ output.push(chr);
+ extracted_expressions.push(Arg::Placeholder);
+ state = State::NotArg;
+ }
+ (State::MaybeArg, _) => {
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+
+ // While Rust uses the unicode sets of XID_start and XID_continue for Identifiers
+ // this is probably the best we can do to avoid a false positive
+ if chr.is_alphabetic() || chr == '_' {
+ state = State::Ident;
+ } else {
+ state = State::Expr;
+ }
+ }
+ (State::Ident | State::Expr, '}') => {
+ if inexpr_open_count == 0 {
+ output.push(chr);
+
+ if matches!(state, State::Expr) {
+ extracted_expressions.push(Arg::Expr(current_expr.trim().into()));
+ } else {
+ extracted_expressions.push(Arg::Ident(current_expr.trim().into()));
+ }
+
+ current_expr = String::new();
+ state = State::NotArg;
+ } else {
+ // We're closing one brace met before inside of the expression.
+ current_expr.push(chr);
+ inexpr_open_count -= 1;
+ }
+ }
+ (State::Ident | State::Expr, ':') if matches!(chars.peek(), Some(':')) => {
+ // path separator
+ state = State::Expr;
+ current_expr.push_str("::");
+ chars.next();
+ }
+ (State::Ident | State::Expr, ':') => {
+ if inexpr_open_count == 0 {
+ // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}"
+ output.push(chr);
+
+ if matches!(state, State::Expr) {
+ extracted_expressions.push(Arg::Expr(current_expr.trim().into()));
+ } else {
+ extracted_expressions.push(Arg::Ident(current_expr.trim().into()));
+ }
+
+ current_expr = String::new();
+ state = State::FormatOpts;
+ } else {
+ // We're inside of braced expression, assume that it's a struct field name/value delimiter.
+ current_expr.push(chr);
+ }
+ }
+ (State::Ident | State::Expr, '{') => {
+ state = State::Expr;
+ current_expr.push(chr);
+ inexpr_open_count += 1;
+ }
+ (State::Ident | State::Expr, _) => {
+ if !(chr.is_alphanumeric() || chr == '_' || chr == '#') {
+ state = State::Expr;
+ }
+
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+ }
+ (State::FormatOpts, '}') => {
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::FormatOpts, _) => {
+ if matches!(chr, '\\' | '$') {
+ output.push('\\');
+ }
+ output.push(chr);
+ }
+ }
+ }
+
+ if state != State::NotArg {
+ return Err(());
+ }
+
+ Ok((output, extracted_expressions))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use expect_test::{expect, Expect};
+
+ fn check(input: &str, expect: &Expect) {
+ let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![]));
+ let outcome_repr = if !exprs.is_empty() {
+ format!("{}; {}", output, with_placeholders(exprs).join(", "))
+ } else {
+ output
+ };
+
+ expect.assert_eq(&outcome_repr);
+ }
+
+ #[test]
+ fn format_str_parser() {
+ let test_vector = &[
+ ("no expressions", expect![["no expressions"]]),
+ (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
+ ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]),
+ ("{expr:?}", expect![["{:?}; expr"]]),
+ ("{expr:1$}", expect![[r"{:1\$}; expr"]]),
+ ("{$0}", expect![[r"{}; \$0"]]),
+ ("{malformed", expect![["-"]]),
+ ("malformed}", expect![["-"]]),
+ ("{{correct", expect![["{{correct"]]),
+ ("correct}}", expect![["correct}}"]]),
+ ("{correct}}}", expect![["{}}}; correct"]]),
+ ("{correct}}}}}", expect![["{}}}}}; correct"]]),
+ ("{incorrect}}", expect![["-"]]),
+ ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]),
+ ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }}",
+ expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }:?}",
+ expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
+ ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
+ ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
+ ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
+ ];
+
+ for (input, output) in test_vector {
+ check(input, output)
+ }
+ }
+
+ #[test]
+ fn arg_type() {
+ assert_eq!(
+ parse_format_exprs("{_ident} {r#raw_ident} {expr.obj} {name {thing: 42} } {}")
+ .unwrap()
+ .1,
+ vec![
+ Arg::Ident("_ident".to_owned()),
+ Arg::Ident("r#raw_ident".to_owned()),
+ Arg::Expr("expr.obj".to_owned()),
+ Arg::Expr("name {thing: 42}".to_owned()),
+ Arg::Placeholder
+ ]
+ );
+ }
+}
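The two helpers compose naturally: `parse_format_exprs` pulls the expressions out of the braces and `with_placeholders` turns bare `{}` slots into snippet tab stops. A small sketch of how an assist or completion might combine them; the `build_format_call` helper is hypothetical, and the `ide_db::...` path follows the doctest above:

```rust
use ide_db::syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders};

/// Hypothetical helper: turn `"{name} is {age + 1}"` into
/// `format!("{} is {}", name, age + 1)`.
fn build_format_call(raw: &str) -> Option<String> {
    let (template, args) = parse_format_exprs(raw).ok()?;
    let args = with_placeholders(args);
    Some(format!("format!(\"{}\", {})", template, args.join(", ")))
}
```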
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
index f54ae6c92..8bc093a85 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
@@ -95,7 +95,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
AS_KW | DYN_KW | IMPL_KW | CONST_KW => {
mods.push(do_ws(after, tok));
}
- T![;] => {
+ T![;] if is_next(|it| it != R_CURLY, true) => {
if indent > 0 {
mods.push(do_indent(after, tok, indent));
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
index 84bde4d44..39710b8f1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -2,8 +2,8 @@
use itertools::Itertools;
use parser::T;
use syntax::{
- ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind},
- AstNode, Preorder, RustLanguage, WalkEvent,
+ ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind},
+ AstNode, AstToken, Preorder, RustLanguage, WalkEvent,
};
pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
@@ -315,7 +315,6 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
| ast::Expr::IndexExpr(_)
| ast::Expr::Literal(_)
| ast::Expr::MacroExpr(_)
- | ast::Expr::MacroStmts(_)
| ast::Expr::MethodCallExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::PathExpr(_)
@@ -458,3 +457,8 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
.collect();
Some(paths)
}
+
+pub fn macro_call_for_string_token(string: &ast::String) -> Option<MacroCall> {
+ let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
+ Some(macro_call)
+}