| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:19:43 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:19:43 +0000 |
| commit | 3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245 (patch) | |
| tree | daf049b282ab10e8c3d03e409b3cd84ff3f7690c /src/tools/rust-analyzer/crates/ide-diagnostics | |
| parent | Adding debian version 1.68.2+dfsg1-1. (diff) | |
| download | rustc-3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245.tar.xz, rustc-3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245.zip | |
Merging upstream version 1.69.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/ide-diagnostics')
5 files changed, 253 insertions(+), 137 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index 7e9a1125d..e18624fcc 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -2,9 +2,11 @@
 name = "ide-diagnostics"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -15,19 +17,21 @@
 either = "1.7.0"
 itertools = "0.10.5"
 serde_json = "1.0.86"
 
-profile = { path = "../profile", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-hir = { path = "../hir", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
+# local deps
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
+cfg.workspace = true
+hir.workspace = true
+ide-db.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
-test-utils = { path = "../test-utils" }
-sourcegen = { path = "../sourcegen" }
+# local deps
+test-utils.workspace = true
+sourcegen.workspace = true
 
 [features]
 in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index e8df6dcf2..04ce1e0fe 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -99,76 +99,66 @@ pub(crate) fn json_in_items(
         && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY)
     {
         let node_string = node.to_string();
-        if let Ok(it) = serde_json::from_str(&node_string) {
-            if let serde_json::Value::Object(it) = it {
-                let import_scope = ImportScope::find_insert_use_container(node, sema)?;
-                let range = node.text_range();
-                let mut edit = TextEdit::builder();
-                edit.delete(range);
-                let mut state = State::default();
-                let semantics_scope = sema.scope(node)?;
-                let scope_resolve =
-                    |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
-                let scope_has = |it| scope_resolve(it).is_some();
-                let deserialize_resolved = scope_resolve("::serde::Deserialize");
-                let serialize_resolved = scope_resolve("::serde::Serialize");
-                state.has_deserialize = deserialize_resolved.is_some();
-                state.has_serialize = serialize_resolved.is_some();
-                state.build_struct(&it);
-                edit.insert(range.start(), state.result);
-                acc.push(
-                    Diagnostic::new(
-                        "json-is-not-rust",
-                        "JSON syntax is not valid as a Rust item",
-                        range,
-                    )
-                    .severity(Severity::WeakWarning)
-                    .with_fixes(Some(vec![{
-                        let mut scb = SourceChangeBuilder::new(file_id);
-                        let scope = match import_scope {
-                            ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
-                            ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
-                            ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
-                        };
-                        let current_module = semantics_scope.module();
-                        if !scope_has("Serialize") {
-                            if let Some(PathResolution::Def(it)) = serialize_resolved {
-                                if let Some(it) = current_module.find_use_path_prefixed(
-                                    sema.db,
-                                    it,
-                                    config.insert_use.prefix_kind,
-                                    config.prefer_no_std,
-                                ) {
-                                    insert_use(
-                                        &scope,
-                                        mod_path_to_ast(&it),
-                                        &config.insert_use,
-                                    );
-                                }
+        if let Ok(serde_json::Value::Object(it)) = serde_json::from_str(&node_string) {
+            let import_scope = ImportScope::find_insert_use_container(node, sema)?;
+            let range = node.text_range();
+            let mut edit = TextEdit::builder();
+            edit.delete(range);
+            let mut state = State::default();
+            let semantics_scope = sema.scope(node)?;
+            let scope_resolve =
+                |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
+            let scope_has = |it| scope_resolve(it).is_some();
+            let deserialize_resolved = scope_resolve("::serde::Deserialize");
+            let serialize_resolved = scope_resolve("::serde::Serialize");
+            state.has_deserialize = deserialize_resolved.is_some();
+            state.has_serialize = serialize_resolved.is_some();
+            state.build_struct(&it);
+            edit.insert(range.start(), state.result);
+            acc.push(
+                Diagnostic::new(
+                    "json-is-not-rust",
+                    "JSON syntax is not valid as a Rust item",
+                    range,
+                )
+                .severity(Severity::WeakWarning)
+                .with_fixes(Some(vec![{
+                    let mut scb = SourceChangeBuilder::new(file_id);
+                    let scope = match import_scope {
+                        ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
+                        ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
+                        ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
+                    };
+                    let current_module = semantics_scope.module();
+                    if !scope_has("Serialize") {
+                        if let Some(PathResolution::Def(it)) = serialize_resolved {
+                            if let Some(it) = current_module.find_use_path_prefixed(
+                                sema.db,
+                                it,
+                                config.insert_use.prefix_kind,
+                                config.prefer_no_std,
+                            ) {
+                                insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                             }
                         }
-                        if !scope_has("Deserialize") {
-                            if let Some(PathResolution::Def(it)) = deserialize_resolved {
-                                if let Some(it) = current_module.find_use_path_prefixed(
-                                    sema.db,
-                                    it,
-                                    config.insert_use.prefix_kind,
-                                    config.prefer_no_std,
-                                ) {
-                                    insert_use(
-                                        &scope,
-                                        mod_path_to_ast(&it),
-                                        &config.insert_use,
-                                    );
-                                }
+                    }
+                    if !scope_has("Deserialize") {
+                        if let Some(PathResolution::Def(it)) = deserialize_resolved {
+                            if let Some(it) = current_module.find_use_path_prefixed(
+                                sema.db,
+                                it,
+                                config.insert_use.prefix_kind,
+                                config.prefer_no_std,
+                            ) {
+                                insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                             }
                         }
-                        let mut sc = scb.finish();
-                        sc.insert_source_edit(file_id, edit.finish());
-                        fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
-                    }])),
-                );
-            }
+                    }
+                    let mut sc = scb.finish();
+                    sc.insert_source_edit(file_id, edit.finish());
+                    fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
+                }])),
+            );
         }
     }
     Some(())
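The `json_is_not_rust` hunk is mostly a re-indentation: the two nested `if let`s are folded into one by moving the `serde_json::Value::Object` pattern into `Ok(..)`, and the multi-line `insert_use` calls are collapsed onto one line. A minimal sketch of the same pattern fold, assuming only a `serde_json` dependency and none of rust-analyzer's types:

```rust
use serde_json::Value;

fn main() {
    let text = r#"{ "name": "ide-diagnostics", "version": 3 }"#;

    // Before: two nested `if let`s, one for the parse result, one for the variant.
    if let Ok(value) = serde_json::from_str::<Value>(text) {
        if let Value::Object(map) = value {
            println!("object with {} keys", map.len());
        }
    }

    // After: one `if let` with the variant pattern nested inside `Ok(..)`.
    // It only matches when parsing succeeds *and* the value is a JSON object,
    // so the body loses one indentation level without changing behaviour.
    if let Ok(Value::Object(map)) = serde_json::from_str::<Value>(text) {
        println!("object with {} keys", map.len());
    }
}
```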
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index b363a516d..0b3121c76 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -11,10 +11,7 @@ pub(crate) fn private_assoc_item(
     d: &hir::PrivateAssocItem,
 ) -> Diagnostic {
     // FIXME: add quickfix
-    let name = match d.item.name(ctx.sema.db) {
-        Some(name) => format!("`{}` ", name),
-        None => String::new(),
-    };
+    let name = d.item.name(ctx.sema.db).map(|name| format!("`{name}` ")).unwrap_or_default();
     Diagnostic::new(
         "private-assoc-item",
         format!(
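The `private_assoc_item` hunk swaps a two-arm `match` over the optional item name for `Option::map` plus `unwrap_or_default`. A tiny std-only sketch of the same rewrite, with a hypothetical `item_name` standing in for `d.item.name(ctx.sema.db)`:

```rust
fn main() {
    let item_name: Option<&str> = Some("method");

    // Before: explicit match with a String::new() fallback.
    let name = match item_name {
        Some(name) => format!("`{}` ", name),
        None => String::new(),
    };

    // After: map the Some case, fall back to the type's default (an empty String).
    let name2 = item_name.map(|name| format!("`{name}` ")).unwrap_or_default();

    assert_eq!(name, name2);
    println!("private {}item", name2);
}
```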
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index be70f0ac4..3d45a7591 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -1,13 +1,15 @@
 //! Diagnostic emitted for files that aren't part of any crate.
 
-use hir::db::DefDatabase;
+use std::iter;
+
+use hir::{db::DefDatabase, InFile, ModuleSource};
 use ide_db::{
     base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
     source_change::SourceChange,
     RootDatabase,
 };
 use syntax::{
-    ast::{self, HasModuleItem, HasName},
+    ast::{self, edit::IndentLevel, HasModuleItem, HasName},
     AstNode, TextRange, TextSize,
 };
 use text_edit::TextEdit;
@@ -42,47 +44,99 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
     let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id));
     let our_path = source_root.path_for_file(&file_id)?;
-    let (mut module_name, _) = our_path.name_and_extension()?;
-
-    // Candidates to look for:
-    // - `mod.rs`, `main.rs` and `lib.rs` in the same folder
-    // - `$dir.rs` in the parent folder, where `$dir` is the directory containing `self.file_id`
     let parent = our_path.parent()?;
-    let paths = {
-        let parent = if module_name == "mod" {
-            // for mod.rs we need to actually look up one higher
-            // and take the parent as our to be module name
-            let (name, _) = parent.name_and_extension()?;
-            module_name = name;
-            parent.parent()?
-        } else {
-            parent
-        };
-        let mut paths =
-            vec![parent.join("mod.rs")?, parent.join("lib.rs")?, parent.join("main.rs")?];
-
-        // `submod/bla.rs` -> `submod.rs`
-        let parent_mod = (|| {
+    let (module_name, _) = our_path.name_and_extension()?;
+    let (parent, module_name) = match module_name {
+        // for mod.rs we need to actually look up one higher
+        // and take the parent as our to be module name
+        "mod" => {
             let (name, _) = parent.name_and_extension()?;
-            parent.parent()?.join(&format!("{name}.rs"))
-        })();
-        paths.extend(parent_mod);
-        paths
+            (parent.parent()?, name.to_owned())
+        }
+        _ => (parent, module_name.to_owned()),
     };
-    for &parent_id in paths.iter().filter_map(|path| source_root.file_for_path(path)) {
-        for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
-            let crate_def_map = ctx.sema.db.crate_def_map(krate);
-            for (_, module) in crate_def_map.modules() {
-                if module.origin.is_inline() {
-                    // We don't handle inline `mod parent {}`s, they use different paths.
-                    continue;
-                }
+    // check crate roots, i.e. main.rs, lib.rs, ...
+    'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) {
+        let crate_def_map = ctx.sema.db.crate_def_map(krate);
+
+        let root_module = &crate_def_map[crate_def_map.root()];
+        let Some(root_file_id) = root_module.origin.file_id() else { continue };
+        let Some(crate_root_path) = source_root.path_for_file(&root_file_id) else { continue };
+        let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue };
+
+        // try resolving the relative difference of the paths as inline modules
+        let mut current = root_module;
+        for ele in rel.as_ref().components() {
+            let seg = match ele {
+                std::path::Component::Normal(seg) => seg.to_str()?,
+                std::path::Component::RootDir => continue,
+                // shouldn't occur
+                _ => continue 'crates,
+            };
+            match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) {
+                Some((_, &child)) => current = &crate_def_map[child],
+                None => continue 'crates,
+            }
+            if !current.origin.is_inline() {
+                continue 'crates;
+            }
+        }
+
+        let InFile { file_id: parent_file_id, value: source } =
+            current.definition_source(ctx.sema.db);
+        let parent_file_id = parent_file_id.file_id()?;
+        return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id);
+    }
 
-                if module.origin.file_id() == Some(parent_id) {
-                    return make_fixes(ctx.sema.db, parent_id, module_name, file_id);
+    // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible
+
+    // build all parent paths of the form `../module_name/mod.rs` and `../module_name.rs`
+    let paths = iter::successors(Some(parent.clone()), |prev| prev.parent()).filter_map(|path| {
+        let parent = path.parent()?;
+        let (name, _) = path.name_and_extension()?;
+        Some(([parent.join(&format!("{name}.rs"))?, path.join("mod.rs")?], name.to_owned()))
+    });
+    let mut stack = vec![];
+    let &parent_id =
+        paths.inspect(|(_, name)| stack.push(name.clone())).find_map(|(paths, _)| {
+            paths.into_iter().find_map(|path| source_root.file_for_path(&path))
+        })?;
+    stack.pop();
+    'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
+        let crate_def_map = ctx.sema.db.crate_def_map(krate);
+        let Some((_, module)) = crate_def_map
+            .modules()
+            .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline())
+        else { continue };
+
+        if stack.is_empty() {
+            return make_fixes(
+                ctx.sema.db,
+                parent_id,
+                module.definition_source(ctx.sema.db).value,
+                &module_name,
+                file_id,
+            );
+        } else {
+            // direct parent file is missing,
+            // try finding a parent that has an inline tree from here on
+            let mut current = module;
+            for s in stack.iter().rev() {
+                match module.children.iter().find(|(name, _)| name.to_smol_str() == s) {
+                    Some((_, child)) => {
+                        current = &crate_def_map[*child];
+                    }
+                    None => continue 'crates,
+                }
+                if !current.origin.is_inline() {
+                    continue 'crates;
                 }
             }
+            let InFile { file_id: parent_file_id, value: source } =
+                current.definition_source(ctx.sema.db);
+            let parent_file_id = parent_file_id.file_id()?;
+            return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id);
         }
     }
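When the unlinked file does not sit directly under a crate root, the rewritten `fixes` (second loop in the hunk above) walks the file's ancestor directories with `iter::successors`, producing the `../<dir>.rs` and `<dir>/mod.rs` candidates for each step instead of a hand-built `Vec` of paths. A rough std-only sketch of that ancestor walk, using plain `std::path` instead of rust-analyzer's `VfsPath` and a made-up file location:

```rust
use std::iter;
use std::path::{Path, PathBuf};

fn main() {
    // Hypothetical location of an unlinked file; its parent directory is the starting point.
    let unlinked = Path::new("/repo/src/bar/foo/baz.rs");
    let parent = unlinked.parent().unwrap();

    // Walk `parent`, `parent/..`, `parent/../..`, ... and for each directory emit the two
    // candidate files that could declare it as a module: `../<dir>.rs` and `<dir>/mod.rs`.
    let candidates = iter::successors(Some(parent.to_path_buf()), |prev| {
        prev.parent().map(Path::to_path_buf)
    })
    .filter_map(|dir| {
        let name = dir.file_name()?.to_str()?.to_owned();
        let up = dir.parent()?;
        Some(([up.join(format!("{name}.rs")), dir.join("mod.rs")], name))
    });

    for ([file_rs, mod_rs], name) in candidates {
        println!("module `{name}`: {} or {}", file_rs.display(), mod_rs.display());
    }
}
```

The real code additionally remembers the directory names it walked past (the `stack`), so that a missing intermediate file can still be resolved through inline `mod` trees, using the `let … else { continue }` guards visible in the hunk.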
@@ -92,6 +146,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
 fn make_fixes(
     db: &RootDatabase,
     parent_file_id: FileId,
+    source: ModuleSource,
     new_mod_name: &str,
     added_file_id: FileId,
 ) -> Option<Vec<Assist>> {
@@ -102,14 +157,18 @@ fn make_fixes(
     let mod_decl = format!("mod {new_mod_name};");
     let pub_mod_decl = format!("pub mod {new_mod_name};");
 
-    let ast: ast::SourceFile = db.parse(parent_file_id).tree();
-
     let mut mod_decl_builder = TextEdit::builder();
     let mut pub_mod_decl_builder = TextEdit::builder();
 
+    let mut items = match &source {
+        ModuleSource::SourceFile(it) => it.items(),
+        ModuleSource::Module(it) => it.item_list()?.items(),
+        ModuleSource::BlockExpr(_) => return None,
+    };
+
     // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's
     // probably `#[cfg]`d out).
-    for item in ast.items() {
+    for item in items.clone() {
         if let ast::Item::Module(m) = item {
             if let Some(name) = m.name() {
                 if m.item_list().is_none() && name.to_string() == new_mod_name {
@@ -121,28 +180,40 @@ fn make_fixes(
     }
 
     // If there are existing `mod m;` items, append after them (after the first group of them, rather).
-    match ast.items().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() {
+    match items.clone().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() {
         Some(last) => {
             cov_mark::hit!(unlinked_file_append_to_existing_mods);
             let offset = last.syntax().text_range().end();
-            mod_decl_builder.insert(offset, format!("\n{mod_decl}"));
-            pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}"));
+            let indent = IndentLevel::from_node(last.syntax());
+            mod_decl_builder.insert(offset, format!("\n{indent}{mod_decl}"));
+            pub_mod_decl_builder.insert(offset, format!("\n{indent}{pub_mod_decl}"));
         }
         None => {
             // Prepend before the first item in the file.
-            match ast.items().next() {
-                Some(item) => {
+            match items.next() {
+                Some(first) => {
                     cov_mark::hit!(unlinked_file_prepend_before_first_item);
-                    let offset = item.syntax().text_range().start();
-                    mod_decl_builder.insert(offset, format!("{mod_decl}\n\n"));
-                    pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n"));
+                    let offset = first.syntax().text_range().start();
+                    let indent = IndentLevel::from_node(first.syntax());
+                    mod_decl_builder.insert(offset, format!("{mod_decl}\n\n{indent}"));
+                    pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n{indent}"));
                 }
                 None => {
                     // No items in the file, so just append at the end.
                     cov_mark::hit!(unlinked_file_empty_file);
-                    let offset = ast.syntax().text_range().end();
-                    mod_decl_builder.insert(offset, format!("{mod_decl}\n"));
-                    pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n"));
+                    let mut indent = IndentLevel::from(0);
+                    let offset = match &source {
+                        ModuleSource::SourceFile(it) => it.syntax().text_range().end(),
+                        ModuleSource::Module(it) => {
+                            indent = IndentLevel::from_node(it.syntax()) + 1;
+                            it.item_list()?.r_curly_token()?.text_range().start()
+                        }
+                        ModuleSource::BlockExpr(it) => {
+                            it.stmt_list()?.r_curly_token()?.text_range().start()
+                        }
+                    };
+                    mod_decl_builder.insert(offset, format!("{indent}{mod_decl}\n"));
+                    pub_mod_decl_builder.insert(offset, format!("{indent}{pub_mod_decl}\n"));
                 }
             }
         }
@@ -167,7 +238,6 @@ fn make_fixes(
 
 #[cfg(test)]
 mod tests {
-
     use crate::tests::{check_diagnostics, check_fix, check_fixes, check_no_fix};
 
     #[test]
@@ -333,4 +403,62 @@ mod foo;
 "#,
         );
     }
+
+    #[test]
+    fn unlinked_file_insert_into_inline_simple() {
+        check_fix(
+            r#"
+//- /main.rs
+mod bar;
+//- /bar.rs
+mod foo {
+}
+//- /bar/foo/baz.rs
+$0
+"#,
+            r#"
+mod foo {
+    mod baz;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn unlinked_file_insert_into_inline_simple_modrs() {
+        check_fix(
+            r#"
+//- /main.rs
+mod bar;
+//- /bar.rs
+mod baz {
+}
+//- /bar/baz/foo/mod.rs
+$0
+"#,
+            r#"
+mod baz {
+    mod foo;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn unlinked_file_insert_into_inline_simple_modrs_main() {
+        check_fix(
+            r#"
+//- /main.rs
+mod bar {
+}
+//- /bar/foo/mod.rs
+$0
+"#,
+            r#"
+mod bar {
+    mod foo;
+}
+"#,
+        );
+    }
 }
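Because `make_fixes` now receives the parent `ModuleSource`, a new `mod <name>;` declaration can be spliced into an inline `mod … { }` just before its closing brace, one indent level deeper than the module itself; that is what the `IndentLevel` additions and the new tests above exercise. A loose std-only sketch of that empty-inline-module branch, with a hypothetical parent file held as a plain string rather than a syntax tree:

```rust
fn main() {
    // Hypothetical contents of the parent file: an empty inline module to extend.
    let parent_file = "mod bar {\n}\n";
    let new_mod_name = "foo";

    // Mirror the "no items yet" case in make_fixes: insert right before the closing `}`
    // of the inline module. The indent is fixed here; the real code derives it from
    // IndentLevel::from_node(..) + 1.
    let brace_offset = parent_file.rfind('}').expect("inline module has a closing brace");
    let indent = "    ";
    let mut edited = String::new();
    edited.push_str(&parent_file[..brace_offset]);
    edited.push_str(&format!("{indent}mod {new_mod_name};\n"));
    edited.push_str(&parent_file[brace_offset..]);

    // Prints:
    // mod bar {
    //     mod foo;
    // }
    print!("{edited}");
}
```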
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
index b2ed19104..9a984ba6b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
@@ -34,10 +34,7 @@ pub(crate) fn unresolved_proc_macro(
     let message = format!(
         "{message}: {}",
         if config_enabled {
-            match def_map.proc_macro_loading_error() {
-                Some(e) => e,
-                None => "proc macro not found in the built dylib",
-            }
+            def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib")
         } else {
             match d.kind {
                 hir::MacroKind::Attr if proc_macros_enabled => {
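The `unresolved_proc_macro` hunk is the same kind of clean-up in miniature: `Option::unwrap_or` supplies the fallback message directly instead of a two-arm `match`. A tiny std-only sketch with a hypothetical `loading_error` value:

```rust
fn main() {
    // Hypothetical loading error reported for a proc-macro dylib.
    let loading_error: Option<&str> = None;

    // Before: match with an explicit fallback arm.
    let msg = match loading_error {
        Some(e) => e,
        None => "proc macro not found in the built dylib",
    };

    // After: unwrap_or picks the fallback whenever the Option is None.
    let msg2 = loading_error.unwrap_or("proc macro not found in the built dylib");

    assert_eq!(msg, msg2);
    println!("{msg2}");
}
```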