path: root/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-17 12:03:36 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-17 12:03:36 +0000
commit    17d40c6057c88f4c432b0d7bac88e1b84cb7e67f (patch)
tree      3f66c4a5918660bb8a758ab6cda5ff8ee4f6cdcd /src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
parent    Adding upstream version 1.64.0+dfsg1. (diff)
download  rustc-f7f0cc2a5d72e2c61c1f6900e70eec992bea4273.tar.xz
          rustc-f7f0cc2a5d72e2c61c1f6900e70eec992bea4273.zip
Adding upstream version 1.65.0+dfsg1. (upstream/1.65.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs')
-rw-r--r-- src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs | 497
1 files changed, 294 insertions, 203 deletions
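
The bulk of this patch replaces three inline loop { ... try_recv ... } blocks in GlobalState::handle_event with calls to new handle_task, handle_vfs_msg, and handle_flycheck_msg methods, leaving only the channel-draining in the event loop itself. What follows is a minimal, self-contained sketch of that coalescing pattern (generic names and a plain u32 payload, not rust-analyzer's actual types), showing how a single loop turn drains every message already queued on a crossbeam channel:

use crossbeam_channel::unbounded;

fn handle(event: u32) {
    // Stand-in for handle_task / handle_vfs_msg / handle_flycheck_msg.
    println!("handled event {event}");
}

fn main() {
    let (sender, receiver) = unbounded::<u32>();
    for i in 0..5 {
        sender.send(i).unwrap();
    }

    // Handle the event that woke us up, then drain whatever else is already
    // queued so one loop turn coalesces all pending messages.
    if let Ok(first) = receiver.try_recv() {
        handle(first);
        while let Ok(next) = receiver.try_recv() {
            handle(next);
        }
    }
}
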
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 5845cf712..3cfbc2e4e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -2,13 +2,16 @@
//! requests/replies and notifications back to the client.
use std::{
fmt,
+ ops::Deref,
sync::Arc,
time::{Duration, Instant},
};
use always_assert::always;
use crossbeam_channel::{select, Receiver};
-use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+use flycheck::FlycheckHandle;
+use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
+use itertools::Itertools;
use lsp_server::{Connection, Notification, Request};
use lsp_types::notification::Notification as _;
use vfs::{ChangeKind, FileId};
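
The new imports are consumed further down in the patch: FlycheckHandle by the quiescence-transition restart, Deref and SourceDatabase by the crate-root lookup, and Itertools by the DidSaveTextDocument handler, which deduplicates reverse-dependency crate ids with .sorted().unique(). A rough illustration of those two Itertools adapters on sample data (not the real crate ids):

use itertools::Itertools;

fn main() {
    // Reverse-dependency ids typically arrive unordered and with duplicates.
    let crate_ids = vec![3_u32, 1, 3, 2, 1];
    let deduped: Vec<u32> = crate_ids.into_iter().sorted().unique().collect();
    assert_eq!(deduped, vec![1, 2, 3]);
}
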
@@ -203,81 +206,14 @@ impl GlobalState {
}
lsp_server::Message::Response(resp) => self.complete_request(resp),
},
- Event::Task(mut task) => {
+ Event::Task(task) => {
let _p = profile::span("GlobalState::handle_event/task");
let mut prime_caches_progress = Vec::new();
- loop {
- match task {
- Task::Response(response) => self.respond(response),
- Task::Retry(req) => self.on_request(req),
- Task::Diagnostics(diagnostics_per_file) => {
- for (file_id, diagnostics) in diagnostics_per_file {
- self.diagnostics.set_native_diagnostics(file_id, diagnostics)
- }
- }
- Task::PrimeCaches(progress) => match progress {
- PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
- PrimeCachesProgress::Report(_) => {
- match prime_caches_progress.last_mut() {
- Some(last @ PrimeCachesProgress::Report(_)) => {
- // Coalesce subsequent update events.
- *last = progress;
- }
- _ => prime_caches_progress.push(progress),
- }
- }
- PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
- },
- Task::FetchWorkspace(progress) => {
- let (state, msg) = match progress {
- ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
- ProjectWorkspaceProgress::Report(msg) => {
- (Progress::Report, Some(msg))
- }
- ProjectWorkspaceProgress::End(workspaces) => {
- self.fetch_workspaces_queue.op_completed(workspaces);
-
- let old = Arc::clone(&self.workspaces);
- self.switch_workspaces("fetched workspace".to_string());
- let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
- if self.config.run_build_scripts() && workspaces_updated {
- self.fetch_build_data_queue
- .request_op(format!("workspace updated"));
- }
-
- (Progress::End, None)
- }
- };
-
- self.report_progress("Fetching", state, msg, None);
- }
- Task::FetchBuildData(progress) => {
- let (state, msg) = match progress {
- BuildDataProgress::Begin => (Some(Progress::Begin), None),
- BuildDataProgress::Report(msg) => {
- (Some(Progress::Report), Some(msg))
- }
- BuildDataProgress::End(build_data_result) => {
- self.fetch_build_data_queue.op_completed(build_data_result);
-
- self.switch_workspaces("fetched build data".to_string());
-
- (Some(Progress::End), None)
- }
- };
-
- if let Some(state) = state {
- self.report_progress("Loading", state, msg, None);
- }
- }
- }
-
- // Coalesce multiple task events into one loop turn
- task = match self.task_pool.receiver.try_recv() {
- Ok(task) => task,
- Err(_) => break,
- };
+ self.handle_task(&mut prime_caches_progress, task);
+ // Coalesce multiple task events into one loop turn
+ while let Ok(task) = self.task_pool.receiver.try_recv() {
+ self.handle_task(&mut prime_caches_progress, task);
}
for progress in prime_caches_progress {
@@ -324,118 +260,20 @@ impl GlobalState {
self.report_progress("Indexing", state, message, Some(fraction));
}
}
- Event::Vfs(mut task) => {
+ Event::Vfs(message) => {
let _p = profile::span("GlobalState::handle_event/vfs");
- loop {
- match task {
- vfs::loader::Message::Loaded { files } => {
- let vfs = &mut self.vfs.write().0;
- for (path, contents) in files {
- let path = VfsPath::from(path);
- if !self.mem_docs.contains(&path) {
- vfs.set_file_contents(path, contents);
- }
- }
- }
- vfs::loader::Message::Progress { n_total, n_done, config_version } => {
- always!(config_version <= self.vfs_config_version);
-
- self.vfs_progress_config_version = config_version;
- self.vfs_progress_n_total = n_total;
- self.vfs_progress_n_done = n_done;
-
- let state = if n_done == 0 {
- Progress::Begin
- } else if n_done < n_total {
- Progress::Report
- } else {
- assert_eq!(n_done, n_total);
- Progress::End
- };
- self.report_progress(
- "Roots Scanned",
- state,
- Some(format!("{}/{}", n_done, n_total)),
- Some(Progress::fraction(n_done, n_total)),
- )
- }
- }
- // Coalesce many VFS events into a single loop turn
- task = match self.loader.receiver.try_recv() {
- Ok(task) => task,
- Err(_) => break,
- }
+ self.handle_vfs_msg(message);
+ // Coalesce many VFS events into a single loop turn
+ while let Ok(message) = self.loader.receiver.try_recv() {
+ self.handle_vfs_msg(message);
}
}
- Event::Flycheck(mut task) => {
+ Event::Flycheck(message) => {
let _p = profile::span("GlobalState::handle_event/flycheck");
- loop {
- match task {
- flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
- let snap = self.snapshot();
- let diagnostics =
- crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
- &self.config.diagnostics_map(),
- &diagnostic,
- &workspace_root,
- &snap,
- );
- for diag in diagnostics {
- match url_to_file_id(&self.vfs.read().0, &diag.url) {
- Ok(file_id) => self.diagnostics.add_check_diagnostic(
- file_id,
- diag.diagnostic,
- diag.fix,
- ),
- Err(err) => {
- tracing::error!(
- "File with cargo diagnostic not found in VFS: {}",
- err
- );
- }
- };
- }
- }
-
- flycheck::Message::Progress { id, progress } => {
- let (state, message) = match progress {
- flycheck::Progress::DidStart => {
- self.diagnostics.clear_check();
- (Progress::Begin, None)
- }
- flycheck::Progress::DidCheckCrate(target) => {
- (Progress::Report, Some(target))
- }
- flycheck::Progress::DidCancel => (Progress::End, None),
- flycheck::Progress::DidFinish(result) => {
- if let Err(err) = result {
- self.show_and_log_error(
- "cargo check failed".to_string(),
- Some(err.to_string()),
- );
- }
- (Progress::End, None)
- }
- };
-
- // When we're running multiple flychecks, we have to include a disambiguator in
- // the title, or the editor complains. Note that this is a user-facing string.
- let title = if self.flycheck.len() == 1 {
- match self.config.flycheck() {
- Some(config) => format!("{}", config),
- None => "cargo check".to_string(),
- }
- } else {
- format!("cargo check (#{})", id + 1)
- };
- self.report_progress(&title, state, message, None);
- }
- }
- // Coalesce many flycheck updates into a single loop turn
- task = match self.flycheck_receiver.try_recv() {
- Ok(task) => task,
- Err(_) => break,
- }
+ self.handle_flycheck_msg(message);
+ // Coalesce many flycheck updates into a single loop turn
+ while let Ok(message) = self.flycheck_receiver.try_recv() {
+ self.handle_flycheck_msg(message);
}
}
}
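
The handle_vfs_msg helper called in the hunk above (and defined later in this patch) reduces the loader's (n_done, n_total) counters to a Begin/Report/End progress state for the "Roots Scanned" report. A small standalone sketch of that mapping, with a local Progress enum standing in for rust-analyzer's own type:

#[derive(Debug, PartialEq)]
enum Progress {
    Begin,
    Report,
    End,
}

fn vfs_progress_state(n_done: usize, n_total: usize) -> Progress {
    if n_done == 0 {
        Progress::Begin
    } else if n_done < n_total {
        Progress::Report
    } else {
        // The loader never reports more files done than it announced in total.
        assert_eq!(n_done, n_total);
        Progress::End
    }
}

fn main() {
    assert_eq!(vfs_progress_state(0, 128), Progress::Begin);
    assert_eq!(vfs_progress_state(64, 128), Progress::Report);
    assert_eq!(vfs_progress_state(128, 128), Progress::End);
}
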
@@ -444,10 +282,13 @@ impl GlobalState {
let memdocs_added_or_removed = self.mem_docs.take_changes();
if self.is_quiescent() {
- if !was_quiescent {
- for flycheck in &self.flycheck {
- flycheck.update();
- }
+ let became_quiescent = !(was_quiescent
+ || self.fetch_workspaces_queue.op_requested()
+ || self.fetch_build_data_queue.op_requested());
+
+ if became_quiescent {
+ // Project has loaded properly, kick off initial flycheck
+ self.flycheck.iter().for_each(FlycheckHandle::restart);
if self.config.prefill_caches() {
self.prime_caches_queue.request_op("became quiescent".to_string());
}
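
The old code restarted flycheck whenever an event arrived while the server was quiescent; the new condition fires only on the transition into quiescence, and only once no workspace or build-data fetch is still pending. A reduced sketch of that edge check as a free function (hypothetical names, simplified from the method above):

/// True only on the edge from "busy" to "quiescent", and only once no
/// workspace or build-data fetch is still outstanding.
fn became_quiescent(
    was_quiescent: bool,
    workspace_fetch_pending: bool,
    build_data_fetch_pending: bool,
) -> bool {
    !(was_quiescent || workspace_fetch_pending || build_data_fetch_pending)
}

fn main() {
    assert!(became_quiescent(false, false, false)); // just finished loading
    assert!(!became_quiescent(true, false, false)); // was already quiescent
    assert!(!became_quiescent(false, true, false)); // workspace fetch still queued
}
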
@@ -486,28 +327,40 @@ impl GlobalState {
continue;
}
- let url = file_id_to_url(&self.vfs.read().0, file_id);
+ let uri = file_id_to_url(&self.vfs.read().0, file_id);
let mut diagnostics =
self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
- // https://github.com/rust-lang/rust-analyzer/issues/11404
- for d in &mut diagnostics {
- if d.message.is_empty() {
- d.message = " ".to_string();
+
+ // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
+ // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
+ // diagnostic messages are actually allowed to be empty or not and patching this
+ // in the VSCode client does not work as the assertion happens in the protocol
+ // conversion. So this hack is here to stay, and will be considered a hack
+ // until the LSP decides to state that empty messages are allowed.
+
+ // See https://github.com/rust-lang/rust-analyzer/issues/11404
+ // See https://github.com/rust-lang/rust-analyzer/issues/13130
+ let patch_empty = |message: &mut String| {
+ if message.is_empty() {
+ *message = " ".to_string();
}
- if let Some(rds) = d.related_information.as_mut() {
- for rd in rds {
- if rd.message.is_empty() {
- rd.message = " ".to_string();
- }
+ };
+
+ for d in &mut diagnostics {
+ patch_empty(&mut d.message);
+ if let Some(dri) = &mut d.related_information {
+ for dri in dri {
+ patch_empty(&mut dri.message);
}
}
}
- let version = from_proto::vfs_path(&url)
+
+ let version = from_proto::vfs_path(&uri)
.map(|path| self.mem_docs.get(&path).map(|it| it.version))
.unwrap_or_default();
self.send_notification::<lsp_types::notification::PublishDiagnostics>(
- lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version },
+ lsp_types::PublishDiagnosticsParams { uri, diagnostics, version },
);
}
}
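
The comment block above explains why empty diagnostic messages are padded before publishing. A standalone sketch of that workaround, written as a plain function rather than the closure used in the patch:

fn patch_empty(message: &mut String) {
    // VSCode rejects empty diagnostic messages, so publish " " instead.
    if message.is_empty() {
        *message = " ".to_string();
    }
}

fn main() {
    let mut primary = String::new();
    let mut related = String::from("expected `u32`");
    patch_empty(&mut primary);
    patch_empty(&mut related);
    assert_eq!(primary, " ");
    assert_eq!(related, "expected `u32`"); // non-empty messages are untouched
}
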
@@ -569,11 +422,178 @@ impl GlobalState {
Ok(())
}
+ fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
+ match task {
+ Task::Response(response) => self.respond(response),
+ Task::Retry(req) => self.on_request(req),
+ Task::Diagnostics(diagnostics_per_file) => {
+ for (file_id, diagnostics) in diagnostics_per_file {
+ self.diagnostics.set_native_diagnostics(file_id, diagnostics)
+ }
+ }
+ Task::PrimeCaches(progress) => match progress {
+ PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
+ PrimeCachesProgress::Report(_) => {
+ match prime_caches_progress.last_mut() {
+ Some(last @ PrimeCachesProgress::Report(_)) => {
+ // Coalesce subsequent update events.
+ *last = progress;
+ }
+ _ => prime_caches_progress.push(progress),
+ }
+ }
+ PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
+ },
+ Task::FetchWorkspace(progress) => {
+ let (state, msg) = match progress {
+ ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
+ ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
+ ProjectWorkspaceProgress::End(workspaces) => {
+ self.fetch_workspaces_queue.op_completed(workspaces);
+
+ let old = Arc::clone(&self.workspaces);
+ self.switch_workspaces("fetched workspace".to_string());
+ let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
+
+ if self.config.run_build_scripts() && workspaces_updated {
+ self.fetch_build_data_queue.request_op(format!("workspace updated"));
+ }
+
+ (Progress::End, None)
+ }
+ };
+
+ self.report_progress("Fetching", state, msg, None);
+ }
+ Task::FetchBuildData(progress) => {
+ let (state, msg) = match progress {
+ BuildDataProgress::Begin => (Some(Progress::Begin), None),
+ BuildDataProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
+ BuildDataProgress::End(build_data_result) => {
+ self.fetch_build_data_queue.op_completed(build_data_result);
+
+ self.switch_workspaces("fetched build data".to_string());
+
+ (Some(Progress::End), None)
+ }
+ };
+
+ if let Some(state) = state {
+ self.report_progress("Loading", state, msg, None);
+ }
+ }
+ }
+ }
+
+ fn handle_vfs_msg(&mut self, message: vfs::loader::Message) {
+ match message {
+ vfs::loader::Message::Loaded { files } => {
+ let vfs = &mut self.vfs.write().0;
+ for (path, contents) in files {
+ let path = VfsPath::from(path);
+ if !self.mem_docs.contains(&path) {
+ vfs.set_file_contents(path, contents);
+ }
+ }
+ }
+ vfs::loader::Message::Progress { n_total, n_done, config_version } => {
+ always!(config_version <= self.vfs_config_version);
+
+ self.vfs_progress_config_version = config_version;
+ self.vfs_progress_n_total = n_total;
+ self.vfs_progress_n_done = n_done;
+
+ let state = if n_done == 0 {
+ Progress::Begin
+ } else if n_done < n_total {
+ Progress::Report
+ } else {
+ assert_eq!(n_done, n_total);
+ Progress::End
+ };
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(format!("{}/{}", n_done, n_total)),
+ Some(Progress::fraction(n_done, n_total)),
+ )
+ }
+ }
+ }
+
+ fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
+ match message {
+ flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
+ let snap = self.snapshot();
+ let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
+ &self.config.diagnostics_map(),
+ &diagnostic,
+ &workspace_root,
+ &snap,
+ );
+ for diag in diagnostics {
+ match url_to_file_id(&self.vfs.read().0, &diag.url) {
+ Ok(file_id) => self.diagnostics.add_check_diagnostic(
+ id,
+ file_id,
+ diag.diagnostic,
+ diag.fix,
+ ),
+ Err(err) => {
+ tracing::error!("File with cargo diagnostic not found in VFS: {}", err);
+ }
+ };
+ }
+ }
+
+ flycheck::Message::Progress { id, progress } => {
+ let (state, message) = match progress {
+ flycheck::Progress::DidStart => {
+ self.diagnostics.clear_check(id);
+ (Progress::Begin, None)
+ }
+ flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
+ flycheck::Progress::DidCancel => (Progress::End, None),
+ flycheck::Progress::DidFailToRestart(err) => {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ return;
+ }
+ flycheck::Progress::DidFinish(result) => {
+ if let Err(err) = result {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ }
+ (Progress::End, None)
+ }
+ };
+
+ // When we're running multiple flychecks, we have to include a disambiguator in
+ // the title, or the editor complains. Note that this is a user-facing string.
+ let title = if self.flycheck.len() == 1 {
+ match self.config.flycheck() {
+ Some(config) => format!("{}", config),
+ None => "cargo check".to_string(),
+ }
+ } else {
+ format!("cargo check (#{})", id + 1)
+ };
+ self.report_progress(&title, state, message, None);
+ }
+ }
+ }
+
+ /// Registers and handles a request. This should only be called once per incoming request.
fn on_new_request(&mut self, request_received: Instant, req: Request) {
self.register_request(&req, request_received);
self.on_request(req);
}
+ /// Handles a request.
fn on_request(&mut self, req: Request) {
if self.shutdown_requested {
self.respond(lsp_server::Response::new_err(
@@ -602,6 +622,7 @@ impl GlobalState {
.on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
.on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
.on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
+ .on_sync_mut::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)
.on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
.on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
.on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
@@ -664,6 +685,7 @@ impl GlobalState {
.finish();
}
+ /// Handles an incoming notification.
fn on_notification(&mut self, not: Notification) -> Result<()> {
NotificationDispatcher { not: Some(not), global_state: self }
.on::<lsp_types::notification::Cancel>(|this, params| {
@@ -734,13 +756,82 @@ impl GlobalState {
Ok(())
})?
.on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
- for flycheck in &this.flycheck {
- flycheck.update();
+ let mut updated = false;
+ if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
+ let (vfs, _) = &*this.vfs.read();
+
+ // Trigger flychecks for all workspaces that depend on the saved file
+ if let Some(file_id) = vfs.file_id(&vfs_path) {
+ let analysis = this.analysis_host.analysis();
+ // Crates containing or depending on the saved file
+ let crate_ids: Vec<_> = analysis
+ .crate_for(file_id)?
+ .into_iter()
+ .flat_map(|id| {
+ this.analysis_host
+ .raw_database()
+ .crate_graph()
+ .transitive_rev_deps(id)
+ })
+ .sorted()
+ .unique()
+ .collect();
+
+ let crate_root_paths: Vec<_> = crate_ids
+ .iter()
+ .filter_map(|&crate_id| {
+ analysis
+ .crate_root(crate_id)
+ .map(|file_id| {
+ vfs.file_path(file_id).as_path().map(ToOwned::to_owned)
+ })
+ .transpose()
+ })
+ .collect::<ide::Cancellable<_>>()?;
+ let crate_root_paths: Vec<_> =
+ crate_root_paths.iter().map(Deref::deref).collect();
+
+ // Find all workspaces that have at least one target containing the saved file
+ let workspace_ids =
+ this.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
+ project_model::ProjectWorkspace::Cargo { cargo, .. } => {
+ cargo.packages().any(|pkg| {
+ cargo[pkg].targets.iter().any(|&it| {
+ crate_root_paths.contains(&cargo[it].root.as_path())
+ })
+ })
+ }
+ project_model::ProjectWorkspace::Json { project, .. } => project
+ .crates()
+ .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
+ project_model::ProjectWorkspace::DetachedFiles { .. } => false,
+ });
+
+ // Find and trigger corresponding flychecks
+ for flycheck in &this.flycheck {
+ for (id, _) in workspace_ids.clone() {
+ if id == flycheck.id() {
+ updated = true;
+ flycheck.restart();
+ continue;
+ }
+ }
+ }
+ }
+
+ // Re-fetch workspaces if a workspace related file has changed
+ if let Some(abs_path) = vfs_path.as_path() {
+ if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
+ this.fetch_workspaces_queue
+ .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+ }
+ }
}
- if let Ok(abs_path) = from_proto::abs_path(&params.text_document.uri) {
- if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
- this.fetch_workspaces_queue
- .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+
+ // No specific flycheck was triggered, so let's trigger all of them.
+ if !updated {
+ for flycheck in &this.flycheck {
+ flycheck.restart();
}
}
Ok(())