fix: Fix DidSaveTextDocument requests blocking the server on startup

Lukas Wirth 2022-10-20 19:28:28 +02:00
parent f3cce5feea
commit de195ff97c
7 changed files with 132 additions and 83 deletions

View File

@@ -486,6 +486,16 @@ pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
         self.with_db(|db| parent_module::crates_for(db, file_id))
     }
 
+    /// Returns crates that transitively depend on the given crate.
+    pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable<Vec<CrateId>> {
+        self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect())
+    }
+
+    /// Returns crates this file *might* belong to.
+    pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+        self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
+    }
+
     /// Returns the edition of the given crate.
     pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
         self.with_db(|db| db.crate_graph()[crate_id].edition)
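Both new `Analysis` methods just forward to existing database queries through `with_db`, so they stay cancellable like the rest of the API. As a rough illustration of what a transitive-reverse-dependency query computes (a sketch, not rust-analyzer's implementation), the following walks a toy reverse-dependency map breadth-first; `CrateId` and `ReverseDeps` here are hypothetical stand-ins:

    use std::collections::{HashMap, HashSet, VecDeque};

    // Hypothetical stand-in for a crate graph: crate id -> crates that depend on it directly.
    type CrateId = u32;
    type ReverseDeps = HashMap<CrateId, Vec<CrateId>>;

    /// All crates that directly or transitively depend on `start` (including `start` itself).
    fn transitive_rev_deps(rev_deps: &ReverseDeps, start: CrateId) -> HashSet<CrateId> {
        let mut seen = HashSet::from([start]);
        let mut queue = VecDeque::from([start]);
        while let Some(krate) = queue.pop_front() {
            for &dependent in rev_deps.get(&krate).into_iter().flatten() {
                if seen.insert(dependent) {
                    queue.push_back(dependent);
                }
            }
        }
        seen
    }

    fn main() {
        // 0 <- 1 <- 2 and 0 <- 3: editing crate 0 affects crates 1, 2 and 3.
        let rev_deps = ReverseDeps::from([(0, vec![1, 3]), (1, vec![2])]);
        println!("{:?}", transitive_rev_deps(&rev_deps, 0));
    }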

View File

@@ -1,8 +1,9 @@
-use hir::Semantics;
+use hir::{db::DefDatabase, Semantics};
 use ide_db::{
     base_db::{CrateId, FileId, FileLoader, FilePosition},
     RootDatabase,
 };
+use itertools::Itertools;
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, AstNode},
@@ -55,7 +56,12 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
 
 /// Returns `Vec` for the same reason as `parent_module`
 pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
-    db.relevant_crates(file_id).iter().copied().collect()
+    db.relevant_crates(file_id)
+        .iter()
+        .copied()
+        .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
+        .sorted()
+        .collect()
 }
 
 #[cfg(test)]
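The added `filter` makes `crates_for` return only crates whose def map actually contains a module for the file, rather than every crate that merely shares the file's source root. A minimal sketch of that narrowing step, with a hypothetical `DefMaps` lookup standing in for `crate_def_map(..).modules_for_file(..)` and a plain sort in place of itertools' `sorted`:

    use std::collections::HashMap;

    type CrateId = u32;
    type FileId = u32;

    // Hypothetical stand-in: which files each crate actually defines modules for.
    struct DefMaps {
        modules_by_crate: HashMap<CrateId, Vec<FileId>>,
    }

    impl DefMaps {
        /// Keep only the candidate crates whose def map really contains the file.
        fn crates_containing(&self, candidates: &[CrateId], file_id: FileId) -> Vec<CrateId> {
            let mut crates: Vec<CrateId> = candidates
                .iter()
                .copied()
                .filter(|crate_id| {
                    self.modules_by_crate
                        .get(crate_id)
                        .map_or(false, |files| files.contains(&file_id))
                })
                .collect();
            crates.sort_unstable(); // the real code uses itertools' `.sorted()`
            crates
        }
    }

    fn main() {
        let def_maps = DefMaps {
            modules_by_crate: HashMap::from([(0, vec![10, 11]), (1, vec![12])]),
        };
        // File 11 only has a module in crate 0, even if both crates share a source root.
        println!("{:?}", def_maps.crates_containing(&[0, 1], 11));
    }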

View File

@@ -118,7 +118,11 @@ pub(crate) fn for_file(
         global_state_snapshot: &GlobalStateSnapshot,
         file_id: FileId,
     ) -> Result<Option<CargoTargetSpec>> {
-        let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_file_id(file_id) {
+        let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? {
+            &[crate_id, ..] => crate_id,
+            _ => return Ok(None),
+        };
+        let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_crate_root(crate_id) {
             Some(it) => it,
             None => return Ok(None),
         };
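`for_file` now resolves the file to a crate first and only then looks up a cargo target for that crate's root. The `&[crate_id, ..]` arm is an ordinary slice pattern that takes the first element, if any; a small standalone illustration:

    // Minimal illustration of the `&[first, ..]` slice pattern used above to pick
    // the first crate id (if any) out of the `crates_for` result.
    fn first_crate(crates: &[u32]) -> Option<u32> {
        match crates {
            &[first, ..] => Some(first),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(first_crate(&[7, 8, 9]), Some(7));
        assert_eq!(first_crate(&[]), None);
    }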

View File

@@ -8,7 +8,7 @@
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
 use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
-use ide_db::base_db::{FileLoader, SourceDatabase};
+use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
 use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
 use proc_macro_api::ProcMacroServer;
@@ -64,7 +64,7 @@ pub(crate) struct GlobalState {
     pub(crate) source_root_config: SourceRootConfig,
     pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
 
-    pub(crate) flycheck: Vec<FlycheckHandle>,
+    pub(crate) flycheck: Arc<[FlycheckHandle]>,
     pub(crate) flycheck_sender: Sender<flycheck::Message>,
     pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
@@ -117,6 +117,7 @@ pub(crate) struct GlobalStateSnapshot {
     vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
     pub(crate) proc_macros_loaded: bool,
+    pub(crate) flycheck: Arc<[FlycheckHandle]>,
 }
 
 impl std::panic::UnwindSafe for GlobalStateSnapshot {}
@@ -155,7 +156,7 @@ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> Global
             source_root_config: SourceRootConfig::default(),
             proc_macro_clients: vec![],
 
-            flycheck: Vec::new(),
+            flycheck: Arc::new([]),
             flycheck_sender,
             flycheck_receiver,
@@ -295,6 +296,7 @@ pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
             mem_docs: self.mem_docs.clone(),
             semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
             proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(),
+            flycheck: self.flycheck.clone(),
         }
     }
@@ -398,10 +400,15 @@ pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
         url_from_abs_path(path)
     }
 
-    pub(crate) fn cargo_target_for_file_id(
+    pub(crate) fn file_id_to_file_path(&self, file_id: FileId) -> vfs::VfsPath {
+        self.vfs.read().0.file_path(file_id)
+    }
+
+    pub(crate) fn cargo_target_for_crate_root(
         &self,
-        file_id: FileId,
+        crate_id: CrateId,
     ) -> Option<(&CargoWorkspace, Target)> {
+        let file_id = self.analysis.crate_root(crate_id).ok()?;
         let path = self.vfs.read().0.file_path(file_id);
         let path = path.as_path()?;
         self.workspaces.iter().find_map(|ws| match ws {
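Storing the flycheck handles as `Arc<[FlycheckHandle]>` and copying that `Arc` into `GlobalStateSnapshot` is what lets the save handler hand the handles to a background task without borrowing `GlobalState`. A minimal sketch of the pattern, with a hypothetical `Handle` type standing in for `FlycheckHandle`:

    use std::sync::Arc;
    use std::thread;

    // Hypothetical stand-in for FlycheckHandle.
    #[derive(Debug)]
    struct Handle(u32);

    // A snapshot clones just the Arc (a refcount bump), not the handles themselves,
    // so a worker thread can keep using them while the main loop moves on.
    struct Snapshot {
        flycheck: Arc<[Handle]>,
    }

    fn main() {
        let flycheck: Arc<[Handle]> = vec![Handle(0), Handle(1)].into();
        let snapshot = Snapshot { flycheck: flycheck.clone() };

        let worker = thread::spawn(move || {
            for handle in snapshot.flycheck.iter() {
                println!("would restart {handle:?}");
            }
        });
        worker.join().unwrap();

        // The original is still usable on the main-loop side.
        println!("{} handles", flycheck.len());
    }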

View File

@@ -1782,7 +1782,15 @@ fn run_rustfmt(
 ) -> Result<Option<Vec<lsp_types::TextEdit>>> {
     let file_id = from_proto::file_id(snap, &text_document.uri)?;
     let file = snap.analysis.file_text(file_id)?;
-    let crate_ids = snap.analysis.crates_for(file_id)?;
+
+    // find the edition of the package the file belongs to
+    // (if it belongs to multiple we'll just pick the first one and pray)
+    let edition = snap
+        .analysis
+        .relevant_crates_for(file_id)?
+        .into_iter()
+        .find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id))
+        .map(|(ws, target)| ws[ws[target].package].edition);
 
     let line_index = snap.file_line_index(file_id)?;
@@ -1808,9 +1816,7 @@
                 );
             }
         }
-        if let Some(&crate_id) = crate_ids.first() {
-            // Assume all crates are in the same edition
-            let edition = snap.analysis.crate_edition(crate_id)?;
+        if let Some(edition) = edition {
             cmd.arg("--edition");
             cmd.arg(edition.to_string());
         }
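`run_rustfmt` now derives `--edition` from the first candidate crate that actually resolves to a cargo target, instead of assuming every crate returned for the file shares one edition. A small sketch of that `find_map` fallback, with a hypothetical `editions` table standing in for the workspace lookup:

    use std::collections::HashMap;

    type CrateId = u32;

    // Hypothetical stand-in: not every candidate crate resolves to a cargo target
    // (sysroot crates, for example), so the lookup returns Option.
    fn edition_for(editions: &HashMap<CrateId, &'static str>, candidates: Vec<CrateId>) -> Option<&'static str> {
        // `find_map` stops at the first crate that has a target, mirroring
        // "pick the first one and pray" above.
        candidates.into_iter().find_map(|crate_id| editions.get(&crate_id).copied())
    }

    fn main() {
        let editions = HashMap::from([(3, "2021")]);
        // Crate 7 has no cargo target; crate 3 does.
        assert_eq!(edition_for(&editions, vec![7, 3]), Some("2021"));
        assert_eq!(edition_for(&editions, vec![7]), None);
    }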

View File

@@ -10,7 +10,7 @@
 use always_assert::always;
 use crossbeam_channel::{select, Receiver};
 use flycheck::FlycheckHandle;
-use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
+use ide_db::base_db::{SourceDatabaseExt, VfsPath};
 use itertools::Itertools;
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::notification::Notification as _;
@@ -191,7 +191,7 @@ fn handle_event(&mut self, event: Event) -> Result<()> {
         // NOTE: don't count blocking select! call as a loop-turn time
         let _p = profile::span("GlobalState::handle_event");
-        tracing::debug!("handle_event({:?})", event);
+        tracing::debug!("{:?} handle_event({:?})", loop_start, event);
 
         let task_queue_len = self.task_pool.handle.len();
         if task_queue_len > 0 {
             tracing::info!("task queue len: {}", task_queue_len);
@@ -727,7 +727,7 @@ fn on_notification(&mut self, not: Notification) -> Result<()> {
                         .insert(path.clone(), DocumentData::new(params.text_document.version))
                         .is_err();
                     if already_exists {
-                        tracing::error!("duplicate DidOpenTextDocument: {}", path)
+                        tracing::error!("duplicate DidOpenTextDocument: {}", path);
                     }
                     this.vfs
                         .write()
@@ -774,69 +774,7 @@ fn on_notification(&mut self, not: Notification) -> Result<()> {
                 Ok(())
             })?
             .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
-                let mut updated = false;
                 if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
-                    let (vfs, _) = &*this.vfs.read();
-
-                    // Trigger flychecks for all workspaces that depend on the saved file
-                    if let Some(file_id) = vfs.file_id(&vfs_path) {
-                        let analysis = this.analysis_host.analysis();
-                        // Crates containing or depending on the saved file
-                        let crate_ids: Vec<_> = analysis
-                            .crates_for(file_id)?
-                            .into_iter()
-                            .flat_map(|id| {
-                                this.analysis_host
-                                    .raw_database()
-                                    .crate_graph()
-                                    .transitive_rev_deps(id)
-                            })
-                            .sorted()
-                            .unique()
-                            .collect();
-
-                        let crate_root_paths: Vec<_> = crate_ids
-                            .iter()
-                            .filter_map(|&crate_id| {
-                                analysis
-                                    .crate_root(crate_id)
-                                    .map(|file_id| {
-                                        vfs.file_path(file_id).as_path().map(ToOwned::to_owned)
-                                    })
-                                    .transpose()
-                            })
-                            .collect::<ide::Cancellable<_>>()?;
-                        let crate_root_paths: Vec<_> =
-                            crate_root_paths.iter().map(Deref::deref).collect();
-
-                        // Find all workspaces that have at least one target containing the saved file
-                        let workspace_ids =
-                            this.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
-                                project_model::ProjectWorkspace::Cargo { cargo, .. } => {
-                                    cargo.packages().any(|pkg| {
-                                        cargo[pkg].targets.iter().any(|&it| {
-                                            crate_root_paths.contains(&cargo[it].root.as_path())
-                                        })
-                                    })
-                                }
-                                project_model::ProjectWorkspace::Json { project, .. } => project
-                                    .crates()
-                                    .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
-                                project_model::ProjectWorkspace::DetachedFiles { .. } => false,
-                            });
-
-                        // Find and trigger corresponding flychecks
-                        for flycheck in &this.flycheck {
-                            for (id, _) in workspace_ids.clone() {
-                                if id == flycheck.id() {
-                                    updated = true;
-                                    flycheck.restart();
-                                    continue;
-                                }
-                            }
-                        }
-                    }
-
                     // Re-fetch workspaces if a workspace related file has changed
                     if let Some(abs_path) = vfs_path.as_path() {
                         if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
@@ -844,13 +782,90 @@ fn on_notification(&mut self, not: Notification) -> Result<()> {
                                 .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
                         }
                     }
+
+                    let file_id = this.vfs.read().0.file_id(&vfs_path);
+                    if let Some(file_id) = file_id {
+                        let world = this.snapshot();
+                        let mut updated = false;
+                        let task = move || -> std::result::Result<(), ide::Cancelled> {
+                            // Trigger flychecks for all workspaces that depend on the saved file
+                            // Crates containing or depending on the saved file
+                            let crate_ids: Vec<_> = world
+                                .analysis
+                                .crates_for(file_id)?
+                                .into_iter()
+                                .flat_map(|id| world.analysis.transitive_rev_deps(id))
+                                .flatten()
+                                .sorted()
+                                .unique()
+                                .collect();
+
+                            let crate_root_paths: Vec<_> = crate_ids
+                                .iter()
+                                .filter_map(|&crate_id| {
+                                    world
+                                        .analysis
+                                        .crate_root(crate_id)
+                                        .map(|file_id| {
+                                            world
+                                                .file_id_to_file_path(file_id)
+                                                .as_path()
+                                                .map(ToOwned::to_owned)
+                                        })
+                                        .transpose()
+                                })
+                                .collect::<ide::Cancellable<_>>()?;
+                            let crate_root_paths: Vec<_> =
+                                crate_root_paths.iter().map(Deref::deref).collect();
+
+                            // Find all workspaces that have at least one target containing the saved file
+                            let workspace_ids =
+                                world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
+                                    project_model::ProjectWorkspace::Cargo { cargo, .. } => {
+                                        cargo.packages().any(|pkg| {
+                                            cargo[pkg].targets.iter().any(|&it| {
+                                                crate_root_paths.contains(&cargo[it].root.as_path())
+                                            })
+                                        })
+                                    }
+                                    project_model::ProjectWorkspace::Json { project, .. } => {
+                                        project.crates().any(|(c, _)| {
+                                            crate_ids.iter().any(|&crate_id| crate_id == c)
+                                        })
+                                    }
+                                    project_model::ProjectWorkspace::DetachedFiles { .. } => false,
+                                });
+
+                            // Find and trigger corresponding flychecks
+                            for flycheck in world.flycheck.iter() {
+                                for (id, _) in workspace_ids.clone() {
+                                    if id == flycheck.id() {
+                                        updated = true;
+                                        flycheck.restart();
+                                        continue;
+                                    }
+                                }
+                            }
+                            // No specific flycheck was triggered, so let's trigger all of them.
+                            if !updated {
+                                for flycheck in world.flycheck.iter() {
+                                    flycheck.restart();
+                                }
+                            }
+                            Ok(())
+                        };
+                        this.task_pool.handle.spawn_with_sender(move |_| {
+                            if let Err(e) = std::panic::catch_unwind(task) {
+                                tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}")
+                            }
+                        });
+                        return Ok(());
+                    }
                 }
 
                 // No specific flycheck was triggered, so let's trigger all of them.
-                if !updated {
-                    for flycheck in &this.flycheck {
-                        flycheck.restart();
-                    }
+                for flycheck in this.flycheck.iter() {
+                    flycheck.restart();
                 }
                 Ok(())
             })?
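This hunk is the core of the fix: the crate-graph queries that used to run inline in the `DidSaveTextDocument` handler (and could block behind the initial workspace analysis) are now packaged into a closure that owns a snapshot and is spawned on the task pool, wrapped in `catch_unwind`. A self-contained sketch of that pattern, using a plain `std::thread` as a stand-in for rust-analyzer's task pool and a dummy `Cancelled` type for the cancellation error:

    use std::panic;
    use std::thread;

    // Hypothetical stand-in for the cancellation error returned by analysis queries.
    #[derive(Debug)]
    struct Cancelled;

    fn main() {
        // The slow part is packaged as a closure that owns everything it needs,
        // so the "main loop" never has to wait for it.
        let snapshot_data = vec![1, 2, 3];
        let task = move || -> Result<(), Cancelled> {
            if snapshot_data.is_empty() {
                return Err(Cancelled); // a real query would bail out here if the state changed
            }
            println!("would trigger flychecks for crates {snapshot_data:?}");
            Ok(())
        };

        // Stand-in for `task_pool.handle.spawn_with_sender`: a plain worker thread.
        let worker = thread::spawn(move || {
            // `catch_unwind` turns a panic in the task into a logged error; a cancelled
            // task (`Ok(Err(Cancelled))`) is simply dropped, as in the handler above.
            if let Err(e) = panic::catch_unwind(panic::AssertUnwindSafe(task)) {
                eprintln!("flycheck task panicked: {e:?}");
            }
        });

        println!("main loop keeps handling events while the task runs");
        worker.join().unwrap();
    }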

View File

@@ -466,7 +466,7 @@ fn reload_flycheck(&mut self) {
         let config = match self.config.flycheck() {
             Some(it) => it,
             None => {
-                self.flycheck = Vec::new();
+                self.flycheck = Arc::new([]);
                 self.diagnostics.clear_check_all();
                 return;
             }
@@ -510,7 +510,8 @@ fn reload_flycheck(&mut self) {
                     })
                     .collect()
             }
-        };
+        }
+        .into();
     }
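`reload_flycheck` still builds the handles by collecting into a `Vec`; the trailing `.into()` converts that `Vec` into the new `Arc<[FlycheckHandle]>` field, and the early-return path gets an empty value from `Arc::new([])` via an unsized coercion. Both conversions in miniature, with a hypothetical `Handle` type:

    use std::sync::Arc;

    // Hypothetical stand-in for FlycheckHandle.
    #[derive(Debug)]
    struct Handle(usize);

    fn main() {
        // Build the handles as before...
        let handles: Vec<Handle> = (0..3).map(Handle).collect();
        // ...then turn the finished Vec into an immutable shared slice,
        // as the `match { ... }.into()` above does.
        let shared: Arc<[Handle]> = handles.into();

        // The early-return path: `Arc::new([])` coerces from Arc<[Handle; 0]> to Arc<[Handle]>.
        let empty: Arc<[Handle]> = Arc::new([]);

        println!("{shared:?}, empty = {empty:?}");
    }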
} }