Auto merge of #14710 - jhgg:chore/refactor-notification-handlers, r=Veykril

chore: rust-analyzer: refactor notification handlers

Fixes the FIXME in `on_notification`:

```rust
// FIXME: Move these implementations out into a module similar to on_request
```

No code has changed, this just moves stuff around.
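To make the shape of the change easier to see, here is a condensed before/after excerpted from the diff below (abbreviated, not a complete listing; the `handlers` and `notifs` aliases come from `use crate::handlers::notification as handlers;` and `use lsp_types::notification as notifs;` in `on_notification`):

```rust
// Before: each handler was an inline closure in GlobalState::on_notification.
NotificationDispatcher { not: Some(not), global_state: self }
    .on::<lsp_types::notification::Cancel>(|this, params| {
        let id: lsp_server::RequestId = match params.id {
            lsp_types::NumberOrString::Number(id) => id.into(),
            lsp_types::NumberOrString::String(id) => id.into(),
        };
        this.cancel(id);
        Ok(())
    })?

// After: the same logic is a free function in handlers/notification.rs ...
pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Result<()> {
    // body unchanged; see the full file below
}

// ... and on_notification only registers it:
.on::<notifs::Cancel>(handlers::handle_cancel)?
```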
Commit 98e76bd90f

File diff suppressed because it is too large.
crates/rust-analyzer/src/handlers/notification.rs (new file)

@@ -0,0 +1,339 @@

```rust
//! This module is responsible for implementing handlers for Language Server
//! Protocol. This module specifically handles notifications.

use std::{ops::Deref, sync::Arc};

use itertools::Itertools;
use lsp_types::{
    CancelParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
    DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
    DidOpenTextDocumentParams, DidSaveTextDocumentParams, WorkDoneProgressCancelParams,
};
use vfs::{AbsPathBuf, ChangeKind, VfsPath};

use crate::{
    config::Config, from_proto, global_state::GlobalState, lsp_ext::RunFlycheckParams,
    lsp_utils::apply_document_changes, mem_docs::DocumentData, reload, Result,
};

pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Result<()> {
    let id: lsp_server::RequestId = match params.id {
        lsp_types::NumberOrString::Number(id) => id.into(),
        lsp_types::NumberOrString::String(id) => id.into(),
    };
    state.cancel(id);
    Ok(())
}

pub(crate) fn handle_work_done_progress_cancel(
    state: &mut GlobalState,
    params: WorkDoneProgressCancelParams,
) -> Result<()> {
    if let lsp_types::NumberOrString::String(s) = &params.token {
        if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
            if let Ok(id) = u32::from_str_radix(id, 10) {
                if let Some(flycheck) = state.flycheck.get(id as usize) {
                    flycheck.cancel();
                }
            }
        }
    }

    // Just ignore this. It is OK to continue sending progress
    // notifications for this token, as the client can't know when
    // we accepted notification.
    Ok(())
}

pub(crate) fn handle_did_open_text_document(
    state: &mut GlobalState,
    params: DidOpenTextDocumentParams,
) -> Result<()> {
    let _p = profile::span("handle_did_open_text_document");

    if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
        let already_exists = state
            .mem_docs
            .insert(path.clone(), DocumentData::new(params.text_document.version))
            .is_err();
        if already_exists {
            tracing::error!("duplicate DidOpenTextDocument: {}", path);
        }
        state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
    }
    Ok(())
}

pub(crate) fn handle_did_change_text_document(
    state: &mut GlobalState,
    params: DidChangeTextDocumentParams,
) -> Result<()> {
    let _p = profile::span("handle_did_change_text_document");

    if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
        match state.mem_docs.get_mut(&path) {
            Some(doc) => {
                // The version passed in DidChangeTextDocument is the version after all edits are applied
                // so we should apply it before the vfs is notified.
                doc.version = params.text_document.version;
            }
            None => {
                tracing::error!("unexpected DidChangeTextDocument: {}", path);
                return Ok(());
            }
        };

        let vfs = &mut state.vfs.write().0;
        let file_id = vfs.file_id(&path).unwrap();
        let text = apply_document_changes(
            state.config.position_encoding(),
            || std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
            params.content_changes,
        );

        vfs.set_file_contents(path, Some(text.into_bytes()));
    }
    Ok(())
}

pub(crate) fn handle_did_close_text_document(
    state: &mut GlobalState,
    params: DidCloseTextDocumentParams,
) -> Result<()> {
    let _p = profile::span("handle_did_close_text_document");

    if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
        if state.mem_docs.remove(&path).is_err() {
            tracing::error!("orphan DidCloseTextDocument: {}", path);
        }

        state.semantic_tokens_cache.lock().remove(&params.text_document.uri);

        if let Some(path) = path.as_path() {
            state.loader.handle.invalidate(path.to_path_buf());
        }
    }
    Ok(())
}

pub(crate) fn handle_did_save_text_document(
    state: &mut GlobalState,
    params: DidSaveTextDocumentParams,
) -> Result<()> {
    if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
        // Re-fetch workspaces if a workspace related file has changed
        if let Some(abs_path) = vfs_path.as_path() {
            if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
                state
                    .fetch_workspaces_queue
                    .request_op(format!("DidSaveTextDocument {}", abs_path.display()), ());
            }
        }

        if !state.config.check_on_save() || run_flycheck(state, vfs_path) {
            return Ok(());
        }
    } else if state.config.check_on_save() {
        // No specific flycheck was triggered, so let's trigger all of them.
        for flycheck in state.flycheck.iter() {
            flycheck.restart();
        }
    }
    Ok(())
}

pub(crate) fn handle_did_change_configuration(
    state: &mut GlobalState,
    _params: DidChangeConfigurationParams,
) -> Result<()> {
    // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
    // this notification's parameters should be ignored and the actual config queried separately.
    state.send_request::<lsp_types::request::WorkspaceConfiguration>(
        lsp_types::ConfigurationParams {
            items: vec![lsp_types::ConfigurationItem {
                scope_uri: None,
                section: Some("rust-analyzer".to_string()),
            }],
        },
        |this, resp| {
            tracing::debug!("config update response: '{:?}", resp);
            let lsp_server::Response { error, result, .. } = resp;

            match (error, result) {
                (Some(err), _) => {
                    tracing::error!("failed to fetch the server settings: {:?}", err)
                }
                (None, Some(mut configs)) => {
                    if let Some(json) = configs.get_mut(0) {
                        // Note that json can be null according to the spec if the client can't
                        // provide a configuration. This is handled in Config::update below.
                        let mut config = Config::clone(&*this.config);
                        if let Err(error) = config.update(json.take()) {
                            this.show_message(
                                lsp_types::MessageType::WARNING,
                                error.to_string(),
                                false,
                            );
                        }
                        this.update_configuration(config);
                    }
                }
                (None, None) => {
                    tracing::error!("received empty server settings response from the client")
                }
            }
        },
    );

    Ok(())
}

pub(crate) fn handle_did_change_workspace_folders(
    state: &mut GlobalState,
    params: DidChangeWorkspaceFoldersParams,
) -> Result<()> {
    let config = Arc::make_mut(&mut state.config);

    for workspace in params.event.removed {
        let Ok(path) = workspace.uri.to_file_path() else { continue };
        let Ok(path) = AbsPathBuf::try_from(path) else { continue };
        config.remove_workspace(&path);
    }

    let added = params
        .event
        .added
        .into_iter()
        .filter_map(|it| it.uri.to_file_path().ok())
        .filter_map(|it| AbsPathBuf::try_from(it).ok());
    config.add_workspaces(added);

    if !config.has_linked_projects() && config.detached_files().is_empty() {
        config.rediscover_workspaces();
        state.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), ())
    }

    Ok(())
}

pub(crate) fn handle_did_change_watched_files(
    state: &mut GlobalState,
    params: DidChangeWatchedFilesParams,
) -> Result<()> {
    for change in params.changes {
        if let Ok(path) = from_proto::abs_path(&change.uri) {
            state.loader.handle.invalidate(path);
        }
    }
    Ok(())
}

fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
    let _p = profile::span("run_flycheck");

    let file_id = state.vfs.read().0.file_id(&vfs_path);
    if let Some(file_id) = file_id {
        let world = state.snapshot();
        let mut updated = false;
        let task = move || -> std::result::Result<(), ide::Cancelled> {
            // Trigger flychecks for all workspaces that depend on the saved file
            // Crates containing or depending on the saved file
            let crate_ids: Vec<_> = world
                .analysis
                .crates_for(file_id)?
                .into_iter()
                .flat_map(|id| world.analysis.transitive_rev_deps(id))
                .flatten()
                .sorted()
                .unique()
                .collect();

            let crate_root_paths: Vec<_> = crate_ids
                .iter()
                .filter_map(|&crate_id| {
                    world
                        .analysis
                        .crate_root(crate_id)
                        .map(|file_id| {
                            world.file_id_to_file_path(file_id).as_path().map(ToOwned::to_owned)
                        })
                        .transpose()
                })
                .collect::<ide::Cancellable<_>>()?;
            let crate_root_paths: Vec<_> = crate_root_paths.iter().map(Deref::deref).collect();

            // Find all workspaces that have at least one target containing the saved file
            let workspace_ids = world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
                project_model::ProjectWorkspace::Cargo { cargo, .. } => {
                    cargo.packages().any(|pkg| {
                        cargo[pkg]
                            .targets
                            .iter()
                            .any(|&it| crate_root_paths.contains(&cargo[it].root.as_path()))
                    })
                }
                project_model::ProjectWorkspace::Json { project, .. } => {
                    project.crates().any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c))
                }
                project_model::ProjectWorkspace::DetachedFiles { .. } => false,
            });

            // Find and trigger corresponding flychecks
            for flycheck in world.flycheck.iter() {
                for (id, _) in workspace_ids.clone() {
                    if id == flycheck.id() {
                        updated = true;
                        flycheck.restart();
                        continue;
                    }
                }
            }
            // No specific flycheck was triggered, so let's trigger all of them.
            if !updated {
                for flycheck in world.flycheck.iter() {
                    flycheck.restart();
                }
            }
            Ok(())
        };
        state.task_pool.handle.spawn_with_sender(move |_| {
            if let Err(e) = std::panic::catch_unwind(task) {
                tracing::error!("flycheck task panicked: {e:?}")
            }
        });
        true
    } else {
        false
    }
}

pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
    let _p = profile::span("handle_stop_flycheck");
    state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
    Ok(())
}

pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
    let _p = profile::span("handle_clear_flycheck");
    state.diagnostics.clear_check_all();
    Ok(())
}

pub(crate) fn handle_run_flycheck(
    state: &mut GlobalState,
    params: RunFlycheckParams,
) -> Result<()> {
    let _p = profile::span("handle_run_flycheck");
    if let Some(text_document) = params.text_document {
        if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
            if run_flycheck(state, vfs_path) {
                return Ok(());
            }
        }
    }
    // No specific flycheck was triggered, so let's trigger all of them.
    for flycheck in state.flycheck.iter() {
        flycheck.restart();
    }
    Ok(())
}
```
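As an illustration only (not part of this commit), adding a new notification under the new layout would mean one free function in this module plus one registration line in `on_notification`; the notification and parameter types below are hypothetical placeholders:

```rust
// Hypothetical sketch, not in this commit: `DidSomething` and
// `DidSomethingParams` stand in for a real notification type.
pub(crate) fn handle_did_something(
    state: &mut GlobalState,
    params: DidSomethingParams, // hypothetical params type
) -> Result<()> {
    let _p = profile::span("handle_did_something");
    // React to the notification using `state`, just like the handlers above.
    let _ = (state, params);
    Ok(())
}

// Registered in GlobalState::on_notification (see the dispatcher diff below):
//     .on::<DidSomething>(handlers::handle_did_something)?
```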
crates/rust-analyzer/src/handlers/request.rs (new file, 1883 lines)

File diff suppressed because it is too large.
The remaining hunks update the imports, `on_request`, `on_notification`, and `update_diagnostics`:

```diff
@@ -2,7 +2,6 @@
//! requests/replies and notifications back to the client.
use std::{
    fmt,
    ops::Deref,
    sync::Arc,
    time::{Duration, Instant},
};
@@ -11,20 +10,18 @@
use crossbeam_channel::{select, Receiver};
use flycheck::FlycheckHandle;
use ide_db::base_db::{SourceDatabaseExt, VfsPath};
use itertools::Itertools;
use lsp_server::{Connection, Notification, Request};
use lsp_types::notification::Notification as _;
use vfs::{AbsPathBuf, ChangeKind, FileId};
use vfs::FileId;

use crate::{
    config::Config,
    dispatch::{NotificationDispatcher, RequestDispatcher},
    from_proto,
    global_state::{file_id_to_url, url_to_file_id, GlobalState},
    handlers, lsp_ext,
    lsp_utils::{apply_document_changes, notification_is, Progress},
    mem_docs::DocumentData,
    reload::{self, BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
    lsp_ext,
    lsp_utils::{notification_is, Progress},
    reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
    Result,
};
@@ -652,6 +649,8 @@ fn on_request(&mut self, req: Request)
            _ => (),
        }

        use crate::handlers::request as handlers;

        dispatcher
            .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
            .on_sync_mut::<lsp_ext::RebuildProcMacros>(handlers::handle_proc_macros_rebuild)
@@ -723,284 +722,22 @@ fn on_request(&mut self, req: Request)
    /// Handles an incoming notification.
    fn on_notification(&mut self, not: Notification) -> Result<()> {
        // FIXME: Move these implementations out into a module similar to on_request
        fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool {
            let file_id = this.vfs.read().0.file_id(&vfs_path);
            if let Some(file_id) = file_id {
                let world = this.snapshot();
                let mut updated = false;
                let task = move || -> std::result::Result<(), ide::Cancelled> {
                    // Trigger flychecks for all workspaces that depend on the saved file
                    // Crates containing or depending on the saved file
                    let crate_ids: Vec<_> = world
                        .analysis
                        .crates_for(file_id)?
                        .into_iter()
                        .flat_map(|id| world.analysis.transitive_rev_deps(id))
                        .flatten()
                        .sorted()
                        .unique()
                        .collect();

                    let crate_root_paths: Vec<_> = crate_ids
                        .iter()
                        .filter_map(|&crate_id| {
                            world
                                .analysis
                                .crate_root(crate_id)
                                .map(|file_id| {
                                    world
                                        .file_id_to_file_path(file_id)
                                        .as_path()
                                        .map(ToOwned::to_owned)
                                })
                                .transpose()
                        })
                        .collect::<ide::Cancellable<_>>()?;
                    let crate_root_paths: Vec<_> =
                        crate_root_paths.iter().map(Deref::deref).collect();

                    // Find all workspaces that have at least one target containing the saved file
                    let workspace_ids =
                        world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
                            project_model::ProjectWorkspace::Cargo { cargo, .. } => {
                                cargo.packages().any(|pkg| {
                                    cargo[pkg].targets.iter().any(|&it| {
                                        crate_root_paths.contains(&cargo[it].root.as_path())
                                    })
                                })
                            }
                            project_model::ProjectWorkspace::Json { project, .. } => project
                                .crates()
                                .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
                            project_model::ProjectWorkspace::DetachedFiles { .. } => false,
                        });

                    // Find and trigger corresponding flychecks
                    for flycheck in world.flycheck.iter() {
                        for (id, _) in workspace_ids.clone() {
                            if id == flycheck.id() {
                                updated = true;
                                flycheck.restart();
                                continue;
                            }
                        }
                    }
                    // No specific flycheck was triggered, so let's trigger all of them.
                    if !updated {
                        for flycheck in world.flycheck.iter() {
                            flycheck.restart();
                        }
                    }
                    Ok(())
                };
                this.task_pool.handle.spawn_with_sender(move |_| {
                    if let Err(e) = std::panic::catch_unwind(task) {
                        tracing::error!("flycheck task panicked: {e:?}")
                    }
                });
                true
            } else {
                false
            }
        }
        use crate::handlers::notification as handlers;
        use lsp_types::notification as notifs;

        NotificationDispatcher { not: Some(not), global_state: self }
            .on::<lsp_types::notification::Cancel>(|this, params| {
                let id: lsp_server::RequestId = match params.id {
                    lsp_types::NumberOrString::Number(id) => id.into(),
                    lsp_types::NumberOrString::String(id) => id.into(),
                };
                this.cancel(id);
                Ok(())
            })?
            .on::<lsp_types::notification::WorkDoneProgressCancel>(|this, params| {
                if let lsp_types::NumberOrString::String(s) = &params.token {
                    if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
                        if let Ok(id) = u32::from_str_radix(id, 10) {
                            if let Some(flycheck) = this.flycheck.get(id as usize) {
                                flycheck.cancel();
                            }
                        }
                    }
                }
                // Just ignore this. It is OK to continue sending progress
                // notifications for this token, as the client can't know when
                // we accepted notification.
                Ok(())
            })?
            .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
                if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
                    let already_exists = this
                        .mem_docs
                        .insert(path.clone(), DocumentData::new(params.text_document.version))
                        .is_err();
                    if already_exists {
                        tracing::error!("duplicate DidOpenTextDocument: {}", path);
                    }
                    this.vfs
                        .write()
                        .0
                        .set_file_contents(path, Some(params.text_document.text.into_bytes()));
                }
                Ok(())
            })?
            .on::<notifs::Cancel>(handlers::handle_cancel)?
            .on::<notifs::WorkDoneProgressCancel>(handlers::handle_work_done_progress_cancel)?
            .on::<notifs::DidOpenTextDocument>(handlers::handle_did_open_text_document)?
            .on::<notifs::DidChangeTextDocument>(handlers::handle_did_change_text_document)?
            .on::<notifs::DidCloseTextDocument>(handlers::handle_did_close_text_document)?
            .on::<notifs::DidSaveTextDocument>(handlers::handle_did_save_text_document)?
            .on::<notifs::DidChangeConfiguration>(handlers::handle_did_change_configuration)?
            .on::<notifs::DidChangeWorkspaceFolders>(handlers::handle_did_change_workspace_folders)?
            .on::<notifs::DidChangeWatchedFiles>(handlers::handle_did_change_watched_files)?
            .on::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)?
            .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
                if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
                    match this.mem_docs.get_mut(&path) {
                        Some(doc) => {
                            // The version passed in DidChangeTextDocument is the version after all edits are applied
                            // so we should apply it before the vfs is notified.
                            doc.version = params.text_document.version;
                        }
                        None => {
                            tracing::error!("unexpected DidChangeTextDocument: {}", path);
                            return Ok(());
                        }
                    };

                    let vfs = &mut this.vfs.write().0;
                    let file_id = vfs.file_id(&path).unwrap();
                    let text = apply_document_changes(
                        this.config.position_encoding(),
                        || std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
                        params.content_changes,
                    );

                    vfs.set_file_contents(path, Some(text.into_bytes()));
                }
                Ok(())
            })?
            .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
                if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
                    if this.mem_docs.remove(&path).is_err() {
                        tracing::error!("orphan DidCloseTextDocument: {}", path);
                    }

                    this.semantic_tokens_cache.lock().remove(&params.text_document.uri);

                    if let Some(path) = path.as_path() {
                        this.loader.handle.invalidate(path.to_path_buf());
                    }
                }
                Ok(())
            })?
            .on::<lsp_ext::ClearFlycheck>(|this, ()| {
                this.diagnostics.clear_check_all();
                Ok(())
            })?
            .on::<lsp_ext::RunFlycheck>(|this, params| {
                if let Some(text_document) = params.text_document {
                    if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
                        if run_flycheck(this, vfs_path) {
                            return Ok(());
                        }
                    }
                }
                // No specific flycheck was triggered, so let's trigger all of them.
                for flycheck in this.flycheck.iter() {
                    flycheck.restart();
                }
                Ok(())
            })?
            .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
                if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
                    // Re-fetch workspaces if a workspace related file has changed
                    if let Some(abs_path) = vfs_path.as_path() {
                        if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
                            this.fetch_workspaces_queue.request_op(
                                format!("DidSaveTextDocument {}", abs_path.display()),
                                (),
                            );
                        }
                    }

                    if !this.config.check_on_save() || run_flycheck(this, vfs_path) {
                        return Ok(());
                    }
                } else if this.config.check_on_save() {
                    // No specific flycheck was triggered, so let's trigger all of them.
                    for flycheck in this.flycheck.iter() {
                        flycheck.restart();
                    }
                }
                Ok(())
            })?
            .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
                // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
                // this notification's parameters should be ignored and the actual config queried separately.
                this.send_request::<lsp_types::request::WorkspaceConfiguration>(
                    lsp_types::ConfigurationParams {
                        items: vec![lsp_types::ConfigurationItem {
                            scope_uri: None,
                            section: Some("rust-analyzer".to_string()),
                        }],
                    },
                    |this, resp| {
                        tracing::debug!("config update response: '{:?}", resp);
                        let lsp_server::Response { error, result, .. } = resp;

                        match (error, result) {
                            (Some(err), _) => {
                                tracing::error!("failed to fetch the server settings: {:?}", err)
                            }
                            (None, Some(mut configs)) => {
                                if let Some(json) = configs.get_mut(0) {
                                    // Note that json can be null according to the spec if the client can't
                                    // provide a configuration. This is handled in Config::update below.
                                    let mut config = Config::clone(&*this.config);
                                    if let Err(error) = config.update(json.take()) {
                                        this.show_message(
                                            lsp_types::MessageType::WARNING,
                                            error.to_string(),
                                            false,
                                        );
                                    }
                                    this.update_configuration(config);
                                }
                            }
                            (None, None) => tracing::error!(
                                "received empty server settings response from the client"
                            ),
                        }
                    },
                );

                Ok(())
            })?
            .on::<lsp_types::notification::DidChangeWorkspaceFolders>(|this, params| {
                let config = Arc::make_mut(&mut this.config);

                for workspace in params.event.removed {
                    let Ok(path) = workspace.uri.to_file_path() else { continue };
                    let Ok(path) = AbsPathBuf::try_from(path) else { continue };
                    config.remove_workspace(&path);
                }

                let added = params
                    .event
                    .added
                    .into_iter()
                    .filter_map(|it| it.uri.to_file_path().ok())
                    .filter_map(|it| AbsPathBuf::try_from(it).ok());
                config.add_workspaces(added);
                if !config.has_linked_projects() && config.detached_files().is_empty() {
                    config.rediscover_workspaces();
                    this.fetch_workspaces_queue
                        .request_op("client workspaces changed".to_string(), ())
                }

                Ok(())
            })?
            .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
                for change in params.changes {
                    if let Ok(path) = from_proto::abs_path(&change.uri) {
                        this.loader.handle.invalidate(path);
                    }
                }
                Ok(())
            })?
            .on::<lsp_ext::ClearFlycheck>(handlers::handle_clear_flycheck)?
            .on::<lsp_ext::RunFlycheck>(handlers::handle_run_flycheck)?
            .finish();
        Ok(())
    }
@@ -1029,7 +766,7 @@ fn update_diagnostics(&mut self) {
        let diagnostics = subscriptions
            .into_iter()
            .filter_map(|file_id| {
                handlers::publish_diagnostics(&snapshot, file_id)
                crate::handlers::publish_diagnostics(&snapshot, file_id)
                    .ok()
                    .map(|diags| (file_id, diags))
            })
```