Auto merge of #15548 - Veykril:r-a-restructure, r=Veykril

Restructure some modules in rust-analyzer crate
bors 2023-09-02 12:22:06 +00:00
commit 8a29d0776f
22 changed files with 145 additions and 134 deletions
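At a glance, the protocol-plumbing modules that previously sat at the crate root (from_proto, to_proto, lsp_utils, semantic_tokens, lsp_ext) move under a single lsp module, and a crate-local alias keeps the old lsp_ext name compiling at existing call sites. A minimal sketch of the resulting layout, paraphrased from the lib.rs and new lsp.rs hunks below (not the full file contents):

// lib.rs (sketch)
pub mod lsp;                    // new umbrella module for LSP glue
use self::lsp::ext as lsp_ext;  // old name kept as an alias

// lsp.rs (sketch)
pub(crate) mod utils;           // was crate::lsp_utils
pub(crate) mod semantic_tokens; // was crate::semantic_tokens
pub mod ext;                    // was crate::lsp_ext
pub(crate) mod from_proto;      // was crate::from_proto
pub(crate) mod to_proto;        // was crate::to_proto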

View File

@@ -16,10 +16,12 @@
 };
 use serde_json::json;

-use crate::config::{Config, RustfmtConfig};
-use crate::line_index::PositionEncoding;
-use crate::lsp_ext::negotiated_encoding;
-use crate::semantic_tokens;
+use crate::{
+    config::{Config, RustfmtConfig},
+    line_index::PositionEncoding,
+    lsp::semantic_tokens,
+    lsp_ext::negotiated_encoding,
+};

 pub fn server_capabilities(config: &Config) -> ServerCapabilities {
     ServerCapabilities {

View File

@@ -21,7 +21,7 @@
 use crate::{
     cli::flags,
     line_index::{LineEndings, LineIndex, PositionEncoding},
-    to_proto,
+    lsp::to_proto,
     version::version,
 };

View File

@@ -9,7 +9,7 @@
 use rustc_hash::FxHashSet;
 use triomphe::Arc;

-use crate::lsp_ext;
+use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext};

 pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>;
@@ -122,3 +122,41 @@ fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagno
         && left.range == right.range
         && left.message == right.message
 }
+
+pub(crate) fn fetch_native_diagnostics(
+    snapshot: GlobalStateSnapshot,
+    subscriptions: Vec<FileId>,
+) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
+    let _p = profile::span("fetch_native_diagnostics");
+    let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
+    subscriptions
+        .into_iter()
+        .filter_map(|file_id| {
+            let line_index = snapshot.file_line_index(file_id).ok()?;
+            let diagnostics = snapshot
+                .analysis
+                .diagnostics(
+                    &snapshot.config.diagnostics(),
+                    ide::AssistResolveStrategy::None,
+                    file_id,
+                )
+                .ok()?
+                .into_iter()
+                .map(move |d| lsp_types::Diagnostic {
+                    range: lsp::to_proto::range(&line_index, d.range),
+                    severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
+                    code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
+                    code_description: Some(lsp_types::CodeDescription {
+                        href: lsp_types::Url::parse(&d.code.url()).unwrap(),
+                    }),
+                    source: Some("rust-analyzer".to_string()),
+                    message: d.message,
+                    related_information: None,
+                    tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
+                    data: None,
+                })
+                .collect::<Vec<_>>();
+            Some((file_id, diagnostics))
+        })
+        .collect()
+}

View File

@@ -8,8 +8,8 @@
 use vfs::{AbsPath, AbsPathBuf};

 use crate::{
-    global_state::GlobalStateSnapshot, line_index::PositionEncoding, lsp_ext,
-    to_proto::url_from_abs_path,
+    global_state::GlobalStateSnapshot, line_index::PositionEncoding,
+    lsp::to_proto::url_from_abs_path, lsp_ext,
 };

 use super::{DiagnosticsMapConfig, Fix};

View File

@@ -8,9 +8,9 @@
 use crate::{
     global_state::{GlobalState, GlobalStateSnapshot},
+    lsp::LspError,
     main_loop::Task,
     version::version,
-    LspError,
 };

 /// A visitor for routing a raw JSON request to an appropriate handler function.

View File

@@ -22,15 +22,14 @@
 use crate::{
     config::{Config, ConfigError},
     diagnostics::{CheckFixes, DiagnosticCollection},
-    from_proto,
     line_index::{LineEndings, LineIndex},
+    lsp::{from_proto, to_proto::url_from_abs_path},
     lsp_ext,
     main_loop::Task,
     mem_docs::MemDocs,
     op_queue::OpQueue,
     reload,
     task_pool::TaskPool,
-    to_proto::url_from_abs_path,
 };

 // Enforces drop order
@@ -40,7 +39,7 @@ pub(crate) struct Handle<H, C> {
 }

 pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
-pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
+type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;

 /// `GlobalState` is the primary mutable state of the language server
 ///
@@ -49,6 +48,7 @@ pub(crate) struct Handle<H, C> {
 /// incremental salsa database.
 ///
 /// Note that this struct has more than one impl in various modules!
+#[doc(alias = "GlobalMess")]
 pub(crate) struct GlobalState {
     sender: Sender<lsp_server::Message>,
     req_queue: ReqQueue,

View File

@@ -13,8 +13,12 @@
 use vfs::{AbsPathBuf, ChangeKind, VfsPath};

 use crate::{
-    config::Config, from_proto, global_state::GlobalState, lsp_ext::RunFlycheckParams,
-    lsp_utils::apply_document_changes, mem_docs::DocumentData, reload,
+    config::Config,
+    global_state::GlobalState,
+    lsp::{from_proto, utils::apply_document_changes},
+    lsp_ext::RunFlycheckParams,
+    mem_docs::DocumentData,
+    reload,
 };

 pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> {

View File

@@ -36,15 +36,17 @@
     cargo_target_spec::CargoTargetSpec,
     config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
     diff::diff,
-    from_proto,
     global_state::{GlobalState, GlobalStateSnapshot},
     line_index::LineEndings,
+    lsp::{
+        from_proto, to_proto,
+        utils::{all_edits_are_disjoint, invalid_params_error},
+        LspError,
+    },
     lsp_ext::{
         self, CrateInfoResult, ExternalDocsPair, ExternalDocsResponse, FetchDependencyListParams,
         FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams,
     },
-    lsp_utils::{all_edits_are_disjoint, invalid_params_error},
-    to_proto, LspError,
 };

 pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {

View File

@@ -23,18 +23,14 @@ macro_rules! eprintln {
 mod diagnostics;
 mod diff;
 mod dispatch;
-mod from_proto;
 mod global_state;
 mod line_index;
-mod lsp_utils;
 mod main_loop;
 mod markdown;
 mod mem_docs;
 mod op_queue;
 mod reload;
-mod semantic_tokens;
 mod task_pool;
-mod to_proto;
 mod version;

 mod handlers {
@@ -43,13 +39,12 @@ mod handlers {
 }

 pub mod config;
-pub mod lsp_ext;
+pub mod lsp;
+use self::lsp::ext as lsp_ext;

 #[cfg(test)]
 mod integrated_benchmarks;

-use std::fmt;
-
 use serde::de::DeserializeOwned;

 pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
@@ -61,23 +56,3 @@ pub fn from_json<T: DeserializeOwned>(
     serde_json::from_value(json.clone())
         .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
 }
-
-#[derive(Debug)]
-struct LspError {
-    code: i32,
-    message: String,
-}
-
-impl LspError {
-    fn new(code: i32, message: String) -> LspError {
-        LspError { code, message }
-    }
-}
-
-impl fmt::Display for LspError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "Language Server request failed with {}. ({})", self.code, self.message)
-    }
-}
-
-impl std::error::Error for LspError {}

View File

@@ -0,0 +1,29 @@
+//! Custom LSP definitions and protocol conversions.
+
+use core::fmt;
+
+pub(crate) mod utils;
+pub(crate) mod semantic_tokens;
+pub mod ext;
+pub(crate) mod from_proto;
+pub(crate) mod to_proto;
+
+#[derive(Debug)]
+pub(crate) struct LspError {
+    pub(crate) code: i32,
+    pub(crate) message: String,
+}
+
+impl LspError {
+    pub(crate) fn new(code: i32, message: String) -> LspError {
+        LspError { code, message }
+    }
+}
+
+impl fmt::Display for LspError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Language Server request failed with {}. ({})", self.code, self.message)
+    }
+}
+
+impl std::error::Error for LspError {}
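
For callers the change is mostly mechanical: paths that used to point at the crate root now go through lsp. A hypothetical call site, sketched from the diagnostics hunk above (line_index and d stand in for a real line index and diagnostic):

// before the restructure (hypothetical call site)
let range = crate::to_proto::range(&line_index, d.range);

// after the restructure
let range = crate::lsp::to_proto::range(&line_index, d.range);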

View File

@@ -12,8 +12,8 @@
     from_json,
     global_state::GlobalStateSnapshot,
     line_index::{LineIndex, PositionEncoding},
+    lsp::utils::invalid_params_error,
     lsp_ext,
-    lsp_utils::invalid_params_error,
 };

 pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> {

View File

@@ -22,9 +22,12 @@
     config::{CallInfoConfig, Config},
     global_state::GlobalStateSnapshot,
     line_index::{LineEndings, LineIndex, PositionEncoding},
+    lsp::{
+        semantic_tokens::{self, standard_fallback_type},
+        utils::invalid_params_error,
+        LspError,
+    },
     lsp_ext::{self, SnippetTextEdit},
-    lsp_utils::invalid_params_error,
-    semantic_tokens::{self, standard_fallback_type},
 };

 pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
@@ -1425,8 +1428,8 @@ pub(crate) mod command {
     use crate::{
         global_state::GlobalStateSnapshot,
+        lsp::to_proto::{location, location_link},
         lsp_ext,
-        to_proto::{location, location_link},
     };

     pub(crate) fn show_references(
@@ -1532,7 +1535,7 @@ pub(crate) fn markup_content(
     lsp_types::MarkupContent { kind, value }
 }

-pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
+pub(crate) fn rename_error(err: RenameError) -> LspError {
     // This is wrong, but we don't have a better alternative I suppose?
     // https://github.com/microsoft/language-server-protocol/issues/1341
     invalid_params_error(err.to_string())

View File

@@ -6,10 +6,10 @@
 use triomphe::Arc;

 use crate::{
-    from_proto,
     global_state::GlobalState,
     line_index::{LineEndings, LineIndex, PositionEncoding},
-    lsp_ext, LspError,
+    lsp::{from_proto, LspError},
+    lsp_ext,
 };

 pub(crate) fn invalid_params_error(message: String) -> LspError {

View File

@@ -17,11 +17,14 @@
 use crate::{
     config::Config,
+    diagnostics::fetch_native_diagnostics,
     dispatch::{NotificationDispatcher, RequestDispatcher},
-    from_proto,
     global_state::{file_id_to_url, url_to_file_id, GlobalState},
+    lsp::{
+        from_proto,
+        utils::{notification_is, Progress},
+    },
     lsp_ext,
-    lsp_utils::{notification_is, Progress},
     reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
 };
@@ -420,6 +423,32 @@ fn prime_caches(&mut self, cause: String) {
         });
     }

+    fn update_diagnostics(&mut self) {
+        let db = self.analysis_host.raw_database();
+        let subscriptions = self
+            .mem_docs
+            .iter()
+            .map(|path| self.vfs.read().0.file_id(path).unwrap())
+            .filter(|&file_id| {
+                let source_root = db.file_source_root(file_id);
+                // Only publish diagnostics for files in the workspace, not from crates.io deps
+                // or the sysroot.
+                // While theoretically these should never have errors, we have quite a few false
+                // positives particularly in the stdlib, and those diagnostics would stay around
+                // forever if we emitted them here.
+                !db.source_root(source_root).is_library
+            })
+            .collect::<Vec<_>>();
+        tracing::trace!("updating notifications for {:?}", subscriptions);
+
+        // Diagnostics are triggered by the user typing
+        // so we run them on a latency sensitive thread.
+        self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, {
+            let snapshot = self.snapshot();
+            move || Task::Diagnostics(fetch_native_diagnostics(snapshot, subscriptions))
+        });
+    }
+
     fn update_status_or_notify(&mut self) {
         let status = self.current_status();
         if self.last_reported_status.as_ref() != Some(&status) {
@@ -785,77 +814,4 @@ fn on_notification(&mut self, not: Notification) -> anyhow::Result<()> {
             .finish();
         Ok(())
     }
-
-    fn update_diagnostics(&mut self) {
-        let db = self.analysis_host.raw_database();
-        let subscriptions = self
-            .mem_docs
-            .iter()
-            .map(|path| self.vfs.read().0.file_id(path).unwrap())
-            .filter(|&file_id| {
-                let source_root = db.file_source_root(file_id);
-                // Only publish diagnostics for files in the workspace, not from crates.io deps
-                // or the sysroot.
-                // While theoretically these should never have errors, we have quite a few false
-                // positives particularly in the stdlib, and those diagnostics would stay around
-                // forever if we emitted them here.
-                !db.source_root(source_root).is_library
-            })
-            .collect::<Vec<_>>();
-        tracing::trace!("updating notifications for {:?}", subscriptions);
-
-        let snapshot = self.snapshot();
-
-        // Diagnostics are triggered by the user typing
-        // so we run them on a latency sensitive thread.
-        self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, move || {
-            let _p = profile::span("publish_diagnostics");
-            let _ctx = stdx::panic_context::enter("publish_diagnostics".to_owned());
-            let diagnostics = subscriptions
-                .into_iter()
-                .filter_map(|file_id| {
-                    let line_index = snapshot.file_line_index(file_id).ok()?;
-                    Some((
-                        file_id,
-                        line_index,
-                        snapshot
-                            .analysis
-                            .diagnostics(
-                                &snapshot.config.diagnostics(),
-                                ide::AssistResolveStrategy::None,
-                                file_id,
-                            )
-                            .ok()?,
-                    ))
-                })
-                .map(|(file_id, line_index, it)| {
-                    (
-                        file_id,
-                        it.into_iter()
-                            .map(move |d| lsp_types::Diagnostic {
-                                range: crate::to_proto::range(&line_index, d.range),
-                                severity: Some(crate::to_proto::diagnostic_severity(d.severity)),
-                                code: Some(lsp_types::NumberOrString::String(
-                                    d.code.as_str().to_string(),
-                                )),
-                                code_description: Some(lsp_types::CodeDescription {
-                                    href: lsp_types::Url::parse(&d.code.url()).unwrap(),
-                                }),
-                                source: Some("rust-analyzer".to_string()),
-                                message: d.message,
-                                related_information: None,
-                                tags: if d.unused {
-                                    Some(vec![lsp_types::DiagnosticTag::UNNECESSARY])
-                                } else {
-                                    None
-                                },
-                                data: None,
-                            })
-                            .collect::<Vec<_>>(),
-                    )
-                });
-            Task::Diagnostics(diagnostics.collect())
-        });
-    }
 }

View File

@@ -3,6 +3,7 @@
 const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];

+// FIXME: why is this in this crate?
 pub(crate) fn format_docs(src: &str) -> String {
     let mut processed_lines = Vec::new();
     let mut in_code_block = false;

View File

@@ -12,6 +12,7 @@
 //! correct. Instead, we try to provide a best-effort service. Even if the
 //! project is currently loading and we don't have a full project model, we
 //! still want to respond to various requests.
+// FIXME: This is a mess that needs some untangling work
 use std::{iter, mem};

 use flycheck::{FlycheckConfig, FlycheckHandle};

View File

@@ -29,7 +29,7 @@
     PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
     TextDocumentPositionParams, WorkDoneProgressParams,
 };
-use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
+use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
 use serde_json::json;
 use test_utils::skip_slow_tests;

View File

@@ -9,7 +9,7 @@
 use crossbeam_channel::{after, select, Receiver};
 use lsp_server::{Connection, Message, Notification, Request};
 use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
-use rust_analyzer::{config::Config, lsp_ext, main_loop};
+use rust_analyzer::{config::Config, lsp, main_loop};
 use serde::Serialize;
 use serde_json::{json, to_string_pretty, Value};
 use test_utils::FixtureWithProjectMeta;
@@ -260,9 +260,9 @@ pub(crate) fn wait_until_workspace_is_loaded(self) -> Server {
             Message::Notification(n) if n.method == "experimental/serverStatus" => {
                 let status = n
                     .clone()
-                    .extract::<lsp_ext::ServerStatusParams>("experimental/serverStatus")
+                    .extract::<lsp::ext::ServerStatusParams>("experimental/serverStatus")
                     .unwrap();
-                if status.health != lsp_ext::Health::Ok {
+                if status.health != lsp::ext::Health::Ok {
                     panic!("server errored/warned while loading workspace: {:?}", status.message);
                 }
                 status.quiescent

View File

@@ -35,7 +35,7 @@ fn check_lsp_extensions_docs() {
     let expected_hash = {
         let lsp_ext_rs = sh
-            .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp_ext.rs"))
+            .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp/ext.rs"))
             .unwrap();
         stable_hash(lsp_ext_rs.as_str())
     };
@@ -45,7 +45,7 @@ fn check_lsp_extensions_docs() {
         sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap();
     let text = lsp_extensions_md
         .lines()
-        .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
+        .find_map(|line| line.strip_prefix("lsp/ext.rs hash:"))
         .unwrap()
         .trim();
     u64::from_str_radix(text, 16).unwrap()
@@ -54,7 +54,7 @@ fn check_lsp_extensions_docs() {
     if actual_hash != expected_hash {
         panic!(
             "
-lsp_ext.rs was changed without touching lsp-extensions.md.
+lsp/ext.rs was changed without touching lsp-extensions.md.

 Expected hash: {expected_hash:x}
 Actual hash: {actual_hash:x}

View File

@@ -1,5 +1,5 @@
 <!---
-lsp_ext.rs hash: 149a5be3c5e469d1
+lsp/ext.rs hash: 149a5be3c5e469d1
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue: