//! Project loading & configuration updates.
//!
//! This is quite tricky. The main problem is time and changes -- there's no
//! fixed "project" rust-analyzer is working with; the "current project" is
//! itself mutable state. For example, when the user edits `Cargo.toml` by
//! adding a new dependency, the project model changes. What's more, switching
//! the project model is not instantaneous -- it takes time to run `cargo
//! metadata` and (for proc macros) `cargo check`.
//!
//! The main guiding principle here is, as elsewhere in rust-analyzer,
//! robustness. We try not to assume that the project model exists or is
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
use std::{mem, sync::Arc};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::base_db::{
    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
    ProcMacroLoadResult, SourceRoot, VfsPath,
};
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use syntax::SmolStr;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
    op_queue::Cause,
};
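
/// Progress of fetching the project workspaces, reported from the loader task
/// back to the main loop.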
#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>),
}
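
/// Progress of running build scripts for the fetched workspaces, likewise
/// reported from the task pool back to the main loop.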
#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}

impl GlobalState {
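    /// Returns `true` when no workspace fetch, build-script run, or VFS load
    /// is in flight, i.e. the server's view of the world is up to date.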
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }
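
    /// Swaps in a new `Config`, applying side effects (LRU capacity, workspace
    /// re-fetch, flycheck restart, attribute proc-macro expansion) only for the
    /// settings that actually changed.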
    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_capacity() != old_config.lru_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_capacity());
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        if self.analysis_host.raw_database().enable_proc_attr_macros()
            != self.config.expand_proc_attr_macros()
        {
            self.analysis_host
                .raw_database_mut()
                .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
        }
    }
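
    /// Computes the status (health, quiescence, message) that is reported to
    /// the client.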
    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };

        if self.proc_macro_changed {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Reload required due to source changes of a procedural macro.".into())
        }
        if self.fetch_build_data_error().is_err() {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Failed to run build scripts of some packages, check the logs.".to_string());
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            status.message = Some("Workspace reload required".to_string())
        }

        if let Err(error) = self.fetch_workspace_error() {
            status.health = lsp_ext::Health::Error;
            status.message = Some(error)
        }
        status
    }
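
    /// Kicks off discovery of the project workspaces on the task pool; results
    /// and progress come back to the main loop as `Task::FetchWorkspace`
    /// messages.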
    pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
        tracing::info!(%cause, "will fetch workspaces");

        self.task_pool.handle.spawn_with_sender({
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                            )
                        }
                    })
                    .collect::<Vec<_>>();

                if !detached_files.is_empty() {
                    workspaces
                        .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                }

                tracing::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                    .unwrap();
            }
        });
    }
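
    /// Runs the build scripts of the current workspaces on the task pool,
    /// reporting progress and results back as `Task::FetchBuildData` messages.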
    pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
        tracing::info!(%cause, "will fetch build data");
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let mut res = Vec::new();
            for ws in workspaces.iter() {
                res.push(ws.run_build_scripts(&config, &progress));
            }
            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }
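
    /// Applies the results of the latest workspace fetch and build-script run:
    /// swaps in the new project model, reconfigures file watching, spawns the
    /// proc-macro server if needed, and rebuilds the crate graph.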
    pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
        let _p = profile::span("GlobalState::switch_workspaces");
        tracing::info!(%cause, "will switch workspaces");

        if let Err(error_message) = self.fetch_workspace_error() {
            self.show_and_log_error(error_message, None);
            if !self.workspaces.is_empty() {
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        if let Err(error) = self.fetch_build_data_error() {
            self.show_and_log_error(
                "rust-analyzer failed to run build scripts".to_string(),
                Some(error),
            );
        }

        let workspaces = self
            .fetch_workspaces_queue
            .last_op_result()
            .iter()
            .filter_map(|res| res.as_ref().ok().cloned())
            .collect::<Vec<_>>();
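
        // Two workspaces are considered equal if they differ at most in their
        // build-script data: in that case the old workspaces can be kept and
        // only the new build-script results need to be patched in.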
        fn eq_ignore_build_data<'a>(
            left: &'a ProjectWorkspace,
            right: &'a ProjectWorkspace,
        ) -> bool {
            let key = |p: &'a ProjectWorkspace| match p {
                ProjectWorkspace::Cargo {
                    cargo,
                    sysroot,
                    rustc,
                    rustc_cfg,
                    cfg_overrides,

                    build_scripts: _,
                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
                _ => None,
            };
            match (key(left), key(right)) {
                (Some(lk), Some(rk)) => lk == rk,
                _ => left == right,
            }
        }

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| eq_ignore_build_data(l, r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(workspaces, &self.workspaces) {
                tracing::debug!("set build scripts to workspaces");

                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                tracing::info!("build scripts do not match the version of the active workspace");
                // Current build scripts do not match the version of the active
                // workspace, so there's nothing for us to update.
                return;
            }
        } else {
            tracing::debug!("abandon build scripts for workspaces");

            // Here, we completely changed the workspace (Cargo.toml edit), so
            // we don't care about build-script results, they are stale.
            self.workspaces = Arc::new(workspaces)
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                watchers: self
                    .workspaces
                    .iter()
                    .flat_map(|ws| ws.to_roots())
                    .filter(|it| it.is_local)
                    .flat_map(|root| {
                        root.include.into_iter().flat_map(|it| {
                            [
                                format!("{}/**/*.rs", it.display()),
                                format!("{}/**/Cargo.toml", it.display()),
                                format!("{}/**/Cargo.lock", it.display()),
                            ]
                        })
                    })
                    .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
                    .collect(),
            };
            let registration = lsp_types::Registration {
                id: "workspace/didChangeWatchedFiles".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(serde_json::to_value(registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        let mut change = Change::new();

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        if self.proc_macro_client.is_none() {
            if let Some((path, args)) = self.config.proc_macro_srv() {
                match ProcMacroServer::spawn(path.clone(), args) {
                    Ok(it) => self.proc_macro_client = Some(it),
                    Err(err) => {
                        tracing::error!(
                            "Failed to run proc_macro_srv from path {}, error: {:?}",
                            path.display(),
                            err
                        );
                    }
                }
            }
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Server => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });

        // Create crate graph from all the workspaces
        let crate_graph = {
            let proc_macro_client = self.proc_macro_client.as_ref();
            let dummy_replacements = self.config.dummy_replacements();
            let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
                load_proc_macro(
                    proc_macro_client,
                    path,
                    dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
                )
            };

            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            let mem_docs = &self.mem_docs;
            let mut load = move |path: &AbsPath| {
                let _p = profile::span("GlobalState::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                if !mem_docs.contains(&vfs_path) {
                    let contents = loader.handle.load_sync(path);
                    vfs.set_file_contents(vfs_path.clone(), contents);
                }
                let res = vfs.file_id(&vfs_path);
                if res.is_none() {
                    tracing::warn!("failed to load {}", path.display())
                }
                res
            };

            let mut crate_graph = CrateGraph::default();
            for ws in self.workspaces.iter() {
                crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
            }
            crate_graph
        };
        change.set_crate_graph(crate_graph);

        self.source_root_config = project_folders.source_root_config;

        self.analysis_host.apply_change(change);
        self.process_changes();
        self.reload_flycheck();
        tracing::info!("did switch workspaces");
    }
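
    /// Collects the errors of the last workspace fetch into a single message;
    /// `Ok(())` means every linked project loaded successfully.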
    fn fetch_workspace_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        for ws in self.fetch_workspaces_queue.last_op_result() {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return Ok(());
        }

        Err(buf)
    }
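
    /// Collects the errors of the last build-script run into a single message,
    /// covering both failed build scripts and I/O failures while running them.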
    fn fetch_build_data_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            match ws {
                Ok(data) => match data.error() {
                    Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr),
                    _ => (),
                },
                // I/O errors: running the build scripts failed altogether.
                Err(err) => stdx::format_to!(buf, "{:#}\n", err),
            }
        }

        if buf.is_empty() {
            Ok(())
        } else {
            Err(buf)
        }
    }
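
    /// Tears down the existing flycheck (`cargo check`) workers and spawns a
    /// fresh set based on the current configuration and workspaces.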
    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = match self.config.flycheck() {
            Some(it) => it,
            None => {
                self.flycheck = Vec::new();
                self.diagnostics.clear_check();
                return;
            }
        };

        let sender = self.flycheck_sender.clone();
        self.flycheck = self
            .workspaces
            .iter()
            .enumerate()
            .filter_map(|(id, w)| match w {
                ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                ProjectWorkspace::Json { project, .. } => {
                    // Enable flychecks for json projects if a custom flycheck command was supplied
                    // in the workspace configuration.
                    match config {
                        FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                        _ => None,
                    }
                }
                ProjectWorkspace::DetachedFiles { .. } => None,
            })
            .map(|(id, root)| {
                let sender = sender.clone();
                FlycheckHandle::spawn(
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
                    root.to_path_buf(),
                )
            })
            .collect();
    }
}
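
/// The sets of files and directories, derived from the workspaces, that the
/// VFS should load and (for local roots) watch.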
#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_local {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_local {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}
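
/// Assigns the files known to the VFS to source roots, remembering which of
/// them are local (part of the user's project rather than a dependency).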
#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}

/// Load the proc-macros for the given lib path, replacing all expanders whose
/// names are in `dummy_replace` with an identity dummy expander.
pub(crate) fn load_proc_macro(
    server: Option<&ProcMacroServer>,
    path: &AbsPath,
    dummy_replace: &[Box<str>],
) -> ProcMacroLoadResult {
    let res: Result<Vec<_>, String> = (|| {
        let dylib = MacroDylib::new(path.to_path_buf())
            .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
        let server = server.ok_or_else(|| "Proc-macro server not started".to_string())?;
        let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
        if vec.is_empty() {
            return Err("proc macro library returned no proc macros".to_string());
        }
        Ok(vec
            .into_iter()
            .map(|expander| expander_to_proc_macro(expander, dummy_replace))
            .collect())
    })();
    return match res {
        Ok(proc_macros) => {
            tracing::info!(
                "Loaded proc-macros for {}: {:?}",
                path.display(),
                proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
            );
            Ok(proc_macros)
        }
        Err(e) => {
            tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
            Err(e)
        }
    };

    fn expander_to_proc_macro(
        expander: proc_macro_api::ProcMacro,
        dummy_replace: &[Box<str>],
    ) -> ProcMacro {
        let name = SmolStr::from(expander.name());
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander: Arc<dyn ProcMacroExpander> =
            if dummy_replace.iter().any(|replace| &**replace == name) {
                Arc::new(DummyExpander)
            } else {
                Arc::new(Expander(expander))
            };
        ProcMacro { name, kind, expander }
    }
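
    /// Expander that delegates to the out-of-process proc-macro server. The
    /// server replies with a nested `Result`: the outer layer signals transport
    /// errors (e.g. the server process died), the inner one a panic inside the
    /// macro itself.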
    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacro);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            match self.0.expand(subtree, attrs, env) {
                Ok(Ok(subtree)) => Ok(subtree),
                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
            }
        }
    }

    /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
    #[derive(Debug)]
    struct DummyExpander;

    impl ProcMacroExpander for DummyExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(subtree.clone())
        }
    }
}
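
/// Heuristic for whether a file-system change can alter the shape of the
/// project model and therefore warrants re-fetching the workspaces. Manifest
/// and lockfile changes always do; other files only matter when created or
/// deleted in a location cargo treats specially: implicit targets (`src/bin`,
/// `examples`, ...) and `.cargo/config`.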
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
    let file_name = path.file_name().unwrap_or_default();

    if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
        return true;
    }
    if change_kind == ChangeKind::Modify {
        return false;
    }
    if path.extension().unwrap_or_default() != "rs" {
        if (file_name == "config.toml" || file_name == "config")
            && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")) == Some(true)
        {
            return true;
        }
        return false;
    }
    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
            return true;
        }
    }
    false
}
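
// A minimal illustrative sketch of the heuristic above, not part of the
// original module: the paths and cases are hypothetical examples, and the
// tests assume Unix-style absolute paths (`AbsPathBuf::assert` panics on
// relative ones).
#[cfg(test)]
mod tests {
    use super::*;

    fn abs(path: &str) -> AbsPathBuf {
        AbsPathBuf::assert(path.into())
    }

    #[test]
    fn manifest_and_lockfile_changes_always_refresh() {
        assert!(should_refresh_for_change(&abs("/p/Cargo.toml"), ChangeKind::Modify));
        assert!(should_refresh_for_change(&abs("/p/Cargo.lock"), ChangeKind::Modify));
    }

    #[test]
    fn editing_an_existing_source_file_does_not_refresh() {
        // Modifying a file in place can't add or remove implicit targets.
        assert!(!should_refresh_for_change(&abs("/p/src/foo.rs"), ChangeKind::Modify));
    }

    #[test]
    fn creating_implicit_targets_refreshes() {
        assert!(should_refresh_for_change(&abs("/p/src/bin/cli.rs"), ChangeKind::Create));
        assert!(should_refresh_for_change(&abs("/p/examples/demo/main.rs"), ChangeKind::Create));
    }
}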