Auto merge of #14603 - Veykril:workspaces, r=Veykril

fix: Deduplicate passed workspaces by top level cargo workspace they belong to

Fixes https://github.com/rust-lang/rust-analyzer/issues/14571

With this, we should support VS Code (multi-root) workspaces properly.
This commit is contained in:
bors 2023-04-18 12:28:00 +00:00
commit e84781ad58
5 changed files with 56 additions and 47 deletions

View File

@@ -27,7 +27,7 @@
};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{de::DeserializeOwned, Deserialize};
use vfs::AbsPathBuf;
use vfs::{AbsPath, AbsPathBuf};
use crate::{
caps::completion_item_edit_resolve,
@@ -535,8 +535,9 @@ fn default() -> Self {
#[derive(Debug, Clone)]
pub struct Config {
pub discovered_projects: Option<Vec<ProjectManifest>>,
pub workspace_roots: Vec<AbsPathBuf>,
discovered_projects: Vec<ProjectManifest>,
/// The workspace roots as registered by the LSP client
workspace_roots: Vec<AbsPathBuf>,
caps: lsp_types::ClientCapabilities,
root_path: AbsPathBuf,
data: ConfigData,
@@ -742,7 +743,7 @@ pub fn new(
caps,
data: ConfigData::default(),
detached_files: Vec::new(),
discovered_projects: None,
discovered_projects: Vec::new(),
root_path,
snippets: Default::default(),
workspace_roots,
@@ -755,7 +756,17 @@ pub fn rediscover_workspaces(&mut self) {
if discovered.is_empty() {
tracing::error!("failed to find any projects in {:?}", &self.workspace_roots);
}
self.discovered_projects = Some(discovered);
self.discovered_projects = discovered;
}
pub fn remove_workspace(&mut self, path: &AbsPath) {
    // Unregister a client workspace root: drop the first entry equal to
    // `path`, leaving any later duplicates untouched. No-op if absent.
    let found = self.workspace_roots.iter().position(|root| root == path);
    if let Some(index) = found {
        self.workspace_roots.remove(index);
    }
}
pub fn add_workspaces(&mut self, paths: impl Iterator<Item = AbsPathBuf>) {
    // Register additional client-provided workspace roots.
    // Duplicates are not filtered here; callers dedupe on rediscovery.
    for root in paths {
        self.workspace_roots.push(root);
    }
}
pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
@@ -860,25 +871,19 @@ pub fn has_linked_projects(&self) -> bool {
pub fn linked_projects(&self) -> Vec<LinkedProject> {
match self.data.linkedProjects.as_slice() {
[] => {
match self.discovered_projects.as_ref() {
Some(discovered_projects) => {
let exclude_dirs: Vec<_> = self
.data
.files_excludeDirs
.iter()
.map(|p| self.root_path.join(p))
.collect();
discovered_projects
.iter()
.filter(|(ProjectManifest::ProjectJson(path) | ProjectManifest::CargoToml(path))| {
let exclude_dirs: Vec<_> =
self.data.files_excludeDirs.iter().map(|p| self.root_path.join(p)).collect();
self.discovered_projects
.iter()
.filter(
|(ProjectManifest::ProjectJson(path)
| ProjectManifest::CargoToml(path))| {
!exclude_dirs.iter().any(|p| path.starts_with(p))
})
.cloned()
.map(LinkedProject::from)
.collect()
}
None => Vec::new(),
}
},
)
.cloned()
.map(LinkedProject::from)
.collect()
}
linked_projects => linked_projects
.iter()

View File

@@ -777,14 +777,7 @@ pub(crate) fn handle_runnables(
}
}
None => {
if !snap.config.linked_projects().is_empty()
|| !snap
.config
.discovered_projects
.as_ref()
.map(|projects| projects.is_empty())
.unwrap_or(true)
{
if !snap.config.linked_projects().is_empty() {
res.push(lsp_ext::Runnable {
label: "cargo check --workspace".to_string(),
location: None,

View File

@@ -908,8 +908,10 @@ fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()), ());
this.fetch_workspaces_queue.request_op(
format!("DidSaveTextDocument {}", abs_path.display()),
(),
);
}
}
@@ -972,8 +974,7 @@ fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool {
for workspace in params.event.removed {
let Ok(path) = workspace.uri.to_file_path() else { continue };
let Ok(path) = AbsPathBuf::try_from(path) else { continue };
let Some(position) = config.workspace_roots.iter().position(|it| it == &path) else { continue };
config.workspace_roots.remove(position);
config.remove_workspace(&path);
}
let added = params
@@ -982,11 +983,12 @@ fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool {
.into_iter()
.filter_map(|it| it.uri.to_file_path().ok())
.filter_map(|it| AbsPathBuf::try_from(it).ok());
config.workspace_roots.extend(added);
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
this.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), ())
}
config.add_workspaces(added);
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
this.fetch_workspaces_queue
.request_op("client workspaces changed".to_string(), ())
}
Ok(())
})?

View File

@@ -212,6 +212,20 @@ pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
})
.collect::<Vec<_>>();
let mut i = 0;
while i < workspaces.len() {
if let Ok(w) = &workspaces[i] {
if let Some(dupe) = workspaces[i + 1..]
.iter()
.filter_map(|it| it.as_ref().ok())
.position(|ws| ws.eq_ignore_build_data(w))
{
_ = workspaces.remove(dupe);
}
}
i += 1;
}
if !detached_files.is_empty() {
workspaces.push(project_model::ProjectWorkspace::load_detached_files(
detached_files,

View File

@ -9,7 +9,6 @@
use crossbeam_channel::{after, select, Receiver};
use lsp_server::{Connection, Message, Notification, Request};
use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
use project_model::ProjectManifest;
use rust_analyzer::{config::Config, lsp_ext, main_loop};
use serde::Serialize;
use serde_json::{json, to_string_pretty, Value};
@@ -101,10 +100,6 @@ pub(crate) fn server(self) -> Server {
if roots.is_empty() {
roots.push(tmp_dir_path.clone());
}
let discovered_projects = roots
.into_iter()
.map(|it| ProjectManifest::discover_single(&it).unwrap())
.collect::<Vec<_>>();
let mut config = Config::new(
tmp_dir_path,
@@ -144,10 +139,10 @@ pub(crate) fn server(self) -> Server {
})),
..Default::default()
},
Vec::new(),
roots,
);
config.discovered_projects = Some(discovered_projects);
config.update(self.config).expect("invalid config");
config.rediscover_workspaces();
Server::new(tmp_dir, config)
}