Auto merge of #14427 - davidbarsky:davidbarsky/allow-subsequent-workspaces-to-have-proc-macros, r=Veykril
fix: allow new, subsequent `rust-project.json`-based workspaces to get proc macro expansion

As detailed in https://github.com/rust-lang/rust-analyzer/issues/14417#issuecomment-1485336174, `rust-project.json` workspaces added after the initial `rust-project.json`-based workspace had already been indexed by rust-analyzer would not receive procedural macro expansion, even though `config.expand_proc_macros` returned true. To fix this issue:

1. I changed `reload.rs` to check which workspaces are newly added.
2. Spawned new procedural macro expansion servers for the _new_ workspaces only (see the sketch below).
   - This prevents spawning duplicate procedural macro expansion servers for already existing workspaces. While the memory overhead of duplicate servers is minimal, this is more about the _principle_ of not leaking processes 😅.
3. Launched procedural macro expansion if any workspace is `rust-project.json`-based _or_ `same_workspaces` is true. `same_workspaces` being true (and reachable) indicates that build scripts have finished building (in Cargo-based projects), while the build scripts in `rust-project.json`-based projects have _already been built_ by the build system that produced the `rust-project.json`.

I couldn't really think of a better way to structure this code without engaging with https://github.com/rust-lang/rust-analyzer/issues/7444.
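To make items 1 and 2 concrete, here is a minimal, self-contained sketch of spawning servers only for workspaces that were not known before. The types and names (`Workspace`, `ProcMacroServer`, `spawn_for_new_workspaces`) are illustrative stand-ins, not rust-analyzer's actual API:

```rust
use std::collections::HashSet;

// Illustrative stand-ins, not rust-analyzer's actual types.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Workspace(String);

#[derive(Debug)]
struct ProcMacroServer {
    workspace: Workspace,
}

// In rust-analyzer this would launch a proc-macro-srv process; here it only
// records which workspace the server belongs to.
fn spawn_server(ws: &Workspace) -> ProcMacroServer {
    ProcMacroServer { workspace: ws.clone() }
}

// Spawn servers only for workspaces that were not present before, so existing
// server processes are neither duplicated nor leaked.
fn spawn_for_new_workspaces(
    known: &[Workspace],
    current: &[Workspace],
    servers: &mut Vec<ProcMacroServer>,
) {
    let known: HashSet<&Workspace> = known.iter().collect();
    for ws in current {
        if !known.contains(ws) {
            servers.push(spawn_server(ws));
        }
    }
}

fn main() {
    let old = vec![Workspace("project-a/rust-project.json".into())];
    let new = vec![
        Workspace("project-a/rust-project.json".into()),
        Workspace("project-b/rust-project.json".into()),
    ];
    let mut servers: Vec<ProcMacroServer> = old.iter().map(spawn_server).collect();
    spawn_for_new_workspaces(&old, &new, &mut servers);
    // Only one extra server was spawned, for the newly added project-b.
    assert_eq!(servers.len(), 2);
    println!("{servers:?}");
}
```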
Commit: b915eb32fa
@@ -17,7 +17,7 @@
 // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
 // then the crate for the proc-macro hasn't been build yet as the build data is missing.
-pub type ProcMacroPaths = FxHashMap<CrateId, Option<(Option<String>, AbsPathBuf)>>;
+pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
 pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>;
 
 /// Files are grouped into source roots. A source root is a directory on the
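The effect of this type change is that callers can attach a reason when a proc-macro dylib is unavailable instead of a bare `None`. A minimal, self-contained sketch of consuming such a map (`CrateId` and the path type are simplified stand-ins, not rust-analyzer's actual definitions):

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// Simplified stand-ins for CrateId / AbsPathBuf and the new map shape.
type CrateId = u32;
type ProcMacroPaths = HashMap<CrateId, Result<(Option<String>, PathBuf), String>>;

fn report(paths: &ProcMacroPaths) {
    for (krate, entry) in paths {
        match entry {
            // A dylib is available; the crate name may still be unknown.
            Ok((name, dylib)) => {
                let name = name.as_deref().unwrap_or("<unnamed>");
                println!("crate {krate}: proc macros from {name} at {}", dylib.display());
            }
            // The error string carries *why* expansion is unavailable,
            // which the old `Option`-based value could not express.
            Err(reason) => println!("crate {krate}: no proc macros: {reason}"),
        }
    }
}

fn main() {
    let mut paths = ProcMacroPaths::new();
    paths.insert(0, Ok((Some("serde_derive".to_owned()), PathBuf::from("libserde_derive.so"))));
    paths.insert(1, Err("crate has not yet been build".to_owned()));
    report(&paths);
}
```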
@@ -667,6 +667,14 @@ pub fn eq_ignore_build_data(&self, other: &Self) -> bool {
             _ => false,
         }
     }
+
+    /// Returns `true` if the project workspace is [`Json`].
+    ///
+    /// [`Json`]: ProjectWorkspace::Json
+    #[must_use]
+    pub fn is_json(&self) -> bool {
+        matches!(self, Self::Json { .. })
+    }
 }
 
 fn project_json_to_crate_graph(
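The new `is_json` predicate is what item 3 of the description relies on: deciding whether proc-macro expansion can be requested right away. A minimal, self-contained sketch of that kind of check; the `Workspace` enum and `should_request_proc_macro_expansion` are illustrative stand-ins, not the actual rust-analyzer code:

```rust
// Illustrative stand-in for ProjectWorkspace; only the shape needed here.
enum Workspace {
    Cargo { name: String },
    Json { manifest: String },
}

impl Workspace {
    fn is_json(&self) -> bool {
        matches!(self, Workspace::Json { .. })
    }
}

// Expansion can be requested if build scripts are known to be ready
// (`same_workspaces`) or if any workspace came from rust-project.json,
// where the build system has already produced the proc-macro dylibs.
fn should_request_proc_macro_expansion(workspaces: &[Workspace], same_workspaces: bool) -> bool {
    same_workspaces || workspaces.iter().any(Workspace::is_json)
}

fn main() {
    let json = vec![Workspace::Json { manifest: "rust-project.json".to_owned() }];
    assert!(should_request_proc_macro_expansion(&json, false));

    let cargo_only = vec![Workspace::Cargo { name: "foo".to_owned() }];
    assert!(!should_request_proc_macro_expansion(&cargo_only, false));
    assert!(should_request_proc_macro_expansion(&cargo_only, true));
    println!("checks passed");
}
```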
@@ -678,7 +686,7 @@ fn project_json_to_crate_graph(
     target_layout: TargetLayoutLoadResult,
 ) -> (CrateGraph, ProcMacroPaths) {
     let mut crate_graph = CrateGraph::default();
-    let mut proc_macros = FxHashMap::default();
+    let mut proc_macros: ProcMacroPaths = FxHashMap::default();
     let sysroot_deps = sysroot.as_ref().map(|sysroot| {
         sysroot_to_crate_graph(
             &mut crate_graph,
@@ -730,13 +738,11 @@ fn project_json_to_crate_graph(
                 );
                 if krate.is_proc_macro {
                     if let Some(path) = krate.proc_macro_dylib_path.clone() {
-                        proc_macros.insert(
-                            crate_graph_crate_id,
-                            Some((
-                                krate.display_name.as_ref().map(|it| it.canonical_name().to_owned()),
-                                path,
-                            )),
-                        );
+                        let node = Ok((
+                            krate.display_name.as_ref().map(|it| it.canonical_name().to_owned()),
+                            path,
+                        ));
+                        proc_macros.insert(crate_graph_crate_id, node);
                     }
                 }
                 (crate_id, crate_graph_crate_id)
@@ -1180,8 +1186,8 @@ fn add_target_crate_root(
     );
     if is_proc_macro {
         let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
-            Some(it) => it.cloned().map(|path| Some((Some(cargo_name.to_owned()), path))),
-            None => Some(None),
+            Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))),
+            None => Some(Err("crate has not yet been build".to_owned())),
         };
         if let Some(proc_macro) = proc_macro {
             proc_macros.insert(crate_id, proc_macro);
@@ -102,7 +102,7 @@ pub fn load_workspace(
             (
                 crate_id,
                 path.map_or_else(
-                    || Err("proc macro crate is missing dylib".to_owned()),
+                    |_| Err("proc macro crate is missing dylib".to_owned()),
                     |(_, path)| load_proc_macro(proc_macro_server, &path, &[]),
                 ),
             )
@@ -251,7 +251,7 @@ pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPat
             (
                 crate_id,
                 res.map_or_else(
-                    || Err("proc macro crate is missing dylib".to_owned()),
+                    |_| Err("proc macro crate is missing dylib".to_owned()),
                     |(crate_name, path)| {
                         progress(path.display().to_string());
                         load_proc_macro(
@@ -370,7 +370,7 @@ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
         let files_config = self.config.files();
         let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
 
-        if self.proc_macro_clients.is_empty() {
+        if self.proc_macro_clients.is_empty() || !same_workspaces {
            if let Some((path, path_manually_set)) = self.config.proc_macro_srv() {
                tracing::info!("Spawning proc-macro servers");
                self.proc_macro_clients = self
@@ -448,20 +448,9 @@ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
         };
         let mut change = Change::new();
 
-        if same_workspaces {
             if self.config.expand_proc_macros() {
                 self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
             }
-        } else {
-            // Set up errors for proc-macros upfront that we haven't run build scripts yet
-            let mut proc_macros = FxHashMap::default();
-            for paths in proc_macro_paths {
-                proc_macros.extend(paths.into_iter().map(move |(crate_id, _)| {
-                    (crate_id, Err("crate has not yet been build".to_owned()))
-                }));
-            }
-            change.set_proc_macros(proc_macros);
-        }
         change.set_crate_graph(crate_graph);
         self.analysis_host.apply_change(change);
         self.process_changes();