diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 3a3effae036..f0a4b9bf855 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -295,7 +295,7 @@ pub struct CrateData {
 
 impl CrateData {
     /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
-    fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
+    pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
         // This method has some obscure bits. These are mostly there to be compliant with
         // some patches. References to the patches are given.
         if self.root_file_id != other.root_file_id {
@@ -622,8 +622,9 @@ pub fn extend(
         &mut self,
         mut other: CrateGraph,
         proc_macros: &mut ProcMacroPaths,
-        may_merge: impl Fn((CrateId, &CrateData), (CrateId, &CrateData)) -> bool,
+        merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool,
     ) -> FxHashMap<CrateId, CrateId> {
+        let m = self.len();
        let topo = other.crates_in_topological_order();
        let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
        for topo in topo {
@@ -631,48 +632,14 @@ pub fn extend(
             crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
             crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
 
-            let res = self.arena.iter().find_map(|(id, data)| {
-                if !may_merge((id, &data), (topo, &crate_data)) {
-                    return None;
-                }
+            let res = self
+                .arena
+                .iter_mut()
+                .take(m)
+                .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id));
 
-                match (&data.origin, &crate_data.origin) {
-                    (a, b) if a == b => {
-                        if data.eq_ignoring_origin_and_deps(crate_data, false) {
-                            return Some((id, false));
-                        }
-                    }
-                    (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
-                    | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => {
-                        // If the origins differ, check if the two crates are equal without
-                        // considering the dev dependencies, if they are, they most likely are in
-                        // different loaded workspaces which may cause issues. We keep the local
-                        // version and discard the library one as the local version may have
-                        // dev-dependencies that we want to keep resolving. See #15656 for more
-                        // information.
-                        if data.eq_ignoring_origin_and_deps(crate_data, true) {
-                            return Some((id, !a.is_local()));
-                        }
-                    }
-                    (_, _) => return None,
-                }
-
-                None
-            });
-
-            let new_id = if let Some((res, should_update_lib_to_local)) = res {
-                if should_update_lib_to_local {
-                    assert!(self.arena[res].origin.is_lib());
-                    assert!(crate_data.origin.is_local());
-                    self.arena[res].origin = crate_data.origin.clone();
-
-                    // Move local's dev dependencies into the newly-local-formerly-lib crate.
-                    self.arena[res].dependencies = crate_data.dependencies.clone();
-                }
-                res
-            } else {
-                self.arena.alloc(crate_data.clone())
-            };
+            let new_id =
+                if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) };
 
             id_map.insert(topo, new_id);
         }
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index 017b055d729..b9b1b701f6d 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -238,7 +238,7 @@ fn crate_graph_dedup_identical() {
 
     let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone());
 
-    crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_, _| true);
+    crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b);
     assert!(crate_graph.iter().eq(d_crate_graph.iter()));
     assert_eq!(proc_macros, d_proc_macros);
 }
@@ -254,62 +254,10 @@ fn crate_graph_dedup() {
         load_cargo_with_fake_sysroot(path_map, "regex-metadata.json");
     assert_eq!(regex_crate_graph.iter().count(), 60);
 
-    crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_, _| true);
+    crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b);
     assert_eq!(crate_graph.iter().count(), 118);
 }
 
-#[test]
-fn test_deduplicate_origin_dev() {
-    let path_map = &mut Default::default();
-    let (mut crate_graph, _proc_macros) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json");
-    crate_graph.sort_deps();
-    let (crate_graph_1, mut _proc_macros_2) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json");
-
-    crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true);
-
-    let mut crates_named_p2 = vec![];
-    for id in crate_graph.iter() {
-        let krate = &crate_graph[id];
-        if let Some(name) = krate.display_name.as_ref() {
-            if name.to_string() == "p2" {
-                crates_named_p2.push(krate);
-            }
-        }
-    }
-
-    assert!(crates_named_p2.len() == 1);
-    let p2 = crates_named_p2[0];
-    assert!(p2.origin.is_local());
-}
-
-#[test]
-fn test_deduplicate_origin_dev_rev() {
-    let path_map = &mut Default::default();
-    let (mut crate_graph, _proc_macros) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json");
-    crate_graph.sort_deps();
-    let (crate_graph_1, mut _proc_macros_2) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json");
-
-    crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true);
-
-    let mut crates_named_p2 = vec![];
-    for id in crate_graph.iter() {
-        let krate = &crate_graph[id];
-        if let Some(name) = krate.display_name.as_ref() {
-            if name.to_string() == "p2" {
-                crates_named_p2.push(krate);
-            }
-        }
-    }
-
-    assert!(crates_named_p2.len() == 1);
-    let p2 = crates_named_p2[0];
-    assert!(p2.origin.is_local());
-}
-
 #[test]
 fn smoke_test_real_sysroot_cargo() {
     if std::env::var("SYSROOT_CARGO_METADATA").is_err() {
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 9d3d0168806..fcebca80b3e 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -1411,7 +1411,7 @@ fn sysroot_to_crate_graph(
     // Remove all crates except the ones we are interested in to keep the sysroot graph small.
     let removed_mapping = cg.remove_crates_except(&marker_set);
 
-    let mapping = crate_graph.extend(cg, &mut pm, |_, _| true);
+    let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b);
 
     // Map the id through the removal mapping first, then through the crate graph extension mapping.
     pub_deps.iter_mut().for_each(|(_, cid, _)| {
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index b1809f58ae7..473ca991ad9 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -47,7 +47,9 @@ pub mod tracing {
 
 use serde::de::DeserializeOwned;
 
-pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
+pub use crate::{
+    caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version,
+};
 
 pub fn from_json<T: DeserializeOwned>(
     what: &'static str,
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index cfdb5c1861c..8a9ee177728 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -17,8 +17,9 @@
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::{db::DefDatabase, Change, ProcMacros};
+use ide::CrateId;
 use ide_db::{
-    base_db::{salsa::Durability, CrateGraph, ProcMacroPaths},
+    base_db::{salsa::Durability, CrateGraph, CrateOrigin, ProcMacroPaths, Version},
     FxHashMap,
 };
 use itertools::Itertools;
@@ -28,7 +29,7 @@
 use rustc_hash::FxHashSet;
 use stdx::{format_to, thread::ThreadIntent};
 use triomphe::Arc;
-use vfs::{AbsPath, ChangeKind};
+use vfs::{AbsPath, AbsPathBuf, ChangeKind};
 
 use crate::{
     config::{Config, FilesWatcher, LinkedProject},
@@ -532,7 +533,7 @@ fn recreate_crate_graph(&mut self, cause: String) {
         // deleted or created we trigger a reconstruction of the crate graph
         let mut crate_graph_file_dependencies = FxHashSet::default();
 
-        let mut load = |path: &AbsPath| {
+        let load = |path: &AbsPath| {
             let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered();
             let vfs_path = vfs::VfsPath::from(path.to_path_buf());
             crate_graph_file_dependencies.insert(vfs_path.clone());
@@ -547,56 +548,8 @@ fn recreate_crate_graph(&mut self, cause: String) {
             }
         };
 
-        let mut crate_graph = CrateGraph::default();
-        let mut proc_macro_paths = Vec::default();
-        let mut layouts = Vec::default();
-        let mut toolchains = Vec::default();
-        let e = Err(Arc::from("missing layout"));
-        for ws in &**self.workspaces {
-            let (other, mut crate_proc_macros) =
-                ws.to_crate_graph(&mut load, self.config.extra_env());
-            let num_layouts = layouts.len();
-            let num_toolchains = toolchains.len();
-            let (toolchain, layout) = match ws {
-                ProjectWorkspace::Cargo { toolchain, target_layout, .. }
-                | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
-                    (toolchain.clone(), target_layout.clone())
-                }
-                ProjectWorkspace::DetachedFiles { .. } => {
-                    (None, Err("detached files have no layout".into()))
-                }
-            };
-
-            let mapping = crate_graph.extend(
-                other,
-                &mut crate_proc_macros,
-                |(cg_id, _cg_data), (_o_id, _o_data)| {
-                    // if the newly created crate graph's layout is equal to the crate of the merged graph, then
-                    // we can merge the crates.
-                    layouts[cg_id.into_raw().into_u32() as usize] == layout
-                        && toolchains[cg_id.into_raw().into_u32() as usize] == toolchain
-                },
-            );
-            // Populate the side tables for the newly merged crates
-            mapping.values().for_each(|val| {
-                let idx = val.into_raw().into_u32() as usize;
-                // we only need to consider crates that were not merged and remapped, as the
-                // ones that were remapped already have the correct layout and toolchain
-                if idx >= num_layouts {
-                    if layouts.len() <= idx {
-                        layouts.resize(idx + 1, e.clone());
-                    }
-                    layouts[idx] = layout.clone();
-                }
-                if idx >= num_toolchains {
-                    if toolchains.len() <= idx {
-                        toolchains.resize(idx + 1, None);
-                    }
-                    toolchains[idx] = toolchain.clone();
-                }
-            });
-            proc_macro_paths.push(crate_proc_macros);
-        }
+        let (crate_graph, proc_macro_paths, layouts, toolchains) =
+            ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load);
 
         let mut change = Change::new();
         if self.config.expand_proc_macros() {
@@ -609,6 +562,8 @@ fn recreate_crate_graph(&mut self, cause: String) {
             self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
         }
         change.set_crate_graph(crate_graph);
+        change.set_target_data_layouts(layouts);
+        change.set_toolchains(toolchains);
         self.analysis_host.apply_change(change);
         self.crate_graph_file_dependencies = crate_graph_file_dependencies;
     }
@@ -719,6 +674,97 @@ fn reload_flycheck(&mut self) {
     }
 }
 
+// FIXME: Move this into load-cargo?
+pub fn ws_to_crate_graph(
+    workspaces: &[ProjectWorkspace],
+    extra_env: &FxHashMap<String, String>,
+    mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
+) -> (
+    CrateGraph,
+    Vec<FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>>,
+    Vec<Result<Arc<str>, Arc<str>>>,
+    Vec<Option<Version>>,
+) {
+    let mut crate_graph = CrateGraph::default();
+    let mut proc_macro_paths = Vec::default();
+    let mut layouts = Vec::default();
+    let mut toolchains = Vec::default();
+    let e = Err(Arc::from("missing layout"));
+    for ws in workspaces {
+        let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
+        let num_layouts = layouts.len();
+        let num_toolchains = toolchains.len();
+        let (toolchain, layout) = match ws {
+            ProjectWorkspace::Cargo { toolchain, target_layout, .. }
+            | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
+                (toolchain.clone(), target_layout.clone())
+            }
+            ProjectWorkspace::DetachedFiles { .. } => {
+                (None, Err("detached files have no layout".into()))
+            }
+        };
+
+        let mapping = crate_graph.extend(
+            other,
+            &mut crate_proc_macros,
+            |(cg_id, _cg_data), (_o_id, _o_data)| {
+                // if the newly created crate graph's layout is equal to the crate of the merged graph, then
+                // we can merge the crates.
+                let id = cg_id.into_raw().into_u32() as usize;
+                if layouts[id] == layout && toolchains[id] == toolchain {
+                    let (res, update) = match (&_cg_data.origin, &_o_data.origin) {
+                        (a, b)
+                            if a == b && _cg_data.eq_ignoring_origin_and_deps(_o_data, false) =>
+                        {
+                            (true, false)
+                        }
+                        (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
+                        | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. })
+                            if _cg_data.eq_ignoring_origin_and_deps(_o_data, true) =>
+                        {
+                            // If the origins differ, check if the two crates are equal without
+                            // considering the dev dependencies, if they are, they most likely are in
+                            // different loaded workspaces which may cause issues. We keep the local
+                            // version and discard the library one as the local version may have
+                            // dev-dependencies that we want to keep resolving. See #15656 for more
+                            // information.
+                            (true, !a.is_local())
+                        }
+                        (_, _) => (false, false),
+                    };
+                    if res && update {
+                        _cg_data.origin = _o_data.origin.clone();
+                        _cg_data.dependencies = _o_data.dependencies.clone();
+                    }
+                    res
+                } else {
+                    false
+                }
+            },
+        );
+        // Populate the side tables for the newly merged crates
+        mapping.values().for_each(|val| {
+            let idx = val.into_raw().into_u32() as usize;
+            // we only need to consider crates that were not merged and remapped, as the
+            // ones that were remapped already have the correct layout and toolchain
+            if idx >= num_layouts {
+                if layouts.len() <= idx {
+                    layouts.resize(idx + 1, e.clone());
+                }
+                layouts[idx] = layout.clone();
+            }
+            if idx >= num_toolchains {
+                if toolchains.len() <= idx {
+                    toolchains.resize(idx + 1, None);
+                }
+                toolchains[idx] = toolchain.clone();
+            }
+        });
+        proc_macro_paths.push(crate_proc_macros);
+    }
+    (crate_graph, proc_macro_paths, layouts, toolchains)
+}
+
 pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
     const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
     const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs
new file mode 100644
index 00000000000..efd42fadf7e
--- /dev/null
+++ b/crates/rust-analyzer/tests/crate_graph.rs
@@ -0,0 +1,118 @@
+use std::path::PathBuf;
+
+use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts};
+use rust_analyzer::ws_to_crate_graph;
+use rustc_hash::FxHashMap;
+use serde::de::DeserializeOwned;
+use vfs::{AbsPathBuf, FileId};
+
+fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
+    let meta = get_test_json_file(file);
+    let cargo_workspace = CargoWorkspace::new(meta);
+    ProjectWorkspace::Cargo {
+        cargo: cargo_workspace,
+        build_scripts: WorkspaceBuildScripts::default(),
+        sysroot: Ok(get_fake_sysroot()),
+        rustc: Err(None),
+        rustc_cfg: Vec::new(),
+        cfg_overrides: Default::default(),
+        toolchain: None,
+        target_layout: Err("target_data_layout not loaded".into()),
+        cargo_config_extra_env: Default::default(),
+    }
+}
+
+fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
+    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    let file = base.join("tests/test_data").join(file);
+    let data = std::fs::read_to_string(file).unwrap();
+    let mut json = data.parse::<serde_json::Value>().unwrap();
+    fixup_paths(&mut json);
+    return serde_json::from_value(json).unwrap();
+
+    fn fixup_paths(val: &mut serde_json::Value) {
+        match val {
+            serde_json::Value::String(s) => replace_root(s, true),
+            serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
+            serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
+            serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
+            }
+        }
+    }
+}
+
+fn replace_root(s: &mut String, direction: bool) {
+    if direction {
+        let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
+        *s = s.replace("$ROOT$", root)
+    } else {
+        let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
+        *s = s.replace(root, "$ROOT$")
+    }
+}
+
+fn get_fake_sysroot_path() -> PathBuf {
+    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    base.join("../project-model/test_data/fake-sysroot")
+}
+
+fn get_fake_sysroot() -> Sysroot {
+    let sysroot_path = get_fake_sysroot_path();
+    // there's no `libexec/` directory with a `proc-macro-srv` binary in that
+    // fake sysroot, so we give them both the same path:
+    let sysroot_dir = AbsPathBuf::assert(sysroot_path);
+    let sysroot_src_dir = sysroot_dir.clone();
+    Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false)
+}
+
+#[test]
+fn test_deduplicate_origin_dev() {
+    let path_map = &mut FxHashMap::default();
+    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
+    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
+
+    let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
+        let len = path_map.len();
+        Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
+    });
+
+    let mut crates_named_p2 = vec![];
+    for id in crate_graph.iter() {
+        let krate = &crate_graph[id];
+        if let Some(name) = krate.display_name.as_ref() {
+            if name.to_string() == "p2" {
+                crates_named_p2.push(krate);
+            }
+        }
+    }
+
+    assert!(crates_named_p2.len() == 1);
+    let p2 = crates_named_p2[0];
+    assert!(p2.origin.is_local());
+}
+
+#[test]
+fn test_deduplicate_origin_dev_rev() {
+    let path_map = &mut FxHashMap::default();
+    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
+    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
+
+    let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
+        let len = path_map.len();
+        Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
+    });
+
+    let mut crates_named_p2 = vec![];
+    for id in crate_graph.iter() {
+        let krate = &crate_graph[id];
+        if let Some(name) = krate.display_name.as_ref() {
+            if name.to_string() == "p2" {
+                crates_named_p2.push(krate);
+            }
+        }
+    }
+
+    assert!(crates_named_p2.len() == 1);
+    let p2 = crates_named_p2[0];
+    assert!(p2.origin.is_local());
+}
diff --git a/crates/project-model/test_data/deduplication_crate_graph_A.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json
similarity index 100%
rename from crates/project-model/test_data/deduplication_crate_graph_A.json
rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json
diff --git a/crates/project-model/test_data/deduplication_crate_graph_B.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json
similarity index 100%
rename from crates/project-model/test_data/deduplication_crate_graph_B.json
rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json
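
The reworked `CrateGraph::extend` no longer hard-codes the deduplication policy: the caller-supplied `merge` callback receives `(CrateId, &mut CrateData)` for each candidate crate already in the receiving graph and `(CrateId, &CrateData)` for the incoming crate, may mutate the existing entry if it wants to adopt parts of the incoming one, and returns `true` to reuse it. Below is a minimal caller-side sketch, not taken from this patch: `merge_graphs` is a hypothetical helper, and the policy shown is deliberately simpler than the layout/toolchain/origin checks done in `ws_to_crate_graph`.

    use ide::CrateId;
    use ide_db::{
        base_db::{CrateGraph, ProcMacroPaths},
        FxHashMap,
    };

    /// Merge `other` into `graph`, reusing an existing crate whenever it matches the
    /// incoming one once dev-dependencies are ignored (illustrative policy only).
    fn merge_graphs(
        graph: &mut CrateGraph,
        other: CrateGraph,
        proc_macros: &mut ProcMacroPaths,
    ) -> FxHashMap<CrateId, CrateId> {
        graph.extend(other, proc_macros, |(_, existing), (_, incoming)| {
            // `existing` is `&mut CrateData`, so a fancier callback could also patch its
            // origin or dependencies here before returning `true`, as `ws_to_crate_graph` does.
            existing.eq_ignoring_origin_and_deps(incoming, /* ignore_dev_deps */ true)
        })
    }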