Move dedup-dev-deps tests into rust-analyzer crate

Lukas Wirth 2024-02-16 15:47:25 +01:00
parent b1404d387a
commit 0ccb3b8731
8 changed files with 233 additions and 152 deletions


@@ -295,7 +295,7 @@ pub struct CrateData
 impl CrateData {
     /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
-    fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
+    pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
         // This method has some obscure bits. These are mostly there to be compliant with
         // some patches. References to the patches are given.
         if self.root_file_id != other.root_file_id {
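With `eq_ignoring_origin_and_deps` now `pub`, code outside `base-db` (such as the merge closure added in `reload::ws_to_crate_graph` below) can compare crates directly. A minimal sketch of the call shape, assuming two `CrateData` values `a` and `b` are in scope:

    // Strict comparison (dev-dependencies included), then lenient (dev-dependencies ignored).
    let identical = a.eq_ignoring_origin_and_deps(&b, false);
    let same_modulo_dev_deps = a.eq_ignoring_origin_and_deps(&b, true);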
@@ -622,8 +622,9 @@ pub fn extend(
         &mut self,
         mut other: CrateGraph,
         proc_macros: &mut ProcMacroPaths,
-        may_merge: impl Fn((CrateId, &CrateData), (CrateId, &CrateData)) -> bool,
+        merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool,
     ) -> FxHashMap<CrateId, CrateId> {
+        let m = self.len();
         let topo = other.crates_in_topological_order();
         let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
         for topo in topo {
@@ -631,48 +632,14 @@ pub fn extend(
             crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
             crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
 
-            let res = self.arena.iter().find_map(|(id, data)| {
-                if !may_merge((id, &data), (topo, &crate_data)) {
-                    return None;
-                }
-
-                match (&data.origin, &crate_data.origin) {
-                    (a, b) if a == b => {
-                        if data.eq_ignoring_origin_and_deps(crate_data, false) {
-                            return Some((id, false));
-                        }
-                    }
-                    (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
-                    | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => {
-                        // If the origins differ, check if the two crates are equal without
-                        // considering the dev dependencies, if they are, they most likely are in
-                        // different loaded workspaces which may cause issues. We keep the local
-                        // version and discard the library one as the local version may have
-                        // dev-dependencies that we want to keep resolving. See #15656 for more
-                        // information.
-                        if data.eq_ignoring_origin_and_deps(crate_data, true) {
-                            return Some((id, !a.is_local()));
-                        }
-                    }
-                    (_, _) => return None,
-                }
-                None
-            });
-
-            let new_id = if let Some((res, should_update_lib_to_local)) = res {
-                if should_update_lib_to_local {
-                    assert!(self.arena[res].origin.is_lib());
-                    assert!(crate_data.origin.is_local());
-                    self.arena[res].origin = crate_data.origin.clone();
-
-                    // Move local's dev dependencies into the newly-local-formerly-lib crate.
-                    self.arena[res].dependencies = crate_data.dependencies.clone();
-                }
-                res
-            } else {
-                self.arena.alloc(crate_data.clone())
-            };
+            let res = self
+                .arena
+                .iter_mut()
+                .take(m)
+                .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id));
+
+            let new_id =
+                if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) };
             id_map.insert(topo, new_id);
         }
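`CrateGraph::extend` no longer hard-codes the deduplication policy; the caller now passes a `merge` closure that decides whether an existing crate matches and, via `&mut CrateData`, may update the kept crate in place. The simplest callers (seen in the test and sysroot hunks below) only merge exact duplicates. A sketch of that call shape, assuming `other` and `proc_macros` are in scope:

    // Merge only crates whose data is identical; everything else is allocated as a new crate.
    let id_map = crate_graph.extend(other, &mut proc_macros, |(_, a), (_, b)| a == b);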


@@ -238,7 +238,7 @@ fn crate_graph_dedup_identical() {
     let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone());
 
-    crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_, _| true);
+    crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b);
     assert!(crate_graph.iter().eq(d_crate_graph.iter()));
     assert_eq!(proc_macros, d_proc_macros);
 }
@@ -254,62 +254,10 @@ fn crate_graph_dedup() {
         load_cargo_with_fake_sysroot(path_map, "regex-metadata.json");
     assert_eq!(regex_crate_graph.iter().count(), 60);
 
-    crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_, _| true);
+    crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b);
     assert_eq!(crate_graph.iter().count(), 118);
 }
 
-#[test]
-fn test_deduplicate_origin_dev() {
-    let path_map = &mut Default::default();
-    let (mut crate_graph, _proc_macros) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json");
-    crate_graph.sort_deps();
-    let (crate_graph_1, mut _proc_macros_2) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json");
-
-    crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true);
-
-    let mut crates_named_p2 = vec![];
-    for id in crate_graph.iter() {
-        let krate = &crate_graph[id];
-        if let Some(name) = krate.display_name.as_ref() {
-            if name.to_string() == "p2" {
-                crates_named_p2.push(krate);
-            }
-        }
-    }
-
-    assert!(crates_named_p2.len() == 1);
-    let p2 = crates_named_p2[0];
-    assert!(p2.origin.is_local());
-}
-
-#[test]
-fn test_deduplicate_origin_dev_rev() {
-    let path_map = &mut Default::default();
-    let (mut crate_graph, _proc_macros) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json");
-    crate_graph.sort_deps();
-    let (crate_graph_1, mut _proc_macros_2) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json");
-
-    crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true);
-
-    let mut crates_named_p2 = vec![];
-    for id in crate_graph.iter() {
-        let krate = &crate_graph[id];
-        if let Some(name) = krate.display_name.as_ref() {
-            if name.to_string() == "p2" {
-                crates_named_p2.push(krate);
-            }
-        }
-    }
-
-    assert!(crates_named_p2.len() == 1);
-    let p2 = crates_named_p2[0];
-    assert!(p2.origin.is_local());
-}
-
 #[test]
 fn smoke_test_real_sysroot_cargo() {
     if std::env::var("SYSROOT_CARGO_METADATA").is_err() {


@@ -1411,7 +1411,7 @@ fn sysroot_to_crate_graph(
     // Remove all crates except the ones we are interested in to keep the sysroot graph small.
     let removed_mapping = cg.remove_crates_except(&marker_set);
 
-    let mapping = crate_graph.extend(cg, &mut pm, |_, _| true);
+    let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b);
 
     // Map the id through the removal mapping first, then through the crate graph extension mapping.
     pub_deps.iter_mut().for_each(|(_, cid, _)| {


@@ -47,7 +47,9 @@ pub mod tracing {
 use serde::de::DeserializeOwned;
 
-pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
+pub use crate::{
+    caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version,
+};
 
 pub fn from_json<T: DeserializeOwned>(
     what: &'static str,


@@ -17,8 +17,9 @@
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::{db::DefDatabase, Change, ProcMacros};
+use ide::CrateId;
 use ide_db::{
-    base_db::{salsa::Durability, CrateGraph, ProcMacroPaths},
+    base_db::{salsa::Durability, CrateGraph, CrateOrigin, ProcMacroPaths, Version},
     FxHashMap,
 };
 use itertools::Itertools;
@@ -28,7 +29,7 @@
 use rustc_hash::FxHashSet;
 use stdx::{format_to, thread::ThreadIntent};
 use triomphe::Arc;
-use vfs::{AbsPath, ChangeKind};
+use vfs::{AbsPath, AbsPathBuf, ChangeKind};
 
 use crate::{
     config::{Config, FilesWatcher, LinkedProject},
@@ -532,7 +533,7 @@ fn recreate_crate_graph(&mut self, cause: String) {
         // deleted or created we trigger a reconstruction of the crate graph
         let mut crate_graph_file_dependencies = FxHashSet::default();
 
-        let mut load = |path: &AbsPath| {
+        let load = |path: &AbsPath| {
             let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered();
             let vfs_path = vfs::VfsPath::from(path.to_path_buf());
             crate_graph_file_dependencies.insert(vfs_path.clone());
@@ -547,56 +548,8 @@ fn recreate_crate_graph(&mut self, cause: String) {
             }
         };
 
-        let mut crate_graph = CrateGraph::default();
-        let mut proc_macro_paths = Vec::default();
-        let mut layouts = Vec::default();
-        let mut toolchains = Vec::default();
-        let e = Err(Arc::from("missing layout"));
-        for ws in &**self.workspaces {
-            let (other, mut crate_proc_macros) =
-                ws.to_crate_graph(&mut load, self.config.extra_env());
-            let num_layouts = layouts.len();
-            let num_toolchains = toolchains.len();
-            let (toolchain, layout) = match ws {
-                ProjectWorkspace::Cargo { toolchain, target_layout, .. }
-                | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
-                    (toolchain.clone(), target_layout.clone())
-                }
-                ProjectWorkspace::DetachedFiles { .. } => {
-                    (None, Err("detached files have no layout".into()))
-                }
-            };
-            let mapping = crate_graph.extend(
-                other,
-                &mut crate_proc_macros,
-                |(cg_id, _cg_data), (_o_id, _o_data)| {
-                    // if the newly created crate graph's layout is equal to the crate of the merged graph, then
-                    // we can merge the crates.
-                    layouts[cg_id.into_raw().into_u32() as usize] == layout
-                        && toolchains[cg_id.into_raw().into_u32() as usize] == toolchain
-                },
-            );
-            // Populate the side tables for the newly merged crates
-            mapping.values().for_each(|val| {
-                let idx = val.into_raw().into_u32() as usize;
-                // we only need to consider crates that were not merged and remapped, as the
-                // ones that were remapped already have the correct layout and toolchain
-                if idx >= num_layouts {
-                    if layouts.len() <= idx {
-                        layouts.resize(idx + 1, e.clone());
-                    }
-                    layouts[idx] = layout.clone();
-                }
-                if idx >= num_toolchains {
-                    if toolchains.len() <= idx {
-                        toolchains.resize(idx + 1, None);
-                    }
-                    toolchains[idx] = toolchain.clone();
-                }
-            });
-            proc_macro_paths.push(crate_proc_macros);
-        }
+        let (crate_graph, proc_macro_paths, layouts, toolchains) =
+            ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load);
 
         let mut change = Change::new();
         if self.config.expand_proc_macros() {
@@ -609,6 +562,8 @@ fn recreate_crate_graph(&mut self, cause: String) {
             self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
         }
         change.set_crate_graph(crate_graph);
+        change.set_target_data_layouts(layouts);
+        change.set_toolchains(toolchains);
         self.analysis_host.apply_change(change);
         self.crate_graph_file_dependencies = crate_graph_file_dependencies;
     }
@@ -719,6 +674,97 @@ fn reload_flycheck(&mut self) {
     }
 }
 
+// FIXME: Move this into load-cargo?
+pub fn ws_to_crate_graph(
+    workspaces: &[ProjectWorkspace],
+    extra_env: &FxHashMap<String, String>,
+    mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
+) -> (
+    CrateGraph,
+    Vec<FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>>,
+    Vec<Result<Arc<str>, Arc<str>>>,
+    Vec<Option<Version>>,
+) {
+    let mut crate_graph = CrateGraph::default();
+    let mut proc_macro_paths = Vec::default();
+    let mut layouts = Vec::default();
+    let mut toolchains = Vec::default();
+    let e = Err(Arc::from("missing layout"));
+    for ws in workspaces {
+        let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
+        let num_layouts = layouts.len();
+        let num_toolchains = toolchains.len();
+        let (toolchain, layout) = match ws {
+            ProjectWorkspace::Cargo { toolchain, target_layout, .. }
+            | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
+                (toolchain.clone(), target_layout.clone())
+            }
+            ProjectWorkspace::DetachedFiles { .. } => {
+                (None, Err("detached files have no layout".into()))
+            }
+        };
+        let mapping = crate_graph.extend(
+            other,
+            &mut crate_proc_macros,
+            |(cg_id, _cg_data), (_o_id, _o_data)| {
+                // if the newly created crate graph's layout is equal to the crate of the merged graph, then
+                // we can merge the crates.
+                let id = cg_id.into_raw().into_u32() as usize;
+                if layouts[id] == layout && toolchains[id] == toolchain {
+                    let (res, update) = match (&_cg_data.origin, &_o_data.origin) {
+                        (a, b)
+                            if a == b && _cg_data.eq_ignoring_origin_and_deps(_o_data, false) =>
+                        {
+                            (true, false)
+                        }
+                        (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
+                        | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. })
+                            if _cg_data.eq_ignoring_origin_and_deps(_o_data, true) =>
+                        {
+                            // If the origins differ, check if the two crates are equal without
+                            // considering the dev dependencies, if they are, they most likely are in
+                            // different loaded workspaces which may cause issues. We keep the local
+                            // version and discard the library one as the local version may have
+                            // dev-dependencies that we want to keep resolving. See #15656 for more
+                            // information.
+                            (true, !a.is_local())
+                        }
+                        (_, _) => (false, false),
+                    };
+                    if res && update {
+                        _cg_data.origin = _o_data.origin.clone();
+                        _cg_data.dependencies = _o_data.dependencies.clone();
+                    }
+                    res
+                } else {
+                    false
+                }
+            },
+        );
+        // Populate the side tables for the newly merged crates
+        mapping.values().for_each(|val| {
+            let idx = val.into_raw().into_u32() as usize;
+            // we only need to consider crates that were not merged and remapped, as the
+            // ones that were remapped already have the correct layout and toolchain
+            if idx >= num_layouts {
+                if layouts.len() <= idx {
+                    layouts.resize(idx + 1, e.clone());
+                }
+                layouts[idx] = layout.clone();
+            }
+            if idx >= num_toolchains {
+                if toolchains.len() <= idx {
+                    toolchains.resize(idx + 1, None);
+                }
+                toolchains[idx] = toolchain.clone();
+            }
+        });
+        proc_macro_paths.push(crate_proc_macros);
+    }
+    (crate_graph, proc_macro_paths, layouts, toolchains)
+}
+
 pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
     const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
     const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
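The extracted `ws_to_crate_graph` helper returns the merged crate graph alongside the per-workspace proc-macro paths and the per-crate layout/toolchain side tables. A usage sketch mirroring the new tests below, assuming `workspaces` is a slice of `ProjectWorkspace` and file IDs are handed out from a simple counter:

    // Hypothetical caller: assign fresh FileIds as paths are encountered.
    let mut path_map: FxHashMap<AbsPathBuf, FileId> = FxHashMap::default();
    let (crate_graph, proc_macro_paths, layouts, toolchains) =
        ws_to_crate_graph(&workspaces, &Default::default(), |path| {
            let len = path_map.len();
            Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
        });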


@@ -0,0 +1,118 @@
+use std::path::PathBuf;
+
+use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts};
+use rust_analyzer::ws_to_crate_graph;
+use rustc_hash::FxHashMap;
+use serde::de::DeserializeOwned;
+use vfs::{AbsPathBuf, FileId};
+
+fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
+    let meta = get_test_json_file(file);
+    let cargo_workspace = CargoWorkspace::new(meta);
+    ProjectWorkspace::Cargo {
+        cargo: cargo_workspace,
+        build_scripts: WorkspaceBuildScripts::default(),
+        sysroot: Ok(get_fake_sysroot()),
+        rustc: Err(None),
+        rustc_cfg: Vec::new(),
+        cfg_overrides: Default::default(),
+        toolchain: None,
+        target_layout: Err("target_data_layout not loaded".into()),
+        cargo_config_extra_env: Default::default(),
+    }
+}
+
+fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
+    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    let file = base.join("tests/test_data").join(file);
+    let data = std::fs::read_to_string(file).unwrap();
+    let mut json = data.parse::<serde_json::Value>().unwrap();
+    fixup_paths(&mut json);
+    return serde_json::from_value(json).unwrap();
+
+    fn fixup_paths(val: &mut serde_json::Value) {
+        match val {
+            serde_json::Value::String(s) => replace_root(s, true),
+            serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
+            serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
+            serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
+            }
+        }
+    }
+}
+
+fn replace_root(s: &mut String, direction: bool) {
+    if direction {
+        let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
+        *s = s.replace("$ROOT$", root)
+    } else {
+        let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
+        *s = s.replace(root, "$ROOT$")
+    }
+}
+
+fn get_fake_sysroot_path() -> PathBuf {
+    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    base.join("../project-model/test_data/fake-sysroot")
+}
+
+fn get_fake_sysroot() -> Sysroot {
+    let sysroot_path = get_fake_sysroot_path();
+    // there's no `libexec/` directory with a `proc-macro-srv` binary in that
+    // fake sysroot, so we give them both the same path:
+    let sysroot_dir = AbsPathBuf::assert(sysroot_path);
+    let sysroot_src_dir = sysroot_dir.clone();
+    Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false)
+}
+
+#[test]
+fn test_deduplicate_origin_dev() {
+    let path_map = &mut FxHashMap::default();
+    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
+    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
+
+    let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
+        let len = path_map.len();
+        Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
+    });
+
+    let mut crates_named_p2 = vec![];
+    for id in crate_graph.iter() {
+        let krate = &crate_graph[id];
+        if let Some(name) = krate.display_name.as_ref() {
+            if name.to_string() == "p2" {
+                crates_named_p2.push(krate);
+            }
+        }
+    }
+
+    assert!(crates_named_p2.len() == 1);
+    let p2 = crates_named_p2[0];
+    assert!(p2.origin.is_local());
+}
+
+#[test]
+fn test_deduplicate_origin_dev_rev() {
+    let path_map = &mut FxHashMap::default();
+    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
+    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
+
+    let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
+        let len = path_map.len();
+        Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
+    });
+
+    let mut crates_named_p2 = vec![];
+    for id in crate_graph.iter() {
+        let krate = &crate_graph[id];
+        if let Some(name) = krate.display_name.as_ref() {
+            if name.to_string() == "p2" {
+                crates_named_p2.push(krate);
+            }
+        }
+    }
+
+    assert!(crates_named_p2.len() == 1);
+    let p2 = crates_named_p2[0];
+    assert!(p2.origin.is_local());
+}