fix: potential bugs when build scripts do not match the current project

This commit is contained in:
Aleksey Kladov 2021-07-18 13:13:03 +03:00
parent f4de2ece0d
commit b8b166e674
4 changed files with 173 additions and 80 deletions

View File

@ -10,6 +10,7 @@ use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf};
use proc_macro_api::ProcMacroClient;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::always;
use crate::{
build_scripts::BuildScriptOutput,
@ -39,6 +40,7 @@ pub enum ProjectWorkspace {
/// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
Cargo {
cargo: CargoWorkspace,
build_scripts: WorkspaceBuildScripts,
sysroot: Sysroot,
rustc: Option<CargoWorkspace>,
/// Holds cfg flags for the current target. We get those by running
@ -69,7 +71,14 @@ impl fmt::Debug for ProjectWorkspace {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Make sure this isn't too verbose.
match self {
ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg, cfg_overrides } => f
ProjectWorkspace::Cargo {
cargo,
build_scripts: _,
sysroot,
rustc,
rustc_cfg,
cfg_overrides,
} => f
.debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name())
.field("n_packages", &cargo.packages().len())
@ -169,7 +178,14 @@ impl ProjectWorkspace {
let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref());
let cfg_overrides = config.cfg_overrides();
ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg, cfg_overrides }
ProjectWorkspace::Cargo {
cargo,
build_scripts: WorkspaceBuildScripts::default(),
sysroot,
rustc,
rustc_cfg,
cfg_overrides,
}
}
};
@ -196,10 +212,34 @@ impl ProjectWorkspace {
Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
}
/// Runs the workspace's build scripts (via `cargo check`) and returns their
/// output, without mutating `self`.
///
/// Only `Cargo` workspaces have build scripts to run; for `Json` and
/// `DetachedFiles` workspaces this is a no-op that yields default (empty)
/// build-script output. `progress` receives human-readable status updates.
pub fn run_build_scripts(
    &self,
    config: &CargoConfig,
    progress: &dyn Fn(String),
) -> Result<WorkspaceBuildScripts> {
    // Bail out early with empty output for workspace kinds without build scripts.
    let cargo = match self {
        ProjectWorkspace::Cargo { cargo, .. } => cargo,
        ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
            return Ok(WorkspaceBuildScripts::default());
        }
    };
    WorkspaceBuildScripts::run(config, cargo, progress)
}
/// Installs previously-computed build-script output into this workspace.
///
/// Only the `Cargo` variant stores build scripts. For the other variants
/// the data has nowhere to go, so we assert (non-fatally, via `always!`)
/// that the caller passed only default/empty output.
pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
    if let ProjectWorkspace::Cargo { build_scripts, .. } = self {
        *build_scripts = bs;
    } else {
        // Non-cargo workspaces should never receive real build-script data.
        always!(bs == WorkspaceBuildScripts::default());
    }
}
/// Returns the roots for the current `ProjectWorkspace`
/// The return type contains the path and whether or not
/// the root is a member of the current workspace
pub fn to_roots(&self, build_scripts: &WorkspaceBuildScripts) -> Vec<PackageRoot> {
pub fn to_roots(&self) -> Vec<PackageRoot> {
match self {
ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
.crates()
@ -218,7 +258,14 @@ impl ProjectWorkspace {
})
}))
.collect::<Vec<_>>(),
ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg: _, cfg_overrides: _ } => {
ProjectWorkspace::Cargo {
cargo,
sysroot,
rustc,
rustc_cfg: _,
cfg_overrides: _,
build_scripts,
} => {
cargo
.packages()
.map(|pkg| {
@ -302,7 +349,6 @@ impl ProjectWorkspace {
pub fn to_crate_graph(
&self,
build_scripts: &WorkspaceBuildScripts,
proc_macro_client: Option<&ProcMacroClient>,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
) -> CrateGraph {
@ -320,18 +366,23 @@ impl ProjectWorkspace {
project,
sysroot,
),
ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg, cfg_overrides } => {
cargo_to_crate_graph(
rustc_cfg.clone(),
cfg_overrides,
&proc_macro_loader,
load,
cargo,
build_scripts,
sysroot,
rustc,
)
}
ProjectWorkspace::Cargo {
cargo,
sysroot,
rustc,
rustc_cfg,
cfg_overrides,
build_scripts,
} => cargo_to_crate_graph(
rustc_cfg.clone(),
cfg_overrides,
&proc_macro_loader,
load,
cargo,
build_scripts,
sysroot,
rustc,
),
ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot)
}

View File

@ -36,7 +36,7 @@ pub(crate) fn load_workspace_at(
}
fn load_workspace(
ws: ProjectWorkspace,
mut ws: ProjectWorkspace,
cargo_config: &CargoConfig,
load_config: &LoadCargoConfig,
progress: &dyn Fn(String),
@ -56,22 +56,20 @@ fn load_workspace(
None
};
let build_scripts = match &ws {
ProjectWorkspace::Cargo { cargo, .. } if load_config.load_out_dirs_from_check => {
WorkspaceBuildScripts::run(cargo_config, cargo, progress)?
}
_ => WorkspaceBuildScripts::default(),
};
ws.set_build_scripts(if load_config.load_out_dirs_from_check {
ws.run_build_scripts(cargo_config, progress)?
} else {
WorkspaceBuildScripts::default()
});
let crate_graph =
ws.to_crate_graph(&build_scripts, proc_macro_client.as_ref(), &mut |path: &AbsPath| {
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
});
let crate_graph = ws.to_crate_graph(proc_macro_client.as_ref(), &mut |path: &AbsPath| {
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
});
let project_folders = ProjectFolders::new(&[ws], &[build_scripts], &[]);
let project_folders = ProjectFolders::new(&[ws], &[]);
loader.set_config(vfs::loader::Config {
load: project_folders.load,
watch: vec![],

View File

@ -74,18 +74,35 @@ pub(crate) struct GlobalState {
pub(crate) vfs_progress_n_total: usize,
pub(crate) vfs_progress_n_done: usize,
/// For both `workspaces` and `workspace_build_data`, the field stores the
/// data we actually use, while the `OpQueue` stores the result of the last
/// fetch.
/// `workspaces` field stores the data we actually use, while the `OpQueue`
/// stores the result of the last fetch.
///
/// If the fetch (partially) fails, we do not update the values.
/// If the fetch (partially) fails, we do not update the current value.
///
/// Invariant: workspaces.len() == workspace_build_data
/// The handling of build data is subtle. We fetch workspace in two phases:
///
/// *First*, we run `cargo metadata`, which gives us fast results for
/// initial analysis.
///
/// *Second*, we run `cargo check` which runs build scripts and compiles
/// proc macros.
///
/// We need both for the precise analysis, but we want rust-analyzer to be
/// at least partially available just after the first phase. That's because
/// first phase is much faster, and is much less likely to fail.
///
/// This creates a complication -- by the time the second phase completes,
/// the results of the first phase could be invalid. That is, while we run
/// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
/// `cargo metadata` completes before `cargo check`.
///
/// An additional complication is that we want to avoid needless work. When
/// the user just adds comments or whitespace to Cargo.toml, we do not want
/// to invalidate any salsa caches.
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
pub(crate) workspace_build_data: Vec<WorkspaceBuildScripts>,
pub(crate) fetch_build_data_queue: OpQueue<Vec<anyhow::Result<WorkspaceBuildScripts>>>,
pub(crate) fetch_build_data_queue:
OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
pub(crate) prime_caches_queue: OpQueue<()>,
@ -149,7 +166,6 @@ impl GlobalState {
workspaces: Arc::new(Vec::new()),
fetch_workspaces_queue: OpQueue::default(),
workspace_build_data: Vec::new(),
prime_caches_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),

View File

@ -1,7 +1,6 @@
//! Project loading & configuration updates
use std::{mem, sync::Arc};
use always_assert::always;
use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
@ -27,7 +26,7 @@ pub(crate) enum ProjectWorkspaceProgress {
pub(crate) enum BuildDataProgress {
Begin,
Report(String),
End(Vec<anyhow::Result<WorkspaceBuildScripts>>),
End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}
impl GlobalState {
@ -114,7 +113,7 @@ impl GlobalState {
message: None,
};
if let Some(error) = self.build_data_error() {
if let Some(error) = self.fetch_build_data_error() {
status.health = lsp_ext::Health::Warning;
status.message = Some(error)
}
@ -229,6 +228,7 @@ impl GlobalState {
};
let mut res = Vec::new();
for ws in workspaces.iter() {
res.push(ws.run_build_scripts(&config, &progress));
let ws = match ws {
ProjectWorkspace::Cargo { cargo, .. } => cargo,
ProjectWorkspace::DetachedFiles { .. } | ProjectWorkspace::Json { .. } => {
@ -238,12 +238,12 @@ impl GlobalState {
};
res.push(WorkspaceBuildScripts::run(&config, ws, &progress))
}
sender.send(Task::FetchBuildData(BuildDataProgress::End(res))).unwrap();
sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
});
}
/// Records the result of a finished build-script fetch on the op queue.
///
/// `build_data` pairs the snapshot of workspaces the build scripts were run
/// against with the per-workspace results; keeping the `Arc` lets the
/// consumer later check (via pointer identity) whether the results still
/// match the currently-active workspaces.
pub(crate) fn fetch_build_data_completed(
    &mut self,
    build_data: (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>),
) {
    self.fetch_build_data_queue.op_completed(build_data)
}
@ -255,11 +255,13 @@ impl GlobalState {
if let Some(error_message) = self.fetch_workspace_error() {
log::error!("failed to switch workspaces: {}", error_message);
if !self.workspaces.is_empty() {
// It only makes sense to switch to a partially broken workspace
// if we don't have any workspace at all yet.
return;
}
}
if let Some(error_message) = self.build_data_error() {
if let Some(error_message) = self.fetch_build_data_error() {
log::error!("failed to switch build data: {}", error_message);
}
@ -270,32 +272,67 @@ impl GlobalState {
.filter_map(|res| res.as_ref().ok().cloned())
.collect::<Vec<_>>();
let mut build_scripts = self
.fetch_build_data_queue
.last_op_result()
.iter()
.map(|res| res.as_ref().ok().cloned().unwrap_or_default())
.collect::<Vec<_>>();
fn eq_ignore_build_data<'a>(
left: &'a ProjectWorkspace,
right: &'a ProjectWorkspace,
) -> bool {
let key = |p: &'a ProjectWorkspace| match p {
ProjectWorkspace::Cargo {
cargo,
sysroot,
rustc,
rustc_cfg,
cfg_overrides,
// FIXME: This is not even remotely correct. I do hope that this is
// eventually consistent though. We need to figure a better way to map
// `cargo metadata` to `cargo check` in the future.
//
// I *think* what we need here is an extra field on `ProjectWorkspace`,
// and a workflow to set it, once build data is ready.
build_scripts.resize_with(workspaces.len(), WorkspaceBuildScripts::default);
build_scripts: _,
} => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
_ => None,
};
match (key(left), key(right)) {
(Some(lk), Some(rk)) => lk == rk,
_ => left == right,
}
}
if *self.workspaces == workspaces && self.workspace_build_data == build_scripts {
return;
let same_workspaces = workspaces.len() == self.workspaces.len()
&& workspaces
.iter()
.zip(self.workspaces.iter())
.all(|(l, r)| eq_ignore_build_data(l, r));
if same_workspaces {
let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
if Arc::ptr_eq(&workspaces, &self.workspaces) {
let workspaces = workspaces
.iter()
.cloned()
.zip(build_scripts)
.map(|(mut ws, bs)| {
ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
ws
})
.collect::<Vec<_>>();
// Workspaces are the same, but we've updated build data.
self.workspaces = Arc::new(workspaces);
} else {
// Current build scripts do not match the version of the active
// workspace, so there's nothing for us to update.
return;
}
} else {
// Here, we completely changed the workspace (Cargo.toml edit), so
// we don't care about build-script results, they are stale.
self.workspaces = Arc::new(workspaces)
}
if let FilesWatcher::Client = self.config.files().watcher {
if self.config.did_change_watched_files_dynamic_registration() {
let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
watchers: workspaces
watchers: self
.workspaces
.iter()
.zip(&build_scripts)
.flat_map(|(ws, bs)| ws.to_roots(bs))
.flat_map(|ws| ws.to_roots())
.filter(|it| it.is_member)
.flat_map(|root| {
root.include.into_iter().flat_map(|it| {
@ -327,8 +364,7 @@ impl GlobalState {
let mut change = Change::new();
let files_config = self.config.files();
let project_folders =
ProjectFolders::new(&workspaces, &build_scripts, &files_config.exclude);
let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
if self.proc_macro_client.is_none() {
self.proc_macro_client = match self.config.proc_macro_srv() {
@ -377,12 +413,8 @@ impl GlobalState {
}
res
};
for (ws, bs) in workspaces.iter().zip(&build_scripts) {
crate_graph.extend(ws.to_crate_graph(
bs,
self.proc_macro_client.as_ref(),
&mut load,
));
for ws in self.workspaces.iter() {
crate_graph.extend(ws.to_crate_graph(self.proc_macro_client.as_ref(), &mut load));
}
crate_graph
@ -390,8 +422,6 @@ impl GlobalState {
change.set_crate_graph(crate_graph);
self.source_root_config = project_folders.source_root_config;
self.workspaces = Arc::new(workspaces);
self.workspace_build_data = build_scripts;
self.analysis_host.apply_change(change);
self.process_changes();
@ -415,10 +445,10 @@ impl GlobalState {
Some(buf)
}
fn build_data_error(&self) -> Option<String> {
fn fetch_build_data_error(&self) -> Option<String> {
let mut buf = String::new();
for ws in self.fetch_build_data_queue.last_op_result() {
for ws in &self.fetch_build_data_queue.last_op_result().1 {
if let Err(err) = ws {
stdx::format_to!(buf, "rust-analyzer failed to run custom build: {:#}\n", err);
}
@ -481,15 +511,13 @@ pub(crate) struct ProjectFolders {
impl ProjectFolders {
pub(crate) fn new(
workspaces: &[ProjectWorkspace],
build_scripts: &[WorkspaceBuildScripts],
global_excludes: &[AbsPathBuf],
) -> ProjectFolders {
always!(workspaces.len() == build_scripts.len());
let mut res = ProjectFolders::default();
let mut fsc = FileSetConfig::builder();
let mut local_filesets = vec![];
for root in workspaces.iter().zip(build_scripts).flat_map(|(ws, bs)| ws.to_roots(bs)) {
for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
let file_set_roots: Vec<VfsPath> =
root.include.iter().cloned().map(VfsPath::from).collect();