Async Loading outdir and proc-macro

This commit is contained in:
Edwin Cheng 2021-01-28 23:33:02 +08:00
parent f421ee6722
commit 9358eecc04
11 changed files with 398 additions and 200 deletions

View File

@ -5,10 +5,11 @@
io::BufReader,
path::{Path, PathBuf},
process::{Command, Stdio},
sync::Arc,
};
use anyhow::Result;
use cargo_metadata::{BuildScript, Message, Package, PackageId};
use cargo_metadata::{BuildScript, Message};
use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
@ -16,150 +17,195 @@
use crate::{cfg_flag::CfgFlag, CargoConfig};
#[derive(Debug, Clone, Default)]
pub(crate) struct BuildDataMap {
data: FxHashMap<PackageId, BuildData>,
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct BuildData {
pub(crate) struct BuildData {
/// List of config flags defined by this package's build script
pub cfgs: Vec<CfgFlag>,
pub(crate) cfgs: Vec<CfgFlag>,
/// List of cargo-related environment variables with their value
///
/// If the package has a build script which defines environment variables,
/// they can also be found here.
pub envs: Vec<(String, String)>,
pub(crate) envs: Vec<(String, String)>,
/// Directory where a build script might place its output
pub out_dir: Option<AbsPathBuf>,
pub(crate) out_dir: Option<AbsPathBuf>,
/// Path to the proc-macro library file if this package exposes proc-macros
pub proc_macro_dylib_path: Option<AbsPathBuf>,
pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
}
impl BuildDataMap {
pub(crate) fn new(
cargo_toml: &AbsPath,
cargo_features: &CargoConfig,
packages: &Vec<Package>,
progress: &dyn Fn(String),
) -> Result<BuildDataMap> {
let mut cmd = Command::new(toolchain::cargo());
cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
.arg(cargo_toml.as_ref());
#[derive(Clone, Debug)]
pub(crate) struct BuildDataConfig {
cargo_toml: AbsPathBuf,
cargo_features: CargoConfig,
packages: Arc<Vec<cargo_metadata::Package>>,
}
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --targets
// flag below.
cmd.arg("--all-targets");
impl PartialEq for BuildDataConfig {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.packages, &other.packages)
}
}
if let Some(target) = &cargo_features.target {
cmd.args(&["--target", target]);
impl Eq for BuildDataConfig {}
#[derive(Debug, Default)]
pub struct BuildDataCollector {
configs: FxHashMap<AbsPathBuf, BuildDataConfig>,
}
#[derive(Debug, Default, PartialEq, Eq)]
pub struct BuildDataResult {
data: FxHashMap<AbsPathBuf, BuildDataMap>,
}
pub(crate) type BuildDataMap = FxHashMap<String, BuildData>;
impl BuildDataCollector {
pub(crate) fn add_config(&mut self, workspace_root: &AbsPath, config: BuildDataConfig) {
self.configs.insert(workspace_root.to_path_buf().clone(), config);
}
pub fn collect(&mut self, progress: &dyn Fn(String)) -> Result<BuildDataResult> {
let mut res = BuildDataResult::default();
for (path, config) in self.configs.iter() {
res.data.insert(
path.clone(),
collect_from_workspace(
&config.cargo_toml,
&config.cargo_features,
&config.packages,
progress,
)?,
);
}
if cargo_features.all_features {
cmd.arg("--all-features");
} else {
if cargo_features.no_default_features {
// FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
// https://github.com/oli-obk/cargo_metadata/issues/79
cmd.arg("--no-default-features");
}
if !cargo_features.features.is_empty() {
cmd.arg("--features");
cmd.arg(cargo_features.features.join(" "));
}
}
cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
let mut child = cmd.spawn().map(JodChild)?;
let child_stdout = child.stdout.take().unwrap();
let stdout = BufReader::new(child_stdout);
let mut res = BuildDataMap::default();
for message in cargo_metadata::Message::parse_stream(stdout) {
if let Ok(message) = message {
match message {
Message::BuildScriptExecuted(BuildScript {
package_id,
out_dir,
cfgs,
env,
..
}) => {
let cfgs = {
let mut acc = Vec::new();
for cfg in cfgs {
match cfg.parse::<CfgFlag>() {
Ok(it) => acc.push(it),
Err(err) => {
anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
}
};
}
acc
};
let res = res.data.entry(package_id.clone()).or_default();
// cargo_metadata crate returns default (empty) path for
// older cargos, which is not absolute, so work around that.
if out_dir != PathBuf::default() {
let out_dir = AbsPathBuf::assert(out_dir);
res.out_dir = Some(out_dir);
res.cfgs = cfgs;
}
res.envs = env;
}
Message::CompilerArtifact(message) => {
progress(format!("metadata {}", message.target.name));
if message.target.kind.contains(&"proc-macro".to_string()) {
let package_id = message.package_id;
// Skip rmeta file
if let Some(filename) =
message.filenames.iter().find(|name| is_dylib(name))
{
let filename = AbsPathBuf::assert(filename.clone());
let res = res.data.entry(package_id.clone()).or_default();
res.proc_macro_dylib_path = Some(filename);
}
}
}
Message::CompilerMessage(message) => {
progress(message.target.name.clone());
}
Message::Unknown => (),
Message::BuildFinished(_) => {}
Message::TextLine(_) => {}
}
}
}
res.inject_cargo_env(packages);
Ok(res)
}
}
pub(crate) fn with_cargo_env(packages: &Vec<Package>) -> Self {
let mut res = Self::default();
res.inject_cargo_env(packages);
res
impl BuildDataResult {
pub(crate) fn get(&self, root: &AbsPath) -> Option<&BuildDataMap> {
self.data.get(&root.to_path_buf())
}
}
impl BuildDataConfig {
pub(crate) fn new(
cargo_toml: AbsPathBuf,
cargo_features: CargoConfig,
packages: Arc<Vec<cargo_metadata::Package>>,
) -> Self {
Self { cargo_toml, cargo_features, packages }
}
}
fn collect_from_workspace(
cargo_toml: &AbsPath,
cargo_features: &CargoConfig,
packages: &Vec<cargo_metadata::Package>,
progress: &dyn Fn(String),
) -> Result<BuildDataMap> {
let mut cmd = Command::new(toolchain::cargo());
cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
.arg(cargo_toml.as_ref());
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --targets
// flag below.
cmd.arg("--all-targets");
if let Some(target) = &cargo_features.target {
cmd.args(&["--target", target]);
}
pub(crate) fn get(&self, id: &PackageId) -> Option<&BuildData> {
self.data.get(id)
if cargo_features.all_features {
cmd.arg("--all-features");
} else {
if cargo_features.no_default_features {
// FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
// https://github.com/oli-obk/cargo_metadata/issues/79
cmd.arg("--no-default-features");
}
if !cargo_features.features.is_empty() {
cmd.arg("--features");
cmd.arg(cargo_features.features.join(" "));
}
}
fn inject_cargo_env(&mut self, packages: &Vec<Package>) {
for meta_pkg in packages {
let resource = self.data.entry(meta_pkg.id.clone()).or_default();
inject_cargo_env(meta_pkg, &mut resource.envs);
cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
if let Some(out_dir) = &resource.out_dir {
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
resource.envs.push(("OUT_DIR".to_string(), out_dir));
let mut child = cmd.spawn().map(JodChild)?;
let child_stdout = child.stdout.take().unwrap();
let stdout = BufReader::new(child_stdout);
let mut res = BuildDataMap::default();
for message in cargo_metadata::Message::parse_stream(stdout) {
if let Ok(message) = message {
match message {
Message::BuildScriptExecuted(BuildScript {
package_id,
out_dir,
cfgs,
env,
..
}) => {
let cfgs = {
let mut acc = Vec::new();
for cfg in cfgs {
match cfg.parse::<CfgFlag>() {
Ok(it) => acc.push(it),
Err(err) => {
anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
}
};
}
acc
};
let res = res.entry(package_id.repr.clone()).or_default();
// cargo_metadata crate returns default (empty) path for
// older cargos, which is not absolute, so work around that.
if out_dir != PathBuf::default() {
let out_dir = AbsPathBuf::assert(out_dir);
res.out_dir = Some(out_dir);
res.cfgs = cfgs;
}
res.envs = env;
}
Message::CompilerArtifact(message) => {
progress(format!("metadata {}", message.target.name));
if message.target.kind.contains(&"proc-macro".to_string()) {
let package_id = message.package_id;
// Skip rmeta file
if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
{
let filename = AbsPathBuf::assert(filename.clone());
let res = res.entry(package_id.repr.clone()).or_default();
res.proc_macro_dylib_path = Some(filename);
}
}
}
Message::CompilerMessage(message) => {
progress(message.target.name.clone());
}
Message::Unknown => (),
Message::BuildFinished(_) => {}
Message::TextLine(_) => {}
}
}
}
for package in packages {
let build_data = res.entry(package.id.repr.clone()).or_default();
inject_cargo_env(package, build_data);
if let Some(out_dir) = &build_data.out_dir {
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
build_data.envs.push(("OUT_DIR".to_string(), out_dir));
}
}
}
Ok(res)
}
// FIXME: File a better way to know if it is a dylib
@ -173,7 +219,9 @@ fn is_dylib(path: &Path) -> bool {
/// Recreates the compile-time environment variables that Cargo sets.
///
/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
fn inject_cargo_env(package: &cargo_metadata::Package, env: &mut Vec<(String, String)>) {
fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut BuildData) {
let env = &mut build_data.envs;
// FIXME: Missing variables:
// CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_<name>

View File

@ -1,6 +1,6 @@
//! FIXME: write short doc here
use std::{convert::TryInto, ops, process::Command};
use std::{convert::TryInto, ops, process::Command, sync::Arc};
use anyhow::{Context, Result};
use base_db::Edition;
@ -9,7 +9,7 @@
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use crate::build_data::{BuildData, BuildDataMap};
use crate::build_data::BuildDataConfig;
use crate::utf8_stdout;
/// `CargoWorkspace` represents the logical structure of, well, a Cargo
@ -27,6 +27,7 @@ pub struct CargoWorkspace {
packages: Arena<PackageData>,
targets: Arena<TargetData>,
workspace_root: AbsPathBuf,
build_data_config: BuildDataConfig,
}
impl ops::Index<Package> for CargoWorkspace {
@ -55,9 +56,6 @@ pub struct CargoConfig {
/// This will be ignored if `cargo_all_features` is true.
pub features: Vec<String>,
/// Runs cargo check on launch to figure out the correct values of OUT_DIR
pub load_out_dirs_from_check: bool,
/// rustc target
pub target: Option<String>,
@ -94,8 +92,8 @@ pub struct PackageData {
pub features: FxHashMap<String, Vec<String>>,
/// List of features enabled on this package
pub active_features: Vec<String>,
/// Build script related data for this package
pub build_data: BuildData,
// String representation of package id
pub id: String,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@ -228,12 +226,6 @@ pub fn from_cargo_metadata(
)
})?;
let resources = if config.load_out_dirs_from_check {
BuildDataMap::new(cargo_toml, config, &meta.packages, progress)?
} else {
BuildDataMap::with_cargo_env(&meta.packages)
};
let mut pkg_by_id = FxHashMap::default();
let mut packages = Arena::default();
let mut targets = Arena::default();
@ -241,10 +233,7 @@ pub fn from_cargo_metadata(
let ws_members = &meta.workspace_members;
meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
for meta_pkg in meta.packages {
let id = meta_pkg.id.clone();
let build_data = resources.get(&id).cloned().unwrap_or_default();
for meta_pkg in &meta.packages {
let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } =
meta_pkg;
let is_member = ws_members.contains(&id);
@ -252,24 +241,24 @@ pub fn from_cargo_metadata(
.parse::<Edition>()
.with_context(|| format!("Failed to parse edition {}", edition))?;
let pkg = packages.alloc(PackageData {
name,
id: id.repr.clone(),
name: name.clone(),
version: version.to_string(),
manifest: AbsPathBuf::assert(manifest_path),
manifest: AbsPathBuf::assert(manifest_path.clone()),
targets: Vec::new(),
is_member,
edition,
dependencies: Vec::new(),
features: meta_pkg.features.into_iter().collect(),
features: meta_pkg.features.clone().into_iter().collect(),
active_features: Vec::new(),
build_data,
});
let pkg_data = &mut packages[pkg];
pkg_by_id.insert(id, pkg);
for meta_tgt in meta_pkg.targets {
for meta_tgt in &meta_pkg.targets {
let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"];
let tgt = targets.alloc(TargetData {
package: pkg,
name: meta_tgt.name,
name: meta_tgt.name.clone(),
root: AbsPathBuf::assert(meta_tgt.src_path.clone()),
kind: TargetKind::new(meta_tgt.kind.as_slice()),
is_proc_macro,
@ -308,7 +297,13 @@ pub fn from_cargo_metadata(
}
let workspace_root = AbsPathBuf::assert(meta.workspace_root);
Ok(CargoWorkspace { packages, targets, workspace_root: workspace_root })
let build_data_config = BuildDataConfig::new(
cargo_toml.to_path_buf(),
config.clone(),
Arc::new(meta.packages.clone()),
);
Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config })
}
pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
@ -334,6 +329,10 @@ pub fn package_flag(&self, package: &PackageData) -> String {
}
}
pub(crate) fn build_data_config(&self) -> &BuildDataConfig {
&self.build_data_config
}
fn is_unique(&self, name: &str) -> bool {
self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
}

View File

@ -19,6 +19,7 @@
use rustc_hash::FxHashSet;
pub use crate::{
build_data::{BuildDataCollector, BuildDataResult},
cargo_workspace::{
CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, Target, TargetData,
TargetKind,

View File

@ -16,8 +16,13 @@
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
cargo_workspace, cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, utf8_stdout, CargoConfig,
CargoWorkspace, ProjectJson, ProjectManifest, Sysroot, TargetKind,
build_data::{BuildData, BuildDataMap, BuildDataResult},
cargo_workspace,
cfg_flag::CfgFlag,
rustc_cfg,
sysroot::SysrootCrate,
utf8_stdout, BuildDataCollector, CargoConfig, CargoWorkspace, ProjectJson, ProjectManifest,
Sysroot, TargetKind,
};
/// `PackageRoot` describes a package root folder.
@ -153,7 +158,7 @@ pub fn load_inline(
/// Returns the roots for the current `ProjectWorkspace`
/// The return type contains the path and whether or not
/// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> {
pub fn to_roots(&self, build_data: Option<&BuildDataResult>) -> Vec<PackageRoot> {
match self {
ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
.crates()
@ -179,7 +184,12 @@ pub fn to_roots(&self) -> Vec<PackageRoot> {
let pkg_root = cargo[pkg].root().to_path_buf();
let mut include = vec![pkg_root.clone()];
include.extend(cargo[pkg].build_data.out_dir.clone());
include.extend(
build_data
.and_then(|it| it.get(cargo.workspace_root()))
.and_then(|map| map.get(&cargo[pkg].id))
.and_then(|it| it.out_dir.clone()),
);
let mut exclude = vec![pkg_root.join(".git")];
if is_member {
@ -219,6 +229,7 @@ pub fn n_packages(&self) -> usize {
pub fn to_crate_graph(
&self,
build_data: Option<&BuildDataResult>,
proc_macro_client: Option<&ProcMacroClient>,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
) -> CrateGraph {
@ -241,8 +252,10 @@ pub fn to_crate_graph(
&proc_macro_loader,
load,
cargo,
build_data.and_then(|it| it.get(cargo.workspace_root())),
sysroot,
rustc,
rustc.as_ref().zip(build_data).and_then(|(it, map)| map.get(it.workspace_root())),
),
};
if crate_graph.patch_cfg_if() {
@ -252,6 +265,18 @@ pub fn to_crate_graph(
}
crate_graph
}
pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) {
match self {
ProjectWorkspace::Cargo { cargo, rustc, .. } => {
collector.add_config(&cargo.workspace_root(), cargo.build_data_config().clone());
if let Some(rustc) = rustc {
collector.add_config(rustc.workspace_root(), rustc.build_data_config().clone());
}
}
_ => {}
}
}
}
fn project_json_to_crate_graph(
@ -324,8 +349,10 @@ fn cargo_to_crate_graph(
proc_macro_loader: &dyn Fn(&Path) -> Vec<ProcMacro>,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
cargo: &CargoWorkspace,
build_data_map: Option<&BuildDataMap>,
sysroot: &Sysroot,
rustc: &Option<CargoWorkspace>,
rustc_build_data_map: Option<&BuildDataMap>,
) -> CrateGraph {
let _p = profile::span("cargo_to_crate_graph");
let mut crate_graph = CrateGraph::default();
@ -351,6 +378,7 @@ fn cargo_to_crate_graph(
let crate_id = add_target_crate_root(
&mut crate_graph,
&cargo[pkg],
build_data_map.and_then(|it| it.get(&cargo[pkg].id)),
&cfg_options,
proc_macro_loader,
file_id,
@ -427,6 +455,7 @@ fn cargo_to_crate_graph(
let crate_id = add_target_crate_root(
&mut crate_graph,
&rustc_workspace[pkg],
rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)),
&cfg_options,
proc_macro_loader,
file_id,
@ -475,6 +504,7 @@ fn cargo_to_crate_graph(
fn add_target_crate_root(
crate_graph: &mut CrateGraph,
pkg: &cargo_workspace::PackageData,
build_data: Option<&BuildData>,
cfg_options: &CfgOptions,
proc_macro_loader: &dyn Fn(&Path) -> Vec<ProcMacro>,
file_id: FileId,
@ -485,19 +515,22 @@ fn add_target_crate_root(
for feature in pkg.active_features.iter() {
opts.insert_key_value("feature".into(), feature.into());
}
opts.extend(pkg.build_data.cfgs.iter().cloned());
if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) {
opts.extend(cfgs.iter().cloned());
}
opts
};
let mut env = Env::default();
for (k, v) in &pkg.build_data.envs {
env.set(k, v.clone());
if let Some(envs) = build_data.map(|it| &it.envs) {
for (k, v) in envs {
env.set(k, v.clone());
}
}
let proc_macro = pkg
.build_data
.proc_macro_dylib_path
let proc_macro = build_data
.as_ref()
.and_then(|it| it.proc_macro_dylib_path.as_ref())
.map(|it| proc_macro_loader(&it))
.unwrap_or_default();

View File

@ -6,7 +6,9 @@
use crossbeam_channel::{unbounded, Receiver};
use ide::{AnalysisHost, Change};
use ide_db::base_db::CrateGraph;
use project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace};
use project_model::{
BuildDataCollector, CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace,
};
use vfs::{loader::Handle, AbsPath, AbsPathBuf};
use crate::reload::{ProjectFolders, SourceRootConfig};
@ -18,11 +20,7 @@ pub fn load_cargo(
) -> Result<(AnalysisHost, vfs::Vfs)> {
let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
let root = ProjectManifest::discover_single(&root)?;
let ws = ProjectWorkspace::load(
root,
&CargoConfig { load_out_dirs_from_check, ..Default::default() },
&|_| {},
)?;
let ws = ProjectWorkspace::load(root, &CargoConfig::default(), &|_| {})?;
let (sender, receiver) = unbounded();
let mut vfs = vfs::Vfs::default();
@ -39,14 +37,26 @@ pub fn load_cargo(
None
};
let crate_graph = ws.to_crate_graph(proc_macro_client.as_ref(), &mut |path: &AbsPath| {
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
});
let build_data = if load_out_dirs_from_check {
let mut collector = BuildDataCollector::default();
ws.collect_build_data_configs(&mut collector);
Some(collector.collect(&|_| {})?)
} else {
None
};
let project_folders = ProjectFolders::new(&[ws], &[]);
let crate_graph = ws.to_crate_graph(
build_data.as_ref(),
proc_macro_client.as_ref(),
&mut |path: &AbsPath| {
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
},
);
let project_folders = ProjectFolders::new(&[ws], &[], build_data.as_ref());
loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![] });
log::debug!("crate graph: {:?}", crate_graph);

View File

@ -469,6 +469,9 @@ pub fn notifications(&self) -> NotificationsConfig {
pub fn cargo_autoreload(&self) -> bool {
self.data.cargo_autoreload
}
pub fn load_out_dirs_from_check(&self) -> bool {
self.data.cargo_loadOutDirsFromCheck
}
pub fn cargo(&self) -> CargoConfig {
let rustc_source = self.data.rustcSource.as_ref().map(|it| self.root_path.join(&it));
@ -476,7 +479,6 @@ pub fn cargo(&self) -> CargoConfig {
no_default_features: self.data.cargo_noDefaultFeatures,
all_features: self.data.cargo_allFeatures,
features: self.data.cargo_features.clone(),
load_out_dirs_from_check: self.data.cargo_loadOutDirsFromCheck,
target: self.data.cargo_target.clone(),
rustc_source,
no_sysroot: self.data.cargo_noSysroot,

View File

@ -11,7 +11,9 @@
use ide_db::base_db::{CrateId, VfsPath};
use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock};
use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
use project_model::{
BuildDataCollector, BuildDataResult, CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target,
};
use rustc_hash::FxHashMap;
use vfs::AnchoredPathBuf;
@ -33,7 +35,7 @@
#[derive(Eq, PartialEq, Copy, Clone)]
pub(crate) enum Status {
Loading,
Ready,
Ready { partial: bool },
Invalid,
NeedsReload,
}
@ -79,7 +81,9 @@ pub(crate) struct GlobalState {
pub(crate) source_root_config: SourceRootConfig,
pub(crate) proc_macro_client: Option<ProcMacroClient>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) fetch_workspaces_queue: OpQueue,
pub(crate) fetch_workspaces_queue: OpQueue<()>,
pub(crate) workspace_build_data: Option<BuildDataResult>,
pub(crate) fetch_build_data_queue: OpQueue<BuildDataCollector>,
latest_requests: Arc<RwLock<LatestRequests>>,
}
@ -133,6 +137,8 @@ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> Global
proc_macro_client: None,
workspaces: Arc::new(Vec::new()),
fetch_workspaces_queue: OpQueue::default(),
workspace_build_data: None,
fetch_build_data_queue: OpQueue::default(),
latest_requests: Default::default(),
}
}

View File

@ -234,6 +234,7 @@ pub enum StatusNotification {}
#[derive(Serialize, Deserialize)]
pub enum Status {
Loading,
ReadyPartial,
Ready,
NeedsReload,
Invalid,

View File

@ -21,7 +21,7 @@
global_state::{file_id_to_url, url_to_file_id, GlobalState, Status},
handlers, lsp_ext,
lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress},
reload::ProjectWorkspaceProgress,
reload::{BuildDataProgress, ProjectWorkspaceProgress},
Result,
};
@ -63,6 +63,7 @@ pub(crate) enum Task {
Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
PrimeCaches(PrimeCachesProgress),
FetchWorkspace(ProjectWorkspaceProgress),
FetchBuildData(BuildDataProgress),
}
impl fmt::Debug for Event {
@ -226,12 +227,33 @@ fn handle_event(&mut self, event: Event) -> Result<()> {
}
ProjectWorkspaceProgress::End(workspaces) => {
self.fetch_workspaces_completed();
self.switch_workspaces(workspaces);
self.switch_workspaces(workspaces, None);
(Progress::End, None)
}
};
self.report_progress("fetching", state, msg, None);
}
Task::FetchBuildData(progress) => {
let (state, msg) = match progress {
BuildDataProgress::Begin => (Some(Progress::Begin), None),
BuildDataProgress::Report(msg) => {
(Some(Progress::Report), Some(msg))
}
BuildDataProgress::End(collector) => {
self.fetch_build_data_completed();
let workspaces = (*self.workspaces)
.clone()
.into_iter()
.map(|it| Ok(it))
.collect();
self.switch_workspaces(workspaces, Some(collector));
(Some(Progress::End), None)
}
};
if let Some(state) = state {
self.report_progress("loading", state, msg, None);
}
}
}
// Coalesce multiple task events into one loop turn
task = match self.task_pool.receiver.try_recv() {
@ -287,7 +309,11 @@ fn handle_event(&mut self, event: Event) -> Result<()> {
Progress::Report
} else {
assert_eq!(n_done, n_total);
self.transition(Status::Ready);
let status = Status::Ready {
partial: self.config.load_out_dirs_from_check()
&& self.workspace_build_data.is_none(),
};
self.transition(status);
Progress::End
};
self.report_progress(
@ -372,13 +398,14 @@ fn handle_event(&mut self, event: Event) -> Result<()> {
}
let state_changed = self.process_changes();
if prev_status == Status::Loading && self.status == Status::Ready {
let is_ready = matches!(self.status, Status::Ready { .. } );
if prev_status == Status::Loading && is_ready {
for flycheck in &self.flycheck {
flycheck.update();
}
}
if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) {
if is_ready && (state_changed || prev_status == Status::Loading) {
self.update_file_notifications_on_threadpool();
// Refresh semantic tokens if the client supports it.
@ -408,6 +435,7 @@ fn handle_event(&mut self, event: Event) -> Result<()> {
}
self.fetch_workspaces_if_needed();
self.fetch_build_data_if_needed();
let loop_duration = loop_start.elapsed();
if loop_duration > Duration::from_millis(100) {

View File

@ -1,22 +1,26 @@
//! Bookkeeping to make sure only one long-running operation is executed.
#[derive(Default)]
pub(crate) struct OpQueue {
op_scheduled: bool,
pub(crate) struct OpQueue<D> {
op_scheduled: Option<D>,
op_in_progress: bool,
}
impl OpQueue {
pub(crate) fn request_op(&mut self) {
self.op_scheduled = true;
impl<D> Default for OpQueue<D> {
fn default() -> Self {
Self { op_scheduled: None, op_in_progress: false }
}
pub(crate) fn should_start_op(&mut self) -> bool {
if !self.op_in_progress && self.op_scheduled {
self.op_in_progress = true;
self.op_scheduled = false;
return true;
}
impl<D> OpQueue<D> {
pub(crate) fn request_op(&mut self, data: D) {
self.op_scheduled = Some(data);
}
pub(crate) fn should_start_op(&mut self) -> Option<D> {
if self.op_in_progress {
return None;
}
false
self.op_in_progress = self.op_scheduled.is_some();
self.op_scheduled.take()
}
pub(crate) fn op_completed(&mut self) {
assert!(self.op_in_progress);

View File

@ -4,7 +4,7 @@
use flycheck::{FlycheckConfig, FlycheckHandle};
use ide::Change;
use ide_db::base_db::{CrateGraph, SourceRoot, VfsPath};
use project_model::{ProcMacroClient, ProjectWorkspace};
use project_model::{BuildDataCollector, BuildDataResult, ProcMacroClient, ProjectWorkspace};
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
use crate::{
@ -22,6 +22,13 @@ pub(crate) enum ProjectWorkspaceProgress {
End(Vec<anyhow::Result<ProjectWorkspace>>),
}
#[derive(Debug)]
pub(crate) enum BuildDataProgress {
Begin,
Report(String),
End(anyhow::Result<BuildDataResult>),
}
impl GlobalState {
pub(crate) fn update_configuration(&mut self, config: Config) {
let _p = profile::span("GlobalState::update_configuration");
@ -41,7 +48,7 @@ pub(crate) fn maybe_refresh(&mut self, changes: &[(AbsPathBuf, ChangeKind)]) {
}
match self.status {
Status::Loading | Status::NeedsReload => return,
Status::Ready | Status::Invalid => (),
Status::Ready { .. } | Status::Invalid => (),
}
if self.config.cargo_autoreload() {
self.fetch_workspaces_request();
@ -89,7 +96,8 @@ pub(crate) fn transition(&mut self, new_status: Status) {
if self.config.status_notification() {
let lsp_status = match new_status {
Status::Loading => lsp_ext::Status::Loading,
Status::Ready => lsp_ext::Status::Ready,
Status::Ready { partial: true } => lsp_ext::Status::ReadyPartial,
Status::Ready { partial: false } => lsp_ext::Status::Ready,
Status::Invalid => lsp_ext::Status::Invalid,
Status::NeedsReload => lsp_ext::Status::NeedsReload,
};
@ -99,11 +107,37 @@ pub(crate) fn transition(&mut self, new_status: Status) {
}
}
pub(crate) fn fetch_build_data_request(&mut self, build_data_collector: BuildDataCollector) {
self.fetch_build_data_queue.request_op(build_data_collector);
}
pub(crate) fn fetch_build_data_if_needed(&mut self) {
let mut build_data_collector = match self.fetch_build_data_queue.should_start_op() {
Some(it) => it,
None => return,
};
self.task_pool.handle.spawn_with_sender(move |sender| {
sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
let progress = {
let sender = sender.clone();
move |msg| {
sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
}
};
let res = build_data_collector.collect(&progress);
sender.send(Task::FetchBuildData(BuildDataProgress::End(res))).unwrap();
});
}
pub(crate) fn fetch_build_data_completed(&mut self) {
self.fetch_build_data_queue.op_completed()
}
pub(crate) fn fetch_workspaces_request(&mut self) {
self.fetch_workspaces_queue.request_op()
self.fetch_workspaces_queue.request_op(())
}
pub(crate) fn fetch_workspaces_if_needed(&mut self) {
if !self.fetch_workspaces_queue.should_start_op() {
if self.fetch_workspaces_queue.should_start_op().is_none() {
return;
}
log::info!("will fetch workspaces");
@ -154,7 +188,11 @@ pub(crate) fn fetch_workspaces_completed(&mut self) {
self.fetch_workspaces_queue.op_completed()
}
pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<ProjectWorkspace>>) {
pub(crate) fn switch_workspaces(
&mut self,
workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
workspace_build_data: Option<anyhow::Result<BuildDataResult>>,
) {
let _p = profile::span("GlobalState::switch_workspaces");
log::info!("will switch workspaces: {:?}", workspaces);
@ -176,7 +214,20 @@ pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<Projec
})
.collect::<Vec<_>>();
if &*self.workspaces == &workspaces {
let workspace_build_data = match workspace_build_data {
Some(Ok(it)) => Some(it),
Some(Err(err)) => {
log::error!("failed to fetch build data: {:#}", err);
self.show_message(
lsp_types::MessageType::Error,
format!("rust-analyzer failed to fetch build data: {:#}", err),
);
return;
}
None => None,
};
if &*self.workspaces == &workspaces && self.workspace_build_data == workspace_build_data {
return;
}
@ -189,7 +240,7 @@ pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<Projec
let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
watchers: workspaces
.iter()
.flat_map(ProjectWorkspace::to_roots)
.flat_map(|it| it.to_roots(workspace_build_data.as_ref()))
.filter(|it| it.is_member)
.flat_map(|root| {
root.include.into_iter().map(|it| format!("{}/**/*.rs", it.display()))
@ -215,7 +266,8 @@ pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<Projec
let mut change = Change::new();
let files_config = self.config.files();
let project_folders = ProjectFolders::new(&workspaces, &files_config.exclude);
let project_folders =
ProjectFolders::new(&workspaces, &files_config.exclude, workspace_build_data.as_ref());
self.proc_macro_client = match self.config.proc_macro_srv() {
None => None,
@ -257,15 +309,28 @@ pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<Projec
res
};
for ws in workspaces.iter() {
crate_graph.extend(ws.to_crate_graph(self.proc_macro_client.as_ref(), &mut load));
crate_graph.extend(ws.to_crate_graph(
self.workspace_build_data.as_ref(),
self.proc_macro_client.as_ref(),
&mut load,
));
}
crate_graph
};
change.set_crate_graph(crate_graph);
if self.config.load_out_dirs_from_check() && workspace_build_data.is_none() {
let mut collector = BuildDataCollector::default();
for ws in &workspaces {
ws.collect_build_data_configs(&mut collector);
}
self.fetch_build_data_request(collector)
}
self.source_root_config = project_folders.source_root_config;
self.workspaces = Arc::new(workspaces);
self.workspace_build_data = workspace_build_data;
self.analysis_host.apply_change(change);
self.process_changes();
@ -323,12 +388,13 @@ impl ProjectFolders {
pub(crate) fn new(
workspaces: &[ProjectWorkspace],
global_excludes: &[AbsPathBuf],
build_data: Option<&BuildDataResult>,
) -> ProjectFolders {
let mut res = ProjectFolders::default();
let mut fsc = FileSetConfig::builder();
let mut local_filesets = vec![];
for root in workspaces.iter().flat_map(|it| it.to_roots()) {
for root in workspaces.iter().flat_map(|it| it.to_roots(build_data)) {
let file_set_roots: Vec<VfsPath> =
root.include.iter().cloned().map(VfsPath::from).collect();