Re-implement dependency tracking for cargo script

Lukas Wirth 2024-04-19 14:55:53 +02:00
parent 2f828073aa
commit 0b24599cf9
10 changed files with 187 additions and 234 deletions

View File

@ -335,7 +335,7 @@ fn load_crate_graph(
) -> RootDatabase {
let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. }
| ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws;
| ProjectWorkspace::DetachedFile { toolchain, target_layout, .. }) = ws;
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
let mut db = RootDatabase::new(lru_cap);

View File

@ -305,8 +305,10 @@ pub fn fetch_metadata(
.collect(),
);
}
if cargo_toml.extension().is_some_and(|x| x == "rs") {
// TODO: enable `+nightly` for cargo scripts
// The manifest is a rust file, so this means it's a script manifest
if cargo_toml.extension().is_some_and(|ext| ext == "rs") {
// Deliberately don't set up RUSTC_BOOTSTRAP or a nightly override here, the user should
// opt into it themselves.
other_options.push("-Zscript".to_owned());
}
meta.other_options(other_options);
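For context, a cargo script is a plain `.rs` file that embeds its manifest in the file itself; the `+nightly` opt-in lives in the user's own shebang rather than being injected by rust-analyzer, which is what the comment above is getting at. A minimal sketch of such a file, mirroring the updated test fixture further down in this commit (the `anyhow` dependency is made up for illustration):

#!/usr/bin/env -S cargo +nightly -Zscript
//! ---cargo
//! [dependencies]
//! anyhow = "1"
//! ---

fn main() -> anyhow::Result<()> {
    // Fetching metadata for this file goes through the `-Zscript` branch added above.
    println!("hello from a cargo script");
    Ok(())
}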

View File

@ -2,7 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`.
use std::{collections::VecDeque, fmt, fs, io::BufRead, iter, sync};
use std::{collections::VecDeque, fmt, fs, iter, sync};
use anyhow::{format_err, Context};
use base_db::{
@ -99,9 +99,9 @@ pub enum ProjectWorkspace {
// //
/// Project with a set of disjoint files, not belonging to any particular workspace.
/// Backed by basic sysroot crates for basic completion and highlighting.
DetachedFiles {
/// The set of detached files.
files: Vec<AbsPathBuf>,
DetachedFile {
/// The file in question.
file: AbsPathBuf,
/// The sysroot loaded for this workspace.
sysroot: Result<Sysroot, Option<String>>,
/// Holds cfg flags for the current target. We get those by running
@ -120,50 +120,6 @@ pub enum ProjectWorkspace {
},
}
/// Tracks the cargo toml parts in cargo scripts, to detect if they
/// changed and reload workspace in that case.
pub struct CargoScriptTomls(pub FxHashMap<AbsPathBuf, String>);
impl CargoScriptTomls {
fn extract_toml_part(p: &AbsPath) -> Option<String> {
let mut r = String::new();
let f = std::fs::File::open(p).ok()?;
let f = std::io::BufReader::new(f);
let mut started = false;
for line in f.lines() {
let line = line.ok()?;
if started {
if line.trim() == "//! ```" {
return Some(r);
}
r += &line;
} else {
if line.trim() == "//! ```cargo" {
started = true;
}
}
}
None
}
pub fn track_file(&mut self, p: AbsPathBuf) {
let toml = CargoScriptTomls::extract_toml_part(&p).unwrap_or_default();
self.0.insert(p, toml);
}
pub fn need_reload(&mut self, p: &AbsPath) -> bool {
let Some(prev) = self.0.get_mut(p) else {
return false; // File is not tracked
};
let next = CargoScriptTomls::extract_toml_part(p).unwrap_or_default();
if *prev == next {
return false;
}
*prev = next;
true
}
}
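For reference, the markers the removed extract_toml_part scanned for are the opening line //! ```cargo and the bare closing line //! ``` ; only the text between them was compared to decide whether the workspace needed reloading. A sketch of an input it would accept (illustrative; the dependency is made up):

#!/usr/bin/env -S cargo -Zscript
//! ```cargo
//! [dependencies]
//! serde = "1"
//! ```

fn main() {
    // Edits to the script body would not trigger a reload; only changes inside the block above would.
}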
impl fmt::Debug for ProjectWorkspace {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Make sure this isn't too verbose.
@ -213,8 +169,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
.field("n_cfg_overrides", &cfg_overrides.len());
debug_struct.finish()
}
ProjectWorkspace::DetachedFiles {
files,
ProjectWorkspace::DetachedFile {
file,
sysroot,
rustc_cfg,
toolchain,
@ -223,7 +179,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
cargo_script,
} => f
.debug_struct("DetachedFiles")
.field("n_files", &files.len())
.field("file", &file)
.field("cargo_script", &cargo_script.is_some())
.field("sysroot", &sysroot.is_ok())
.field("cargo_script", &cargo_script.is_some())
.field("n_rustc_cfg", &rustc_cfg.len())
@ -479,71 +436,79 @@ pub fn load_inline(
pub fn load_detached_files(
detached_files: Vec<AbsPathBuf>,
config: &CargoConfig,
cargo_script_tomls: &mut CargoScriptTomls,
) -> anyhow::Result<ProjectWorkspace> {
let dir = detached_files
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?;
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
.map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
}
Some(RustLibSource::Discover) => Sysroot::discover(
dir,
&config.extra_env,
config.sysroot_query_metadata,
)
.map_err(|e| {
Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
}),
None => Err(None),
};
) -> Vec<anyhow::Result<ProjectWorkspace>> {
dbg!(detached_files
.into_iter()
.map(|detached_file| {
let dir = detached_file
.parent()
.ok_or_else(|| format_err!("detached file has no parent"))?;
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
.map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
}
Some(RustLibSource::Discover) => {
Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata)
.map_err(|e| {
Some(format!(
"Failed to find sysroot for {dir}. Is rust-src installed? {e}"
))
})
}
None => Err(None),
};
let sysroot_ref = sysroot.as_ref().ok();
let toolchain =
match get_toolchain_version(dir, sysroot_ref, Tool::Rustc, &config.extra_env, "rustc ")
{
Ok(it) => it,
Err(e) => {
tracing::error!("{e}");
None
}
};
let sysroot_ref = sysroot.as_ref().ok();
let toolchain = match get_toolchain_version(
dir,
sysroot_ref,
Tool::Rustc,
&config.extra_env,
"rustc ",
) {
Ok(it) => it,
Err(e) => {
tracing::error!("{e}");
None
}
};
let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot_ref),
None,
&config.extra_env,
);
let cargo_toml = ManifestPath::try_from(detached_files[0].clone()).unwrap();
let meta = CargoWorkspace::fetch_metadata(
&cargo_toml,
cargo_toml.parent(),
config,
sysroot_ref,
&|_| (),
)
.with_context(|| {
format!("Failed to read Cargo metadata from Cargo.toml file {cargo_toml}")
})?;
let cargo = CargoWorkspace::new(meta);
let rustc_cfg =
rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot_ref),
None,
&config.extra_env,
);
for file in &detached_files {
cargo_script_tomls.track_file(file.clone());
}
let cargo_script = ManifestPath::try_from(detached_file.clone())
.ok()
.and_then(|file| {
CargoWorkspace::fetch_metadata(
&file,
file.parent(),
config,
sysroot_ref,
&|_| (),
)
.ok()
})
.map(CargoWorkspace::new);
Ok(ProjectWorkspace::DetachedFiles {
files: detached_files,
sysroot,
rustc_cfg,
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
cargo_script: Some(cargo),
})
Ok(ProjectWorkspace::DetachedFile {
file: detached_file,
sysroot,
rustc_cfg,
toolchain,
target_layout: data_layout
.map(Arc::from)
.map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
cargo_script,
})
})
.collect())
}
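A hypothetical call-site sketch for the new signature (the real call site is in the fetch_workspaces hunk further down): each detached file now yields its own anyhow::Result<ProjectWorkspace>, so one file failing to load no longer takes the others down with it.

let mut workspaces: Vec<anyhow::Result<ProjectWorkspace>> = Vec::new();
if !detached_files.is_empty() {
    // One workspace result per detached file.
    workspaces.extend(ProjectWorkspace::load_detached_files(detached_files, &cargo_config));
}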
/// Runs the build scripts for this [`ProjectWorkspace`].
@ -565,7 +530,7 @@ pub fn run_build_scripts(
format!("Failed to run build scripts for {}", cargo.workspace_root())
})
}
ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFile { .. } => {
Ok(WorkspaceBuildScripts::default())
}
}
@ -623,11 +588,11 @@ pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
}
}
pub fn workspace_definition_path(&self) -> Option<&AbsPath> {
pub fn workspace_definition_path(&self) -> &AbsPath {
match self {
ProjectWorkspace::Cargo { cargo, .. } => Some(cargo.workspace_root()),
ProjectWorkspace::Json { project, .. } => Some(project.path()),
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::Cargo { cargo, .. } => cargo.workspace_root(),
ProjectWorkspace::Json { project, .. } => project.path(),
ProjectWorkspace::DetachedFile { file, .. } => file,
}
}
@ -635,10 +600,10 @@ pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
match self {
ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::DetachedFiles { sysroot: Ok(sysroot), .. } => {
| ProjectWorkspace::DetachedFile { sysroot: Ok(sysroot), .. } => {
sysroot.discover_proc_macro_srv()
}
ProjectWorkspace::DetachedFiles { .. } => {
ProjectWorkspace::DetachedFile { .. } => {
Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found"))
}
ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::format_err!(
@ -769,15 +734,13 @@ pub fn to_roots(&self) -> Vec<PackageRoot> {
}))
.collect()
}
ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
.iter()
.map(|detached_file| PackageRoot {
is_local: true,
include: vec![detached_file.clone()],
exclude: Vec::new(),
})
.chain(mk_sysroot(sysroot.as_ref()))
.collect(),
ProjectWorkspace::DetachedFile { file, sysroot, .. } => iter::once(PackageRoot {
is_local: true,
include: vec![file.clone()],
exclude: Vec::new(),
})
.chain(mk_sysroot(sysroot.as_ref()))
.collect(),
}
}
@ -793,9 +756,9 @@ pub fn n_packages(&self) -> usize {
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
cargo.packages().len() + sysroot_package_len + rustc_package_len
}
ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
ProjectWorkspace::DetachedFile { sysroot, .. } => {
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
sysroot_package_len + files.len()
sysroot_package_len + 1
}
}
}
@ -848,8 +811,8 @@ pub fn to_crate_graph(
),
sysroot,
),
ProjectWorkspace::DetachedFiles {
files,
ProjectWorkspace::DetachedFile {
file,
sysroot,
rustc_cfg,
toolchain: _,
@ -868,10 +831,10 @@ pub fn to_crate_graph(
&WorkspaceBuildScripts::default(),
)
} else {
detached_files_to_crate_graph(
detached_file_to_crate_graph(
rustc_cfg.clone(),
load,
files,
file,
sysroot.as_ref().ok(),
cfg_overrides,
)
@ -949,8 +912,8 @@ pub fn eq_ignore_build_data(&self, other: &Self) -> bool {
&& cfg_overrides == o_cfg_overrides
}
(
Self::DetachedFiles {
files,
Self::DetachedFile {
file,
sysroot,
rustc_cfg,
cargo_script,
@ -958,8 +921,8 @@ pub fn eq_ignore_build_data(&self, other: &Self) -> bool {
target_layout,
cfg_overrides,
},
Self::DetachedFiles {
files: o_files,
Self::DetachedFile {
file: o_file,
sysroot: o_sysroot,
rustc_cfg: o_rustc_cfg,
cargo_script: o_cargo_script,
@ -968,7 +931,7 @@ pub fn eq_ignore_build_data(&self, other: &Self) -> bool {
cfg_overrides: o_cfg_overrides,
},
) => {
files == o_files
file == o_file
&& sysroot == o_sysroot
&& rustc_cfg == o_rustc_cfg
&& toolchain == o_toolchain
@ -1285,10 +1248,10 @@ fn cargo_to_crate_graph(
res
}
fn detached_files_to_crate_graph(
fn detached_file_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>,
detached_files: &[AbsPathBuf],
detached_file: &AbsPathBuf,
sysroot: Option<&Sysroot>,
override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
@ -1305,34 +1268,32 @@ fn detached_files_to_crate_graph(
override_cfg.apply(&mut cfg_options, "");
let cfg_options = Arc::new(cfg_options);
for detached_file in detached_files {
let file_id = match load(detached_file) {
Some(file_id) => file_id,
None => {
tracing::error!("Failed to load detached file {:?}", detached_file);
continue;
}
};
let display_name = detached_file
.file_stem()
.map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned()));
let detached_file_crate = crate_graph.add_crate_root(
file_id,
Edition::CURRENT,
display_name.clone(),
None,
cfg_options.clone(),
None,
Env::default(),
false,
CrateOrigin::Local {
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
);
let file_id = match load(detached_file) {
Some(file_id) => file_id,
None => {
tracing::error!("Failed to load detached file {:?}", detached_file);
return (crate_graph, FxHashMap::default());
}
};
let display_name = detached_file
.file_stem()
.map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned()));
let detached_file_crate = crate_graph.add_crate_root(
file_id,
Edition::CURRENT,
display_name.clone(),
None,
cfg_options.clone(),
None,
Env::default(),
false,
CrateOrigin::Local {
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
);
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
}
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
(crate_graph, FxHashMap::default())
}

View File

@ -75,8 +75,8 @@ fn new() -> Result<Self> {
&cargo_config.extra_env,
);
let workspace = ProjectWorkspace::DetachedFiles {
files: vec![tmp_file],
let workspace = ProjectWorkspace::DetachedFile {
file: tmp_file,
sysroot,
rustc_cfg: vec![],
toolchain: None,

View File

@ -18,9 +18,7 @@
RwLockWriteGuard,
};
use proc_macro_api::ProcMacroServer;
use project_model::{
CargoScriptTomls, CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts,
};
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use rustc_hash::{FxHashMap, FxHashSet};
use triomphe::Arc;
use vfs::{AnchoredPathBuf, ChangedFile, Vfs};
@ -127,6 +125,7 @@ pub(crate) struct GlobalState {
/// to invalidate any salsa caches.
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) crate_graph_file_dependencies: FxHashSet<vfs::VfsPath>,
pub(crate) detached_files: FxHashSet<vfs::AbsPathBuf>,
// op queues
pub(crate) fetch_workspaces_queue:
@ -146,7 +145,6 @@ pub(crate) struct GlobalState {
/// this queue should run only *after* [`GlobalState::process_changes`] has
/// been called.
pub(crate) deferred_task_queue: TaskQueue,
pub(crate) cargo_script_tomls: Arc<Mutex<CargoScriptTomls>>,
}
/// An immutable snapshot of the world's state at a point in time.
@ -236,6 +234,7 @@ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> Global
workspaces: Arc::from(Vec::new()),
crate_graph_file_dependencies: FxHashSet::default(),
detached_files: FxHashSet::default(),
fetch_workspaces_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
fetch_proc_macros_queue: OpQueue::default(),
@ -243,7 +242,6 @@ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> Global
prime_caches_queue: OpQueue::default(),
deferred_task_queue: task_queue,
cargo_script_tomls: Arc::new(Mutex::new(CargoScriptTomls(FxHashMap::default()))),
};
// Apply any required database inputs from the config.
this.update_configuration(config);
@ -326,11 +324,7 @@ pub(crate) fn process_changes(&mut self) -> bool {
if file.is_created_or_deleted() {
workspace_structure_change.get_or_insert((path, false)).1 |=
self.crate_graph_file_dependencies.contains(vfs_path);
} else if reload::should_refresh_for_change(
&path,
file.kind(),
&mut self.cargo_script_tomls.lock(),
) {
} else if reload::should_refresh_for_change(&path, file.kind()) {
workspace_structure_change.get_or_insert((path.clone(), false));
}
}
@ -527,7 +521,7 @@ pub(crate) fn cargo_target_for_crate_root(
cargo.target_by_root(path).map(|it| (cargo, it))
}
ProjectWorkspace::Json { .. } => None,
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::DetachedFile { .. } => None,
})
}

View File

@ -150,14 +150,14 @@ pub(crate) fn handle_did_save_text_document(
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(
abs_path,
ChangeKind::Modify,
&mut state.cargo_script_tomls.lock(),
) {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
state
.fetch_workspaces_queue
.request_op(format!("workspace vfs file change saved {abs_path}"), false);
} else if state.detached_files.contains(abs_path) {
state
.fetch_workspaces_queue
.request_op(format!("detached file saved {abs_path}"), false);
}
}
@ -307,7 +307,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
None
}
project_model::ProjectWorkspace::DetachedFiles { .. } => return None,
project_model::ProjectWorkspace::DetachedFile { .. } => return None,
};
Some((idx, package))
});

View File

@ -101,7 +101,7 @@ pub(crate) fn handle_analyzer_status(
"Workspace root folders: {:?}",
snap.workspaces
.iter()
.flat_map(|ws| ws.workspace_definition_path())
.map(|ws| ws.workspace_definition_path())
.collect::<Vec<&AbsPath>>()
);
}
@ -1761,7 +1761,7 @@ pub(crate) fn handle_open_docs(
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match ws {
ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some((cargo, sysroot.as_ref().ok())),
ProjectWorkspace::Json { .. } => None,
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::DetachedFile { .. } => None,
});
let (cargo, sysroot) = match ws_and_sysroot {

View File

@ -25,7 +25,7 @@
use itertools::Itertools;
use load_cargo::{load_proc_macro, ProjectFolders};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoScriptTomls, ProjectWorkspace, WorkspaceBuildScripts};
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use stdx::{format_to, thread::ThreadIntent};
use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, ChangeKind};
@ -153,7 +153,7 @@ pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
for ws in self.workspaces.iter() {
let (ProjectWorkspace::Cargo { sysroot, .. }
| ProjectWorkspace::Json { sysroot, .. }
| ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
| ProjectWorkspace::DetachedFile { sysroot, .. }) = ws;
match sysroot {
Err(None) => (),
Err(Some(e)) => {
@ -206,7 +206,6 @@ pub(crate) fn fetch_workspaces(&mut self, cause: Cause, force_crate_graph_reload
let linked_projects = self.config.linked_or_discovered_projects();
let detached_files = self.config.detached_files().to_vec();
let cargo_config = self.config.cargo();
let cargo_script_tomls = self.cargo_script_tomls.clone();
move |sender| {
let progress = {
@ -256,10 +255,9 @@ pub(crate) fn fetch_workspaces(&mut self, cause: Cause, force_crate_graph_reload
}
if !detached_files.is_empty() {
workspaces.push(project_model::ProjectWorkspace::load_detached_files(
workspaces.extend(project_model::ProjectWorkspace::load_detached_files(
detached_files,
&cargo_config,
&mut cargo_script_tomls.lock(),
));
}
@ -542,9 +540,6 @@ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
}
fn recreate_crate_graph(&mut self, cause: String) {
// crate graph construction relies on these paths, record them so when one of them gets
// deleted or created we trigger a reconstruction of the crate graph
let mut crate_graph_file_dependencies = mem::take(&mut self.crate_graph_file_dependencies);
self.report_progress(
"Building CrateGraph",
crate::lsp::utils::Progress::Begin,
@ -553,13 +548,25 @@ fn recreate_crate_graph(&mut self, cause: String) {
None,
);
// crate graph construction relies on these paths, record them so when one of them gets
// deleted or created we trigger a reconstruction of the crate graph
self.crate_graph_file_dependencies.clear();
self.detached_files = self
.workspaces
.iter()
.filter_map(|ws| match ws {
ProjectWorkspace::DetachedFile { file, .. } => Some(file.clone()),
_ => None,
})
.collect();
let (crate_graph, proc_macro_paths, layouts, toolchains) = {
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
let load = |path: &AbsPath| {
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
crate_graph_file_dependencies.insert(vfs_path.clone());
self.crate_graph_file_dependencies.insert(vfs_path.clone());
vfs.file_id(&vfs_path)
};
@ -579,7 +586,6 @@ fn recreate_crate_graph(&mut self, cause: String) {
change.set_target_data_layouts(layouts);
change.set_toolchains(toolchains);
self.analysis_host.apply_change(change);
self.crate_graph_file_dependencies = crate_graph_file_dependencies;
self.report_progress(
"Building CrateGraph",
crate::lsp::utils::Progress::End,
@ -676,7 +682,7 @@ fn reload_flycheck(&mut self) {
_ => None,
}
}
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::DetachedFile { .. } => None,
})
.map(|(id, root, sysroot_root)| {
let sender = sender.clone();
@ -712,18 +718,14 @@ pub fn ws_to_crate_graph(
let mut toolchains = Vec::default();
let e = Err(Arc::from("missing layout"));
for ws in workspaces {
dbg!(ws);
let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
dbg!(&other);
let num_layouts = layouts.len();
let num_toolchains = toolchains.len();
let (toolchain, layout) = match ws {
ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. } => {
(toolchain.clone(), target_layout.clone())
}
ProjectWorkspace::DetachedFiles { .. } => {
(None, Err("detached files have no layout".into()))
}
};
let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. }
| ProjectWorkspace::DetachedFile { toolchain, target_layout, .. }) = ws;
let mapping = crate_graph.extend(
other,
@ -732,7 +734,7 @@ pub fn ws_to_crate_graph(
// if the newly created crate graph's layout is equal to the crate of the merged graph, then
// we can merge the crates.
let id = cg_id.into_raw().into_u32() as usize;
layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data
layouts[id] == *target_layout && toolchains[id] == *toolchain && cg_data == o_data
},
);
// Populate the side tables for the newly merged crates
@ -744,13 +746,13 @@ pub fn ws_to_crate_graph(
if layouts.len() <= idx {
layouts.resize(idx + 1, e.clone());
}
layouts[idx].clone_from(&layout);
layouts[idx].clone_from(target_layout);
}
if idx >= num_toolchains {
if toolchains.len() <= idx {
toolchains.resize(idx + 1, None);
}
toolchains[idx].clone_from(&toolchain);
toolchains[idx].clone_from(toolchain);
}
});
proc_macro_paths.push(crate_proc_macros);
@ -760,15 +762,7 @@ pub fn ws_to_crate_graph(
(crate_graph, proc_macro_paths, layouts, toolchains)
}
pub(crate) fn should_refresh_for_change(
path: &AbsPath,
change_kind: ChangeKind,
cargo_script_tomls: &mut CargoScriptTomls,
) -> bool {
if cargo_script_tomls.need_reload(path) {
return true;
}
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];

View File

@ -139,16 +139,16 @@ fn completes_items_from_standard_library_in_cargo_script() {
pub struct SpecialHashMap2;
//- /src/lib.rs
#!/usr/bin/env -S cargo +nightly -Zscript
//! ```cargo
//! ---cargo
//! [dependencies]
//! dependency = { path = "../dependency" }
//! ```
//! ---
use dependency::Spam;
use dependency2::Spam;
"#,
)
.with_config(serde_json::json!({
"cargo": { "sysroot": "discover" },
"cargo": { "sysroot": null },
}))
.server()
.wait_until_workspace_is_loaded();
@ -156,18 +156,18 @@ fn completes_items_from_standard_library_in_cargo_script() {
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(7, 18),
Position::new(5, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(res.to_string().contains("SpecialHashMap"));
assert!(res.to_string().contains("SpecialHashMap"), "{}", res.to_string());
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(8, 18),
Position::new(6, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
@ -178,10 +178,10 @@ fn completes_items_from_standard_library_in_cargo_script() {
server.write_file_and_save(
"src/lib.rs",
r#"#!/usr/bin/env -S cargo +nightly -Zscript
//! ```cargo
//! ---cargo
//! [dependencies]
//! dependency2 = { path = "../dependency2" }
//! ```
//! ---
use dependency::Spam;
use dependency2::Spam;
"#
@ -195,7 +195,7 @@ fn completes_items_from_standard_library_in_cargo_script() {
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(7, 18),
Position::new(5, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
@ -206,7 +206,7 @@ fn completes_items_from_standard_library_in_cargo_script() {
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(8, 18),
Position::new(6, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),

View File

@ -287,6 +287,7 @@ pub(crate) fn request<R>(&self, params: R::Params, expected_resp: Value)
}
}
#[track_caller]
pub(crate) fn send_request<R>(&self, params: R::Params) -> Value
where
R: lsp_types::request::Request,
@ -298,6 +299,7 @@ pub(crate) fn send_request<R>(&self, params: R::Params) -> Value
let r = Request::new(id.into(), R::METHOD.to_owned(), params);
self.send_request_(r)
}
#[track_caller]
fn send_request_(&self, r: Request) -> Value {
let id = r.id.clone();
self.client.sender.send(r.clone().into()).unwrap();