Auto merge of #17110 - Veykril:cargo-script-mvp, r=Veykril

Cargo script MVP

Based on https://github.com/rust-lang/rust-analyzer/pull/15456.

As the original PR stated, detached files are still horrendous to work with.
bors 2024-04-19 20:51:10 +00:00
commit c83d8cf584
10 changed files with 376 additions and 143 deletions
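For context, a cargo script is a single `.rs` file that carries its own manifest in a `---` frontmatter block after an optional shebang, which is the shape the integration test below exercises. A minimal example (the dependency is only an illustration; running it requires a nightly cargo, since `-Zscript` is unstable):

```rust
#!/usr/bin/env -S cargo +nightly -Zscript
---
[dependencies]
anyhow = "1"
---

fn main() -> anyhow::Result<()> {
    println!("hello from a cargo script");
    Ok(())
}
```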

@ -335,7 +335,7 @@ fn load_crate_graph(
) -> RootDatabase {
let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. }
| ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws;
| ProjectWorkspace::DetachedFile { toolchain, target_layout, .. }) = ws;
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
let mut db = RootDatabase::new(lru_cap);

@ -305,6 +305,12 @@ impl CargoWorkspace {
.collect(),
);
}
// The manifest is a Rust file, so this means it's a script manifest
if cargo_toml.extension().is_some_and(|ext| ext == "rs") {
// Deliberately don't set up RUSTC_BOOTSTRAP or a nightly override here; the user should
// opt into it themselves.
other_options.push("-Zscript".to_owned());
}
meta.other_options(other_options);
// FIXME: Fetching metadata is a slow process, as it might require
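A rough sketch (not part of this diff) of how that flag ends up on the `cargo metadata` call via the `cargo_metadata` crate; the helper name and the feature selection are illustrative, and the invocation needs a nightly cargo since `-Zscript` is unstable:

```rust
use std::path::Path;

use cargo_metadata::{CargoOpt, Metadata, MetadataCommand};

/// Fetch metadata for either a regular `Cargo.toml` or a `.rs` cargo script.
fn fetch_metadata(manifest: &Path) -> cargo_metadata::Result<Metadata> {
    let mut meta = MetadataCommand::new();
    meta.manifest_path(manifest);
    meta.features(CargoOpt::AllFeatures);

    let mut other_options = Vec::new();
    // A `.rs` manifest means a cargo script, so pass the unstable flag through.
    if manifest.extension().is_some_and(|ext| ext == "rs") {
        other_options.push("-Zscript".to_owned());
    }
    meta.other_options(other_options);

    meta.exec()
}
```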
@ -373,11 +379,12 @@ impl CargoWorkspace {
let is_local = source.is_none();
let is_member = ws_members.contains(&id);
let manifest = AbsPathBuf::assert(manifest_path);
let pkg = packages.alloc(PackageData {
id: id.repr.clone(),
name,
version,
manifest: AbsPathBuf::assert(manifest_path).try_into().unwrap(),
manifest: manifest.clone().try_into().unwrap(),
targets: Vec::new(),
is_local,
is_member,
@ -400,11 +407,22 @@ impl CargoWorkspace {
for meta_tgt in meta_targets {
let cargo_metadata::Target { name, kind, required_features, src_path, .. } =
meta_tgt;
let kind = TargetKind::new(&kind);
let tgt = targets.alloc(TargetData {
package: pkg,
name,
root: AbsPathBuf::assert(src_path),
kind: TargetKind::new(&kind),
root: if kind == TargetKind::Bin
&& manifest.extension().is_some_and(|ext| ext == "rs")
{
// Cargo strips the script part of a cargo script away and places the
// modified manifest file into a special target directory, which is then used
// as the source path. We don't want that; we want the original here, so map
// it back.
manifest.clone()
} else {
AbsPathBuf::assert(src_path)
},
kind,
required_features,
});
pkg_data.targets.push(tgt);
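To make the remapping concrete, here is a self-contained sketch of the decision the hunk above encodes; it is an illustration, not code from this PR:

```rust
use std::path::{Path, PathBuf};

/// Per the comment above, for a cargo script the `src_path` reported by cargo does not
/// point at the file the user is editing, so the bin target's root is mapped back to
/// the manifest path (which, for a script, is the `.rs` file itself).
fn bin_target_root(manifest: &Path, reported_src_path: PathBuf, is_bin: bool) -> PathBuf {
    if is_bin && manifest.extension().is_some_and(|ext| ext == "rs") {
        manifest.to_path_buf()
    } else {
        reported_src_path
    }
}
```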

@ -99,9 +99,9 @@ pub enum ProjectWorkspace {
// //
/// Project with a set of disjoint files, not belonging to any particular workspace.
/// Backed by basic sysroot crates for basic completion and highlighting.
DetachedFiles {
/// The set of detached files.
files: Vec<AbsPathBuf>,
DetachedFile {
/// The file in question.
file: AbsPathBuf,
/// The sysroot loaded for this workspace.
sysroot: Result<Sysroot, Option<String>>,
/// Holds cfg flags for the current target. We get those by running
@ -115,6 +115,8 @@ pub enum ProjectWorkspace {
target_layout: TargetLayoutLoadResult,
/// A set of cfg overrides for the files.
cfg_overrides: CfgOverrides,
/// Is this file a cargo script file?
cargo_script: Option<CargoWorkspace>,
},
}
@ -167,17 +169,20 @@ impl fmt::Debug for ProjectWorkspace {
.field("n_cfg_overrides", &cfg_overrides.len());
debug_struct.finish()
}
ProjectWorkspace::DetachedFiles {
files,
ProjectWorkspace::DetachedFile {
file,
sysroot,
rustc_cfg,
toolchain,
target_layout,
cfg_overrides,
cargo_script,
} => f
.debug_struct("DetachedFiles")
.field("n_files", &files.len())
.field("file", &file)
.field("cargo_script", &cargo_script.is_some())
.field("sysroot", &sysroot.is_ok())
.field("cargo_script", &cargo_script.is_some())
.field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
@ -431,52 +436,79 @@ impl ProjectWorkspace {
pub fn load_detached_files(
detached_files: Vec<AbsPathBuf>,
config: &CargoConfig,
) -> anyhow::Result<ProjectWorkspace> {
let dir = detached_files
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?;
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
.map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
}
Some(RustLibSource::Discover) => Sysroot::discover(
dir,
&config.extra_env,
config.sysroot_query_metadata,
)
.map_err(|e| {
Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
}),
None => Err(None),
};
) -> Vec<anyhow::Result<ProjectWorkspace>> {
detached_files
.into_iter()
.map(|detached_file| {
let dir = detached_file
.parent()
.ok_or_else(|| format_err!("detached file has no parent"))?;
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
.map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
}
Some(RustLibSource::Discover) => {
Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata)
.map_err(|e| {
Some(format!(
"Failed to find sysroot for {dir}. Is rust-src installed? {e}"
))
})
}
None => Err(None),
};
let sysroot_ref = sysroot.as_ref().ok();
let toolchain =
match get_toolchain_version(dir, sysroot_ref, Tool::Rustc, &config.extra_env, "rustc ")
{
Ok(it) => it,
Err(e) => {
tracing::error!("{e}");
None
}
};
let sysroot_ref = sysroot.as_ref().ok();
let toolchain = match get_toolchain_version(
dir,
sysroot_ref,
Tool::Rustc,
&config.extra_env,
"rustc ",
) {
Ok(it) => it,
Err(e) => {
tracing::error!("{e}");
None
}
};
let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot_ref),
None,
&config.extra_env,
);
Ok(ProjectWorkspace::DetachedFiles {
files: detached_files,
sysroot,
rustc_cfg,
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
})
let rustc_cfg =
rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot_ref),
None,
&config.extra_env,
);
let cargo_script = ManifestPath::try_from(detached_file.clone())
.ok()
.and_then(|file| {
CargoWorkspace::fetch_metadata(
&file,
file.parent(),
config,
sysroot_ref,
&|_| (),
)
.ok()
})
.map(CargoWorkspace::new);
Ok(ProjectWorkspace::DetachedFile {
file: detached_file,
sysroot,
rustc_cfg,
toolchain,
target_layout: data_layout
.map(Arc::from)
.map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
cargo_script,
})
})
.collect()
}
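With the loader now returning one `Result` per detached file, a single broken script no longer fails the whole batch. A sketch of how a caller might keep whatever loads; this leans on the crate's own types (`AbsPathBuf`, `CargoConfig`, `ProjectWorkspace`) rather than being standalone, and the real call site is the `reload.rs` hunk further down, which simply extends its list of per-workspace results:

```rust
fn usable_detached_workspaces(
    detached_files: Vec<AbsPathBuf>,
    cargo_config: &CargoConfig,
) -> Vec<ProjectWorkspace> {
    ProjectWorkspace::load_detached_files(detached_files, cargo_config)
        .into_iter()
        .filter_map(|res| match res {
            Ok(ws) => Some(ws),
            Err(e) => {
                // Illustrative handling: log and drop files that fail to load.
                tracing::error!("failed to load detached file: {e}");
                None
            }
        })
        .collect()
}
```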
/// Runs the build scripts for this [`ProjectWorkspace`].
@ -486,7 +518,13 @@ impl ProjectWorkspace {
progress: &dyn Fn(String),
) -> anyhow::Result<WorkspaceBuildScripts> {
match self {
ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => {
ProjectWorkspace::DetachedFile {
cargo_script: Some(cargo),
toolchain,
sysroot,
..
}
| ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => {
WorkspaceBuildScripts::run_for_workspace(
config,
cargo,
@ -498,9 +536,8 @@ impl ProjectWorkspace {
format!("Failed to run build scripts for {}", cargo.workspace_root())
})
}
ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
Ok(WorkspaceBuildScripts::default())
}
ProjectWorkspace::DetachedFile { cargo_script: None, .. }
| ProjectWorkspace::Json { .. } => Ok(WorkspaceBuildScripts::default()),
}
}
@ -556,11 +593,11 @@ impl ProjectWorkspace {
}
}
pub fn workspace_definition_path(&self) -> Option<&AbsPath> {
pub fn workspace_definition_path(&self) -> &AbsPath {
match self {
ProjectWorkspace::Cargo { cargo, .. } => Some(cargo.workspace_root()),
ProjectWorkspace::Json { project, .. } => Some(project.path()),
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::Cargo { cargo, .. } => cargo.workspace_root(),
ProjectWorkspace::Json { project, .. } => project.path(),
ProjectWorkspace::DetachedFile { file, .. } => file,
}
}
@ -568,10 +605,10 @@ impl ProjectWorkspace {
match self {
ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::DetachedFiles { sysroot: Ok(sysroot), .. } => {
| ProjectWorkspace::DetachedFile { sysroot: Ok(sysroot), .. } => {
sysroot.discover_proc_macro_srv()
}
ProjectWorkspace::DetachedFiles { .. } => {
ProjectWorkspace::DetachedFile { .. } => {
Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found"))
}
ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::format_err!(
@ -702,15 +739,50 @@ impl ProjectWorkspace {
}))
.collect()
}
ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
.iter()
.map(|detached_file| PackageRoot {
ProjectWorkspace::DetachedFile { file, cargo_script, sysroot, .. } => {
iter::once(PackageRoot {
is_local: true,
include: vec![detached_file.clone()],
include: vec![file.clone()],
exclude: Vec::new(),
})
.chain(cargo_script.iter().flat_map(|cargo| {
cargo.packages().map(|pkg| {
let is_local = cargo[pkg].is_local;
let pkg_root = cargo[pkg].manifest.parent().to_path_buf();
let mut include = vec![pkg_root.clone()];
// In case the target's path is manually set in Cargo.toml to be
// outside the package root, add its parent as an extra include.
// An example of this situation would look like this:
//
// ```toml
// [lib]
// path = "../../src/lib.rs"
// ```
let extra_targets = cargo[pkg]
.targets
.iter()
.filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. }))
.filter_map(|&tgt| cargo[tgt].root.parent())
.map(|tgt| tgt.normalize().to_path_buf())
.filter(|path| !path.starts_with(&pkg_root));
include.extend(extra_targets);
let mut exclude = vec![pkg_root.join(".git")];
if is_local {
exclude.push(pkg_root.join("target"));
} else {
exclude.push(pkg_root.join("tests"));
exclude.push(pkg_root.join("examples"));
exclude.push(pkg_root.join("benches"));
}
PackageRoot { is_local, include, exclude }
})
}))
.chain(mk_sysroot(sysroot.as_ref()))
.collect(),
.collect()
}
}
}
@ -726,9 +798,10 @@ impl ProjectWorkspace {
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
cargo.packages().len() + sysroot_package_len + rustc_package_len
}
ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
ProjectWorkspace::DetachedFile { sysroot, cargo_script, .. } => {
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
sysroot_package_len + files.len()
sysroot_package_len
+ cargo_script.as_ref().map_or(1, |cargo| cargo.packages().len())
}
}
}
@ -781,21 +854,34 @@ impl ProjectWorkspace {
),
sysroot,
),
ProjectWorkspace::DetachedFiles {
files,
ProjectWorkspace::DetachedFile {
file,
sysroot,
rustc_cfg,
toolchain: _,
target_layout: _,
cfg_overrides,
cargo_script,
} => (
detached_files_to_crate_graph(
rustc_cfg.clone(),
load,
files,
sysroot.as_ref().ok(),
cfg_overrides,
),
if let Some(cargo) = cargo_script {
cargo_to_crate_graph(
&mut |path| load(path),
None,
cargo,
sysroot.as_ref().ok(),
rustc_cfg.clone(),
cfg_overrides,
&WorkspaceBuildScripts::default(),
)
} else {
detached_file_to_crate_graph(
rustc_cfg.clone(),
load,
file,
sysroot.as_ref().ok(),
cfg_overrides,
)
},
sysroot,
),
};
@ -869,29 +955,32 @@ impl ProjectWorkspace {
&& cfg_overrides == o_cfg_overrides
}
(
Self::DetachedFiles {
files,
Self::DetachedFile {
file,
sysroot,
rustc_cfg,
cargo_script,
toolchain,
target_layout,
cfg_overrides,
},
Self::DetachedFiles {
files: o_files,
Self::DetachedFile {
file: o_file,
sysroot: o_sysroot,
rustc_cfg: o_rustc_cfg,
cargo_script: o_cargo_script,
toolchain: o_toolchain,
target_layout: o_target_layout,
cfg_overrides: o_cfg_overrides,
},
) => {
files == o_files
file == o_file
&& sysroot == o_sysroot
&& rustc_cfg == o_rustc_cfg
&& toolchain == o_toolchain
&& target_layout == o_target_layout
&& cfg_overrides == o_cfg_overrides
&& cargo_script == o_cargo_script
}
_ => false,
}
@ -1202,10 +1291,10 @@ fn cargo_to_crate_graph(
res
}
fn detached_files_to_crate_graph(
fn detached_file_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>,
detached_files: &[AbsPathBuf],
detached_file: &AbsPathBuf,
sysroot: Option<&Sysroot>,
override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
@ -1222,34 +1311,32 @@ fn detached_files_to_crate_graph(
override_cfg.apply(&mut cfg_options, "");
let cfg_options = Arc::new(cfg_options);
for detached_file in detached_files {
let file_id = match load(detached_file) {
Some(file_id) => file_id,
None => {
tracing::error!("Failed to load detached file {:?}", detached_file);
continue;
}
};
let display_name = detached_file
.file_stem()
.map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned()));
let detached_file_crate = crate_graph.add_crate_root(
file_id,
Edition::CURRENT,
display_name.clone(),
None,
cfg_options.clone(),
None,
Env::default(),
false,
CrateOrigin::Local {
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
);
let file_id = match load(detached_file) {
Some(file_id) => file_id,
None => {
tracing::error!("Failed to load detached file {:?}", detached_file);
return (crate_graph, FxHashMap::default());
}
};
let display_name = detached_file
.file_stem()
.map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned()));
let detached_file_crate = crate_graph.add_crate_root(
file_id,
Edition::CURRENT,
display_name.clone(),
None,
cfg_options.clone(),
None,
Env::default(),
false,
CrateOrigin::Local {
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
);
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
}
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
(crate_graph, FxHashMap::default())
}

@ -75,13 +75,14 @@ impl Tester {
&cargo_config.extra_env,
);
let workspace = ProjectWorkspace::DetachedFiles {
files: vec![tmp_file],
let workspace = ProjectWorkspace::DetachedFile {
file: tmp_file,
sysroot,
rustc_cfg: vec![],
toolchain: None,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: Default::default(),
cargo_script: None,
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false,

@ -125,6 +125,7 @@ pub(crate) struct GlobalState {
/// to invalidate any salsa caches.
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) crate_graph_file_dependencies: FxHashSet<vfs::VfsPath>,
pub(crate) detached_files: FxHashSet<vfs::AbsPathBuf>,
// op queues
pub(crate) fetch_workspaces_queue:
@ -233,6 +234,7 @@ impl GlobalState {
workspaces: Arc::from(Vec::new()),
crate_graph_file_dependencies: FxHashSet::default(),
detached_files: FxHashSet::default(),
fetch_workspaces_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
fetch_proc_macros_queue: OpQueue::default(),
@ -519,7 +521,7 @@ impl GlobalStateSnapshot {
cargo.target_by_root(path).map(|it| (cargo, it))
}
ProjectWorkspace::Json { .. } => None,
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::DetachedFile { .. } => None,
})
}

@ -154,6 +154,10 @@ pub(crate) fn handle_did_save_text_document(
state
.fetch_workspaces_queue
.request_op(format!("workspace vfs file change saved {abs_path}"), false);
} else if state.detached_files.contains(abs_path) {
state
.fetch_workspaces_queue
.request_op(format!("detached file saved {abs_path}"), false);
}
}
@ -303,7 +307,8 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
None
}
project_model::ProjectWorkspace::DetachedFiles { .. } => return None,
// FIXME
project_model::ProjectWorkspace::DetachedFile { .. } => return None,
};
Some((idx, package))
});

@ -101,7 +101,7 @@ pub(crate) fn handle_analyzer_status(
"Workspace root folders: {:?}",
snap.workspaces
.iter()
.flat_map(|ws| ws.workspace_definition_path())
.map(|ws| ws.workspace_definition_path())
.collect::<Vec<&AbsPath>>()
);
}
@ -1761,7 +1761,9 @@ pub(crate) fn handle_open_docs(
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match ws {
ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some((cargo, sysroot.as_ref().ok())),
ProjectWorkspace::Json { .. } => None,
ProjectWorkspace::DetachedFiles { .. } => None,
ProjectWorkspace::DetachedFile { cargo_script, sysroot, .. } => {
cargo_script.as_ref().zip(Some(sysroot.as_ref().ok()))
}
});
let (cargo, sysroot) = match ws_and_sysroot {

@ -153,7 +153,7 @@ impl GlobalState {
for ws in self.workspaces.iter() {
let (ProjectWorkspace::Cargo { sysroot, .. }
| ProjectWorkspace::Json { sysroot, .. }
| ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
| ProjectWorkspace::DetachedFile { sysroot, .. }) = ws;
match sysroot {
Err(None) => (),
Err(Some(e)) => {
@ -255,7 +255,7 @@ impl GlobalState {
}
if !detached_files.is_empty() {
workspaces.push(project_model::ProjectWorkspace::load_detached_files(
workspaces.extend(project_model::ProjectWorkspace::load_detached_files(
detached_files,
&cargo_config,
));
@ -540,9 +540,6 @@ impl GlobalState {
}
fn recreate_crate_graph(&mut self, cause: String) {
// crate graph construction relies on these paths, record them so when one of them gets
// deleted or created we trigger a reconstruction of the crate graph
let mut crate_graph_file_dependencies = mem::take(&mut self.crate_graph_file_dependencies);
self.report_progress(
"Building CrateGraph",
crate::lsp::utils::Progress::Begin,
@ -551,13 +548,25 @@ impl GlobalState {
None,
);
// crate graph construction relies on these paths, record them so when one of them gets
// deleted or created we trigger a reconstruction of the crate graph
self.crate_graph_file_dependencies.clear();
self.detached_files = self
.workspaces
.iter()
.filter_map(|ws| match ws {
ProjectWorkspace::DetachedFile { file, .. } => Some(file.clone()),
_ => None,
})
.collect();
let (crate_graph, proc_macro_paths, layouts, toolchains) = {
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
let load = |path: &AbsPath| {
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
crate_graph_file_dependencies.insert(vfs_path.clone());
self.crate_graph_file_dependencies.insert(vfs_path.clone());
vfs.file_id(&vfs_path)
};
@ -577,7 +586,6 @@ impl GlobalState {
change.set_target_data_layouts(layouts);
change.set_toolchains(toolchains);
self.analysis_host.apply_change(change);
self.crate_graph_file_dependencies = crate_graph_file_dependencies;
self.report_progress(
"Building CrateGraph",
crate::lsp::utils::Progress::End,
@ -674,7 +682,8 @@ impl GlobalState {
_ => None,
}
}
ProjectWorkspace::DetachedFiles { .. } => None,
// FIXME
ProjectWorkspace::DetachedFile { .. } => None,
})
.map(|(id, root, sysroot_root)| {
let sender = sender.clone();
@ -713,15 +722,9 @@ pub fn ws_to_crate_graph(
let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
let num_layouts = layouts.len();
let num_toolchains = toolchains.len();
let (toolchain, layout) = match ws {
ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. } => {
(toolchain.clone(), target_layout.clone())
}
ProjectWorkspace::DetachedFiles { .. } => {
(None, Err("detached files have no layout".into()))
}
};
let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. }
| ProjectWorkspace::DetachedFile { toolchain, target_layout, .. }) = ws;
let mapping = crate_graph.extend(
other,
@ -730,7 +733,7 @@ pub fn ws_to_crate_graph(
// if the newly created crate graph's layout is equal to the crate of the merged graph, then
// we can merge the crates.
let id = cg_id.into_raw().into_u32() as usize;
layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data
layouts[id] == *target_layout && toolchains[id] == *toolchain && cg_data == o_data
},
);
// Populate the side tables for the newly merged crates
@ -742,13 +745,13 @@ pub fn ws_to_crate_graph(
if layouts.len() <= idx {
layouts.resize(idx + 1, e.clone());
}
layouts[idx].clone_from(&layout);
layouts[idx].clone_from(target_layout);
}
if idx >= num_toolchains {
if toolchains.len() <= idx {
toolchains.resize(idx + 1, None);
}
toolchains[idx].clone_from(&toolchain);
toolchains[idx].clone_from(toolchain);
}
});
proc_macro_paths.push(crate_proc_macros);

@ -117,6 +117,105 @@ fn f() {
);
}
#[test]
fn completes_items_from_standard_library_in_cargo_script() {
// this test requires nightly so CI can't run it
if skip_slow_tests() || std::env::var("CI").is_ok() {
return;
}
let server = Project::with_fixture(
r#"
//- /dependency/Cargo.toml
[package]
name = "dependency"
version = "0.1.0"
//- /dependency/src/lib.rs
pub struct SpecialHashMap;
//- /dependency2/Cargo.toml
[package]
name = "dependency2"
version = "0.1.0"
//- /dependency2/src/lib.rs
pub struct SpecialHashMap2;
//- /src/lib.rs
#!/usr/bin/env -S cargo +nightly -Zscript
---
[dependencies]
dependency = { path = "../dependency" }
---
use dependency::Spam;
use dependency2::Spam;
"#,
)
.with_config(serde_json::json!({
"cargo": { "sysroot": null },
}))
.server()
.wait_until_workspace_is_loaded();
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(5, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(res.to_string().contains("SpecialHashMap"), "{}", res.to_string());
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(6, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(!res.to_string().contains("SpecialHashMap"));
server.write_file_and_save(
"src/lib.rs",
r#"#!/usr/bin/env -S cargo +nightly -Zscript
---
[dependencies]
dependency2 = { path = "../dependency2" }
---
use dependency::Spam;
use dependency2::Spam;
"#
.to_owned(),
);
let server = server.wait_until_workspace_is_loaded();
std::thread::sleep(std::time::Duration::from_secs(3));
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(5, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(!res.to_string().contains("SpecialHashMap"));
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(6, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(res.to_string().contains("SpecialHashMap"));
}
#[test]
fn test_runnables_project() {
if skip_slow_tests() {

@ -125,7 +125,7 @@ impl Project<'_> {
}
let mut config = Config::new(
tmp_dir_path,
tmp_dir_path.clone(),
lsp_types::ClientCapabilities {
workspace: Some(lsp_types::WorkspaceClientCapabilities {
did_change_watched_files: Some(
@ -185,10 +185,14 @@ impl Project<'_> {
roots,
None,
);
config.update(self.config).expect("invalid config");
// TODO: don't hardcode src/lib.rs as detached file
let mut c = self.config;
let p = tmp_dir_path.join("src/lib.rs").to_string();
c["detachedFiles"] = serde_json::json!([p]);
config.update(c).expect("invalid config");
config.rediscover_workspaces();
Server::new(tmp_dir, config)
Server::new(tmp_dir.keep(), config)
}
}
@ -283,6 +287,7 @@ impl Server {
}
}
#[track_caller]
pub(crate) fn send_request<R>(&self, params: R::Params) -> Value
where
R: lsp_types::request::Request,
@ -294,6 +299,7 @@ impl Server {
let r = Request::new(id.into(), R::METHOD.to_owned(), params);
self.send_request_(r)
}
#[track_caller]
fn send_request_(&self, r: Request) -> Value {
let id = r.id.clone();
self.client.sender.send(r.clone().into()).unwrap();
@ -374,6 +380,16 @@ impl Server {
pub(crate) fn path(&self) -> &Utf8Path {
self.dir.path()
}
pub(crate) fn write_file_and_save(&self, path: &str, text: String) {
fs::write(self.dir.path().join(path), &text).unwrap();
self.notification::<lsp_types::notification::DidSaveTextDocument>(
lsp_types::DidSaveTextDocumentParams {
text_document: self.doc_id(path),
text: Some(text),
},
)
}
}
impl Drop for Server {