Merge #4709
4709: Minor r=matklad a=matklad
bors r+
🤖
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit f137b3a4e6
@@ -1,10 +1,10 @@
 //! See `CargoTargetSpec`
 
+use ra_cfg::CfgExpr;
 use ra_ide::{FileId, RunnableKind, TestId};
 use ra_project_model::{self, ProjectWorkspace, TargetKind};
 
 use crate::{world::WorldSnapshot, Result};
-use ra_syntax::SmolStr;
 
 /// Abstract representation of Cargo target.
 ///
@@ -21,7 +21,7 @@ impl CargoTargetSpec {
     pub(crate) fn runnable_args(
         spec: Option<CargoTargetSpec>,
         kind: &RunnableKind,
-        features_needed: &Vec<SmolStr>,
+        cfgs: &[CfgExpr],
     ) -> Result<(Vec<String>, Vec<String>)> {
         let mut args = Vec::new();
         let mut extra_args = Vec::new();
@@ -76,10 +76,14 @@ pub(crate) fn runnable_args(
             }
         }
 
-        features_needed.iter().for_each(|feature| {
-            args.push("--features".to_string());
-            args.push(feature.to_string());
-        });
+        let mut features = Vec::new();
+        for cfg in cfgs {
+            required_features(cfg, &mut features);
+        }
+        for feature in features {
+            args.push("--features".to_string());
+            args.push(feature);
+        }
 
         Ok((args, extra_args))
     }
@@ -140,3 +144,74 @@ pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
         }
     }
 }
+
+/// Fill minimal features needed
+fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
+    match cfg_expr {
+        CfgExpr::KeyValue { key, value } if key == "feature" => features.push(value.to_string()),
+        CfgExpr::All(preds) => {
+            preds.iter().for_each(|cfg| required_features(cfg, features));
+        }
+        CfgExpr::Any(preds) => {
+            for cfg in preds {
+                let len_features = features.len();
+                required_features(cfg, features);
+                if len_features != features.len() {
+                    break;
+                }
+            }
+        }
+        _ => {}
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use mbe::{ast_to_token_tree, TokenMap};
+    use ra_cfg::parse_cfg;
+    use ra_syntax::{
+        ast::{self, AstNode},
+        SmolStr,
+    };
+
+    fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
+        let source_file = ast::SourceFile::parse(input).ok().unwrap();
+        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+        ast_to_token_tree(&tt).unwrap()
+    }
+
+    #[test]
+    fn test_cfg_expr_minimal_features_needed() {
+        let (subtree, _) = get_token_tree_generated(r#"#![cfg(feature = "baz")]"#);
+        let cfg_expr = parse_cfg(&subtree);
+        let mut min_features = vec![];
+        required_features(&cfg_expr, &mut min_features);
+
+        assert_eq!(min_features, vec![SmolStr::new("baz")]);
+
+        let (subtree, _) =
+            get_token_tree_generated(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#);
+        let cfg_expr = parse_cfg(&subtree);
+
+        let mut min_features = vec![];
+        required_features(&cfg_expr, &mut min_features);
+        assert_eq!(min_features, vec![SmolStr::new("baz"), SmolStr::new("foo")]);
+
+        let (subtree, _) =
+            get_token_tree_generated(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#);
+        let cfg_expr = parse_cfg(&subtree);
+
+        let mut min_features = vec![];
+        required_features(&cfg_expr, &mut min_features);
+        assert_eq!(min_features, vec![SmolStr::new("baz")]);
+
+        let (subtree, _) = get_token_tree_generated(r#"#![cfg(foo)]"#);
+        let cfg_expr = parse_cfg(&subtree);
+
+        let mut min_features = vec![];
+        required_features(&cfg_expr, &mut min_features);
+        assert!(min_features.is_empty());
+    }
+}
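The new `required_features` helper treats `all(...)` and `any(...)` differently: an `all(...)` predicate contributes every feature it names, while for `any(...)` the first alternative that yields a feature is enough, which is why the loop breaks early and the collected set stays minimal. The standalone sketch below mirrors that logic against a simplified stand-in type; `MiniCfgExpr` and its variants are invented for this example and are not the real `ra_cfg::CfgExpr`.

#[derive(Debug)]
enum MiniCfgExpr {
    Feature(String),       // models CfgExpr::KeyValue { key: "feature", value }
    All(Vec<MiniCfgExpr>), // models CfgExpr::All
    Any(Vec<MiniCfgExpr>), // models CfgExpr::Any
    Other,                 // anything else, e.g. `unix`
}

// Mirrors the patch's `required_features`: `all` needs every feature it
// mentions, `any` stops after the first branch that contributes a feature.
fn required_features(cfg: &MiniCfgExpr, features: &mut Vec<String>) {
    match cfg {
        MiniCfgExpr::Feature(name) => features.push(name.clone()),
        MiniCfgExpr::All(preds) => {
            for p in preds {
                required_features(p, features);
            }
        }
        MiniCfgExpr::Any(preds) => {
            for p in preds {
                let before = features.len();
                required_features(p, features);
                if before != features.len() {
                    break;
                }
            }
        }
        MiniCfgExpr::Other => {}
    }
}

fn main() {
    // Models `#![cfg(any(feature = "baz", feature = "foo", unix))]` from the tests above.
    let cfg = MiniCfgExpr::Any(vec![
        MiniCfgExpr::Feature("baz".into()),
        MiniCfgExpr::Feature("foo".into()),
        MiniCfgExpr::Other,
    ]);
    let mut features = Vec::new();
    required_features(&cfg, &mut features);
    // One satisfiable alternative is enough for `any`, so only "baz" is required.
    assert_eq!(features, vec!["baz".to_string()]);

    // Models `#![cfg(all(feature = "baz", feature = "foo"))]`: every feature is required.
    let cfg = MiniCfgExpr::All(vec![
        MiniCfgExpr::Feature("baz".into()),
        MiniCfgExpr::Feature("foo".into()),
    ]);
    let mut features = Vec::new();
    required_features(&cfg, &mut features);
    assert_eq!(features, vec!["baz".to_string(), "foo".to_string()]);
}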
@@ -17,14 +17,12 @@
     SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
     SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, Url, WorkspaceEdit,
 };
-use ra_cfg::CfgExpr;
 use ra_ide::{
-    FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
-    TextEdit,
+    FileId, FilePosition, FileRange, Query, RangeInfo, RunnableKind, SearchScope, TextEdit,
 };
 use ra_prof::profile;
 use ra_project_model::TargetKind;
-use ra_syntax::{AstNode, SmolStr, SyntaxKind, TextRange, TextSize};
+use ra_syntax::{AstNode, SyntaxKind, TextRange, TextSize};
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use serde_json::to_value;
@@ -416,7 +414,7 @@ pub fn handle_runnables(
                 }
             }
         }
-        res.push(to_lsp_runnable(&world, file_id, runnable)?);
+        res.push(to_proto::runnable(&world, file_id, runnable)?);
     }
 
     // Add `cargo check` and `cargo test` for the whole package
@@ -784,7 +782,7 @@ pub fn handle_code_lens(
             }
         };
 
-        let mut r = to_lsp_runnable(&world, file_id, runnable)?;
+        let mut r = to_proto::runnable(&world, file_id, runnable)?;
         if world.config.lens.run {
             let lens = CodeLens {
                 range: r.range,
@@ -959,65 +957,6 @@ pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<Dia
     Ok(DiagnosticTask::SetNative(file_id, diagnostics))
 }
 
-fn to_lsp_runnable(
-    world: &WorldSnapshot,
-    file_id: FileId,
-    runnable: Runnable,
-) -> Result<lsp_ext::Runnable> {
-    let spec = CargoTargetSpec::for_file(world, file_id)?;
-    let target = spec.as_ref().map(|s| s.target.clone());
-    let mut features_needed = vec![];
-    for cfg_expr in &runnable.cfg_exprs {
-        collect_minimal_features_needed(cfg_expr, &mut features_needed);
-    }
-    let (args, extra_args) =
-        CargoTargetSpec::runnable_args(spec, &runnable.kind, &features_needed)?;
-    let line_index = world.analysis().file_line_index(file_id)?;
-    let label = match &runnable.kind {
-        RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
-        RunnableKind::TestMod { path } => format!("test-mod {}", path),
-        RunnableKind::Bench { test_id } => format!("bench {}", test_id),
-        RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
-        RunnableKind::Bin => {
-            target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
-        }
-    };
-
-    Ok(lsp_ext::Runnable {
-        range: to_proto::range(&line_index, runnable.range),
-        label,
-        kind: lsp_ext::RunnableKind::Cargo,
-        args,
-        extra_args,
-        env: {
-            let mut m = FxHashMap::default();
-            m.insert("RUST_BACKTRACE".to_string(), "short".to_string());
-            m
-        },
-        cwd: world.workspace_root_for(file_id).map(|root| root.to_owned()),
-    })
-}
-
-/// Fill minimal features needed
-fn collect_minimal_features_needed(cfg_expr: &CfgExpr, features: &mut Vec<SmolStr>) {
-    match cfg_expr {
-        CfgExpr::KeyValue { key, value } if key == "feature" => features.push(value.clone()),
-        CfgExpr::All(preds) => {
-            preds.iter().for_each(|cfg| collect_minimal_features_needed(cfg, features));
-        }
-        CfgExpr::Any(preds) => {
-            for cfg in preds {
-                let len_features = features.len();
-                collect_minimal_features_needed(cfg, features);
-                if len_features != features.len() {
-                    break;
-                }
-            }
-        }
-        _ => {}
-    }
-}
-
 pub fn handle_inlay_hints(
     world: WorldSnapshot,
     params: InlayHintsParams,
@@ -1154,54 +1093,3 @@ pub fn handle_semantic_tokens_range(
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
     Ok(Some(semantic_tokens.into()))
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    use mbe::{ast_to_token_tree, TokenMap};
-    use ra_cfg::parse_cfg;
-    use ra_syntax::{
-        ast::{self, AstNode},
-        SmolStr,
-    };
-
-    fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt).unwrap()
-    }
-
-    #[test]
-    fn test_cfg_expr_minimal_features_needed() {
-        let (subtree, _) = get_token_tree_generated(r#"#![cfg(feature = "baz")]"#);
-        let cfg_expr = parse_cfg(&subtree);
-        let mut min_features = vec![];
-        collect_minimal_features_needed(&cfg_expr, &mut min_features);
-
-        assert_eq!(min_features, vec![SmolStr::new("baz")]);
-
-        let (subtree, _) =
-            get_token_tree_generated(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#);
-        let cfg_expr = parse_cfg(&subtree);
-
-        let mut min_features = vec![];
-        collect_minimal_features_needed(&cfg_expr, &mut min_features);
-        assert_eq!(min_features, vec![SmolStr::new("baz"), SmolStr::new("foo")]);
-
-        let (subtree, _) =
-            get_token_tree_generated(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#);
-        let cfg_expr = parse_cfg(&subtree);
-
-        let mut min_features = vec![];
-        collect_minimal_features_needed(&cfg_expr, &mut min_features);
-        assert_eq!(min_features, vec![SmolStr::new("baz")]);
-
-        let (subtree, _) = get_token_tree_generated(r#"#![cfg(foo)]"#);
-        let cfg_expr = parse_cfg(&subtree);
-
-        let mut min_features = vec![];
-        collect_minimal_features_needed(&cfg_expr, &mut min_features);
-        assert!(min_features.is_empty());
-    }
-}
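With `to_lsp_runnable` and `collect_minimal_features_needed` removed here, the request handlers now delegate construction of `lsp_ext::Runnable` to `to_proto::runnable`, which the next file adds. The label formatting that moves along with it is summarized by the sketch below; `Kind` is a simplified stand-in invented for this example, not the real `ra_ide::RunnableKind`.

#[allow(dead_code)]
enum Kind {
    Test { test_id: String },
    TestMod { path: String },
    Bench { test_id: String },
    DocTest { test_id: String },
    Bin,
}

// Mirrors the label `match` from the removed `to_lsp_runnable`: every kind
// gets a human-readable prefix, and binaries fall back to "run binary" when
// no cargo target is known.
fn label(kind: &Kind, target: Option<&str>) -> String {
    match kind {
        Kind::Test { test_id } => format!("test {}", test_id),
        Kind::TestMod { path } => format!("test-mod {}", path),
        Kind::Bench { test_id } => format!("bench {}", test_id),
        Kind::DocTest { test_id } => format!("doctest {}", test_id),
        Kind::Bin => target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t)),
    }
}

fn main() {
    let kind = Kind::Test { test_id: "tests::it_works".to_string() };
    assert_eq!(label(&kind, Some("my-crate")), "test tests::it_works");
    assert_eq!(label(&Kind::Bin, None), "run binary");
}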
@@ -3,13 +3,16 @@
 use ra_ide::{
     Assist, CompletionItem, CompletionItemKind, Documentation, FileSystemEdit, Fold, FoldKind,
     FunctionSignature, Highlight, HighlightModifier, HighlightTag, HighlightedRange, Indel,
-    InlayHint, InlayKind, InsertTextFormat, LineIndex, NavigationTarget, ReferenceAccess, Severity,
-    SourceChange, SourceFileEdit, TextEdit,
+    InlayHint, InlayKind, InsertTextFormat, LineIndex, NavigationTarget, ReferenceAccess, Runnable,
+    RunnableKind, Severity, SourceChange, SourceFileEdit, TextEdit,
 };
 use ra_syntax::{SyntaxKind, TextRange, TextSize};
 use ra_vfs::LineEndings;
+use rustc_hash::FxHashMap;
 
-use crate::{lsp_ext, semantic_tokens, world::WorldSnapshot, Result};
+use crate::{
+    cargo_target_spec::CargoTargetSpec, lsp_ext, semantic_tokens, world::WorldSnapshot, Result,
+};
 
 pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
     let line_col = line_index.line_col(offset);
@@ -627,3 +630,38 @@ pub(crate) fn code_action(world: &WorldSnapshot, assist: Assist) -> Result<lsp_e
     };
     Ok(res)
 }
+
+pub(crate) fn runnable(
+    world: &WorldSnapshot,
+    file_id: FileId,
+    runnable: Runnable,
+) -> Result<lsp_ext::Runnable> {
+    let spec = CargoTargetSpec::for_file(world, file_id)?;
+    let target = spec.as_ref().map(|s| s.target.clone());
+    let (args, extra_args) =
+        CargoTargetSpec::runnable_args(spec, &runnable.kind, &runnable.cfg_exprs)?;
+    let line_index = world.analysis().file_line_index(file_id)?;
+    let label = match &runnable.kind {
+        RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
+        RunnableKind::TestMod { path } => format!("test-mod {}", path),
+        RunnableKind::Bench { test_id } => format!("bench {}", test_id),
+        RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
+        RunnableKind::Bin => {
+            target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
+        }
+    };
+
+    Ok(lsp_ext::Runnable {
+        range: range(&line_index, runnable.range),
+        label,
+        kind: lsp_ext::RunnableKind::Cargo,
+        args,
+        extra_args,
+        env: {
+            let mut m = FxHashMap::default();
+            m.insert("RUST_BACKTRACE".to_string(), "short".to_string());
+            m
+        },
+        cwd: world.workspace_root_for(file_id).map(|root| root.to_owned()),
+    })
+}
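`to_proto::runnable` feeds `runnable.cfg_exprs` straight into `CargoTargetSpec::runnable_args` and packages the resulting `(args, extra_args)` pair into the LSP response. The helper below is hypothetical and not part of the patch: it shows one way a client could join the two lists into a single command line, assuming `extra_args` are meant to follow a `--` separator.

// Hypothetical client-side helper, not part of this commit.
fn render_command(args: &[String], extra_args: &[String]) -> String {
    let mut parts = vec!["cargo".to_string()];
    parts.extend(args.iter().cloned());
    if !extra_args.is_empty() {
        // Assumption: extra_args are forwarded to the spawned binary after `--`.
        parts.push("--".to_string());
        parts.extend(extra_args.iter().cloned());
    }
    parts.join(" ")
}

fn main() {
    // `runnable_args` might produce something like this for a test behind
    // `#[cfg(feature = "baz")]`; the exact argument order is illustrative only.
    let args = vec!["test".to_string(), "--features".to_string(), "baz".to_string()];
    let extra_args = vec!["--nocapture".to_string()];
    assert_eq!(render_command(&args, &extra_args), "cargo test --features baz -- --nocapture");
}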