diff --git a/Cargo.lock b/Cargo.lock
index a511e0d2804..ca6c6416631 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1372,17 +1372,20 @@ dependencies = [
  "lsp-types",
  "parking_lot",
  "pico-args",
+ "ra_cfg",
  "ra_db",
  "ra_flycheck",
  "ra_hir",
  "ra_hir_def",
  "ra_hir_ty",
  "ra_ide",
+ "ra_mbe",
  "ra_proc_macro_srv",
  "ra_prof",
  "ra_project_model",
  "ra_syntax",
  "ra_text_edit",
+ "ra_tt",
  "ra_vfs",
  "rand",
  "relative-path",
diff --git a/crates/ra_cfg/src/cfg_expr.rs b/crates/ra_cfg/src/cfg_expr.rs
index 39d71851ca7..85b100c6adf 100644
--- a/crates/ra_cfg/src/cfg_expr.rs
+++ b/crates/ra_cfg/src/cfg_expr.rs
@@ -88,13 +88,17 @@ fn next_cfg_expr(it: &mut SliceIter<tt::TokenTree>) -> Option<CfgExpr> {
 mod tests {
     use super::*;
 
-    use mbe::ast_to_token_tree;
+    use mbe::{ast_to_token_tree, TokenMap};
     use ra_syntax::ast::{self, AstNode};
 
-    fn assert_parse_result(input: &str, expected: CfgExpr) {
+    fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        let (tt, _) = ast_to_token_tree(&tt).unwrap();
+        ast_to_token_tree(&tt).unwrap()
+    }
+
+    fn assert_parse_result(input: &str, expected: CfgExpr) {
+        let (tt, _) = get_token_tree_generated(input);
         assert_eq!(parse_cfg(&tt), expected);
     }
 
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index c5df4ac24da..3364a822f43 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -62,6 +62,7 @@ pub use crate::{
 
 pub use hir_def::{
     adt::StructKind,
+    attr::Attrs,
     body::scope::ExprScopes,
     builtin_type::BuiltinType,
     docs::Documentation,
diff --git a/crates/ra_hir_def/src/attr.rs b/crates/ra_hir_def/src/attr.rs
index 576cd0c65ba..8b6c0bedee7 100644
--- a/crates/ra_hir_def/src/attr.rs
+++ b/crates/ra_hir_def/src/attr.rs
@@ -81,7 +81,7 @@ impl Attrs {
         }
     }
 
-    fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs {
+    pub fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs {
         let hygiene = Hygiene::new(db.upcast(), owner.file_id);
         Attrs::new(owner.value, &hygiene)
     }
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index f5a7305dc09..273036cee1c 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -29,7 +29,7 @@ use crate::{
     AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
 };
 
-/// A subset of Exander that only deals with cfg attributes. We only need it to
+/// A subset of Expander that only deals with cfg attributes. We only need it to
 /// avoid cyclic queries in crate def map during enum processing.
 pub(crate) struct CfgExpander {
     cfg_options: CfgOptions,
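For orientation, and not taken from the patch itself: `parse_cfg` turns the token tree of a `#[cfg(...)]` attribute into a `CfgExpr` value, and the rest of this patch passes such values around. The sketch below uses a simplified stand-in enum whose variant names come from the snapshot output and `match` arms later in this diff (`KeyValue`, `All`, `Any`); the real `ra_cfg` type uses `SmolStr` and has additional variants (for example for bare predicates such as `unix`).

```rust
// Simplified stand-in for ra_cfg::CfgExpr, for illustration only.
#[derive(Debug)]
enum CfgExpr {
    KeyValue { key: String, value: String },
    All(Vec<CfgExpr>),
    Any(Vec<CfgExpr>),
}

fn main() {
    // Roughly what `#[cfg(all(feature = "foo", feature = "bar"))]` parses into:
    let all = CfgExpr::All(vec![
        CfgExpr::KeyValue { key: "feature".into(), value: "foo".into() },
        CfgExpr::KeyValue { key: "feature".into(), value: "bar".into() },
    ]);
    // And `#[cfg(any(feature = "foo", feature = "bar"))]`:
    let any = CfgExpr::Any(vec![
        CfgExpr::KeyValue { key: "feature".into(), value: "foo".into() },
        CfgExpr::KeyValue { key: "feature".into(), value: "bar".into() },
    ]);
    println!("{:?}\n{:?}", all, any);
}
```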
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs
index 131b8f307c1..ed98e58e00c 100644
--- a/crates/ra_ide/src/runnables.rs
+++ b/crates/ra_ide/src/runnables.rs
@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{AsAssocItem, Semantics};
+use hir::{AsAssocItem, Attrs, HirFileId, InFile, Semantics};
 use itertools::Itertools;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
@@ -10,12 +10,14 @@ use ra_syntax::{
 
 use crate::FileId;
 use ast::DocCommentsOwner;
+use ra_cfg::CfgExpr;
 use std::fmt::Display;
 
 #[derive(Debug)]
 pub struct Runnable {
     pub range: TextRange,
     pub kind: RunnableKind,
+    pub cfg_exprs: Vec<CfgExpr>,
 }
 
 #[derive(Debug)]
@@ -45,20 +47,24 @@ pub enum RunnableKind {
 pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(file_id);
-    source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
+    source_file.syntax().descendants().filter_map(|i| runnable(&sema, i, file_id)).collect()
 }
 
-fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
+fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode, file_id: FileId) -> Option<Runnable> {
     match_ast! {
         match item {
-            ast::FnDef(it) => runnable_fn(sema, it),
-            ast::Module(it) => runnable_mod(sema, it),
+            ast::FnDef(it) => runnable_fn(sema, it, file_id),
+            ast::Module(it) => runnable_mod(sema, it, file_id),
             _ => None,
         }
     }
 }
 
-fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
+fn runnable_fn(
+    sema: &Semantics<RootDatabase>,
+    fn_def: ast::FnDef,
+    file_id: FileId,
+) -> Option<Runnable> {
     let name_string = fn_def.name()?.text().to_string();
 
     let kind = if name_string == "main" {
@@ -111,7 +117,12 @@ fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable>
 fn has_doc_comment(fn_def: &ast::FnDef) -> bool {
     fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```"))
 }
 
-fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
+fn runnable_mod(
+    sema: &Semantics<RootDatabase>,
+    module: ast::Module,
+    file_id: FileId,
+) -> Option<Runnable> {
     let has_test_function = module
         .item_list()?
         .items()
@@ -160,11 +175,20 @@ fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
+    #[test]
+    fn test_runnables_with_feature() {
+        let (analysis, pos) = analysis_and_position(
+            r#"
+        //- /lib.rs crate:foo cfg:feature=foo
+        <|> //empty
+        #[test]
+        #[cfg(feature = "foo")]
+        fn test_foo1() {}
+        "#,
+        );
+        let runnables = analysis.runnables(pos.file_id).unwrap();
+        assert_debug_snapshot!(&runnables,
+        @r###"
+        [
+            Runnable {
+                range: 1..58,
+                kind: Test {
+                    test_id: Name(
+                        "test_foo1",
+                    ),
+                    attr: TestAttr {
+                        ignore: false,
+                    },
+                },
+                cfg_exprs: [
+                    KeyValue {
+                        key: "feature",
+                        value: "foo",
+                    },
+                ],
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
+    fn test_runnables_with_features() {
+        let (analysis, pos) = analysis_and_position(
+            r#"
+        //- /lib.rs crate:foo cfg:feature=foo,feature=bar
+        <|> //empty
+        #[test]
+        #[cfg(all(feature = "foo", feature = "bar"))]
+        fn test_foo1() {}
+        "#,
+        );
+        let runnables = analysis.runnables(pos.file_id).unwrap();
+        assert_debug_snapshot!(&runnables,
+        @r###"
+        [
+            Runnable {
+                range: 1..80,
+                kind: Test {
+                    test_id: Name(
+                        "test_foo1",
+                    ),
+                    attr: TestAttr {
+                        ignore: false,
+                    },
+                },
+                cfg_exprs: [
+                    All(
+                        [
+                            KeyValue {
+                                key: "feature",
+                                value: "foo",
+                            },
+                            KeyValue {
+                                key: "feature",
+                                value: "bar",
+                            },
+                        ],
+                    ),
+                ],
+            },
+        ]
+        "###
+        );
+    }
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 9b2d29b1d57..65b487db3b9 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -40,6 +40,7 @@ ra_project_model = { path = "../ra_project_model" }
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
 ra_vfs = "0.6.0"
+ra_cfg = { path = "../ra_cfg"}
 
 # This should only be used in CLI
 ra_db = { path = "../ra_db" }
@@ -55,6 +56,8 @@ winapi = "0.3.8"
 tempfile = "3.1.0"
 insta = "0.16.0"
 test_utils = { path = "../test_utils" }
+mbe = { path = "../ra_mbe", package = "ra_mbe" }
+tt = { path = "../ra_tt", package = "ra_tt" }
 
 [features]
 jemalloc = [ "ra_prof/jemalloc" ]
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs
index 5e5a17943d2..441fb61df00 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -4,6 +4,7 @@
 use ra_ide::{FileId, RunnableKind, TestId};
 use ra_project_model::{self, ProjectWorkspace, TargetKind};
 
 use crate::{world::WorldSnapshot, Result};
+use ra_syntax::SmolStr;
 
 /// Abstract representation of Cargo target.
 ///
@@ -20,6 +21,7 @@ impl CargoTargetSpec {
     pub(crate) fn runnable_args(
         spec: Option<CargoTargetSpec>,
         kind: &RunnableKind,
+        features_needed: &Vec<SmolStr>,
     ) -> Result<(Vec<String>, Vec<String>)> {
         let mut args = Vec::new();
         let mut extra_args = Vec::new();
@@ -73,6 +75,12 @@ impl CargoTargetSpec {
                 }
             }
         }
+
+        features_needed.iter().for_each(|feature| {
+            args.push("--features".to_string());
+            args.push(feature.to_string());
+        });
+
         Ok((args, extra_args))
     }
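To make the effect of the new `features_needed` parameter concrete, here is a small self-contained sketch (not rust-analyzer code): each required feature is appended as its own `--features` flag, so a test gated on two features is launched with both enabled. The `push_features` helper and the `cargo test --package foo` target arguments are illustrative assumptions; the real argument list is assembled by `CargoTargetSpec::runnable_args` above, and the test filter goes through `extra_args`.

```rust
// Stand-in for the feature-appending step added to runnable_args.
fn push_features(args: &mut Vec<String>, features_needed: &[&str]) {
    for feature in features_needed {
        args.push("--features".to_string());
        args.push(feature.to_string());
    }
}

fn main() {
    let mut args = vec!["test".to_string(), "--package".to_string(), "foo".to_string()];
    push_features(&mut args, &["foo", "bar"]);

    let expected: Vec<String> =
        ["test", "--package", "foo", "--features", "foo", "--features", "bar"]
            .iter()
            .map(|s| s.to_string())
            .collect();
    assert_eq!(args, expected);

    // Roughly: `cargo test --package foo --features foo --features bar`
    println!("cargo {}", args.join(" "));
}
```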
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs
index ba685755668..2aaff3ea48b 100644
--- a/crates/rust-analyzer/src/main_loop/handlers.rs
+++ b/crates/rust-analyzer/src/main_loop/handlers.rs
@@ -17,13 +17,14 @@ use lsp_types::{
     SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
     SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, Url, WorkspaceEdit,
 };
+use ra_cfg::CfgExpr;
 use ra_ide::{
     FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
     TextEdit,
 };
 use ra_prof::profile;
 use ra_project_model::TargetKind;
-use ra_syntax::{AstNode, SyntaxKind, TextRange, TextSize};
+use ra_syntax::{AstNode, SmolStr, SyntaxKind, TextRange, TextSize};
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use serde_json::to_value;
@@ -978,7 +979,12 @@ fn to_lsp_runnable(
 ) -> Result<req::Runnable> {
     let spec = CargoTargetSpec::for_file(world, file_id)?;
     let target = spec.as_ref().map(|s| s.target.clone());
-    let (args, extra_args) = CargoTargetSpec::runnable_args(spec, &runnable.kind)?;
+    let mut features_needed = vec![];
+    for cfg_expr in &runnable.cfg_exprs {
+        collect_minimal_features_needed(cfg_expr, &mut features_needed);
+    }
+    let (args, extra_args) =
+        CargoTargetSpec::runnable_args(spec, &runnable.kind, &features_needed)?;
     let line_index = world.analysis().file_line_index(file_id)?;
     let label = match &runnable.kind {
         RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
@@ -1004,6 +1010,26 @@ fn to_lsp_runnable(
     })
 }
 
+/// Fill minimal features needed
+fn collect_minimal_features_needed(cfg_expr: &CfgExpr, features: &mut Vec<SmolStr>) {
+    match cfg_expr {
+        CfgExpr::KeyValue { key, value } if key == "feature" => features.push(value.clone()),
+        CfgExpr::All(preds) => {
+            preds.iter().for_each(|cfg| collect_minimal_features_needed(cfg, features));
+        }
+        CfgExpr::Any(preds) => {
+            for cfg in preds {
+                let len_features = features.len();
+                collect_minimal_features_needed(cfg, features);
+                if len_features != features.len() {
+                    break;
+                }
+            }
+        }
+        _ => {}
+    }
+}
+
 pub fn handle_inlay_hints(
     world: WorldSnapshot,
     params: InlayHintsParams,
@@ -1140,3 +1166,54 @@ pub fn handle_semantic_tokens_range(
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
     Ok(Some(semantic_tokens.into()))
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use mbe::{ast_to_token_tree, TokenMap};
+    use ra_cfg::parse_cfg;
+    use ra_syntax::{
+        ast::{self, AstNode},
+        SmolStr,
+    };
+
+    fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
+        let source_file = ast::SourceFile::parse(input).ok().unwrap();
+        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+        ast_to_token_tree(&tt).unwrap()
+    }
+
+    #[test]
+    fn test_cfg_expr_minimal_features_needed() {
+        let (subtree, _) = get_token_tree_generated(r#"#![cfg(feature = "baz")]"#);
+        let cfg_expr = parse_cfg(&subtree);
+        let mut min_features = vec![];
+        collect_minimal_features_needed(&cfg_expr, &mut min_features);
+
+        assert_eq!(min_features, vec![SmolStr::new("baz")]);
+
+        let (subtree, _) =
+            get_token_tree_generated(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#);
+        let cfg_expr = parse_cfg(&subtree);
+
+        let mut min_features = vec![];
+        collect_minimal_features_needed(&cfg_expr, &mut min_features);
+        assert_eq!(min_features, vec![SmolStr::new("baz"), SmolStr::new("foo")]);
+
+        let (subtree, _) =
+            get_token_tree_generated(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#);
+        let cfg_expr = parse_cfg(&subtree);
+
+        let mut min_features = vec![];
+        collect_minimal_features_needed(&cfg_expr, &mut min_features);
+        assert_eq!(min_features, vec![SmolStr::new("baz")]);
+
+        let (subtree, _) = get_token_tree_generated(r#"#![cfg(foo)]"#);
+        let cfg_expr = parse_cfg(&subtree);
+
+        let mut min_features = vec![];
+        collect_minimal_features_needed(&cfg_expr, &mut min_features);
+        assert!(min_features.is_empty());
+    }
+}
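As a final self-contained illustration of the "minimal" collection strategy in `collect_minimal_features_needed`: `all(...)` contributes every feature predicate it contains, `any(...)` stops after the first alternative that yielded a feature, and non-feature predicates such as `unix` contribute nothing. The sketch below repeats the stand-in enum from the earlier note so it compiles on its own; the `Atom` variant and the use of `String` instead of `SmolStr` are assumptions for illustration, while the collection logic mirrors the function added by the patch.

```rust
// Simplified stand-in for ra_cfg::CfgExpr.
#[derive(Debug, Clone)]
enum CfgExpr {
    Atom(String),
    KeyValue { key: String, value: String },
    All(Vec<CfgExpr>),
    Any(Vec<CfgExpr>),
}

// Mirrors collect_minimal_features_needed from the patch, with String features.
fn collect_minimal_features_needed(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
    match cfg_expr {
        CfgExpr::KeyValue { key, value } if key == "feature" => features.push(value.clone()),
        CfgExpr::All(preds) => {
            preds.iter().for_each(|cfg| collect_minimal_features_needed(cfg, features));
        }
        CfgExpr::Any(preds) => {
            for cfg in preds {
                let len_features = features.len();
                collect_minimal_features_needed(cfg, features);
                // Stop at the first alternative that contributed a feature.
                if len_features != features.len() {
                    break;
                }
            }
        }
        _ => {}
    }
}

fn main() {
    // #[cfg(all(feature = "foo", feature = "bar"))] -> both features are required.
    let all = CfgExpr::All(vec![
        CfgExpr::KeyValue { key: "feature".into(), value: "foo".into() },
        CfgExpr::KeyValue { key: "feature".into(), value: "bar".into() },
    ]);
    let mut features = Vec::new();
    collect_minimal_features_needed(&all, &mut features);
    assert_eq!(features, vec!["foo".to_string(), "bar".to_string()]);

    // #[cfg(any(feature = "baz", feature = "foo", unix))] -> only the first match.
    let any = CfgExpr::Any(vec![
        CfgExpr::KeyValue { key: "feature".into(), value: "baz".into() },
        CfgExpr::KeyValue { key: "feature".into(), value: "foo".into() },
        CfgExpr::Atom("unix".into()),
    ]);
    let mut features = Vec::new();
    collect_minimal_features_needed(&any, &mut features);
    assert_eq!(features, vec!["baz".to_string()]);

    println!("{:?}", features);
}
```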