Allow semantic tokens for strings to be disabled

This commit is contained in:
John Renner 2021-05-10 13:09:38 -07:00
parent ba86203987
commit c3ba1f14fa
4 changed files with 58 additions and 9 deletions

View File

@ -44,6 +44,9 @@ struct ConfigData {
/// Show function name and docs in parameter hints.
callInfo_full: bool = "true",
/// Use semantic tokens for strings. Disable to support injected grammars.
semanticStringTokens: bool = "true",
/// Automatically refresh project info via `cargo metadata` on
/// `Cargo.toml` changes.
cargo_autoreload: bool = "true",
@ -381,6 +384,9 @@ pub fn location_link(&self) -> bool {
/// Whether the client advertises support for line-only folding ranges,
/// i.e. the `textDocument.foldingRange.lineFoldingOnly` capability.
/// Falls back to `false` when any part of the capability chain is absent.
pub fn line_folding_only(&self) -> bool {
    try_or!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?, false)
}
/// Whether semantic tokens should be emitted for string literals.
/// Backed by the `semanticStringTokens` config setting (default `true`);
/// clients disable this to let injected grammars highlight string contents.
pub fn semantic_strings(&self) -> bool {
    self.data.semanticStringTokens
}
pub fn hierarchical_symbols(&self) -> bool {
try_or!(
self.caps

View File

@ -1376,7 +1376,8 @@ pub(crate) fn handle_semantic_tokens_full(
let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_strings = snap.config.semantic_strings();
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
// Unconditionally cache the tokens
snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@ -1395,8 +1396,8 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_strings = snap.config.semantic_strings();
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
let mut cache = snap.semantic_tokens_cache.lock();
let cached_tokens = cache.entry(params.text_document.uri).or_default();
@ -1425,7 +1426,8 @@ pub(crate) fn handle_semantic_tokens_range(
let line_index = snap.file_line_index(frange.file_id)?;
let highlights = snap.analysis.highlight_range(frange)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_strings = snap.config.semantic_strings();
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
Ok(Some(semantic_tokens.into()))
}

View File

@ -381,6 +381,7 @@ pub(crate) fn semantic_tokens(
text: &str,
line_index: &LineIndex,
highlights: Vec<HlRange>,
include_strings: bool
) -> lsp_types::SemanticTokens {
let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@ -389,8 +390,11 @@ pub(crate) fn semantic_tokens(
if highlight_range.highlight.is_empty() {
continue;
}
let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
let token_index = semantic_tokens::type_index(type_);
let (typ, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
if !include_strings && typ == lsp_types::SemanticTokenType::STRING {
continue;
}
let token_index = semantic_tokens::type_index(typ);
let modifier_bitset = mods.0;
for mut text_range in line_index.index.lines(highlight_range.range) {

View File

@ -18,15 +18,16 @@
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
WillRenameFiles,
SemanticTokensRangeRequest, WillRenameFiles
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
PartialResultParams, Position, Range, RenameFilesParams, SemanticTokensRangeParams, TextDocumentItem,
TextDocumentPositionParams, WorkDoneProgressParams,
SemanticTokens
};
use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
use serde_json::json;
use serde_json::{from_value, json};
use test_utils::skip_slow_tests;
use crate::{
@ -37,6 +38,42 @@
const PROFILE: &str = "";
// const PROFILE: &'static str = "*@3>100";
#[test]
fn can_disable_semantic_strings() {
    if skip_slow_tests() {
        return;
    }

    // Run the same fixture with string tokens enabled and then disabled:
    // the requested range covers exactly the `"hi"` literal, so we expect
    // exactly one token when enabled and none when disabled.
    for &semantic_strings in &[true, false] {
        let server = Project::with_fixture(
            r#"
//- /Cargo.toml
[package]
name = "foo"
version = "0.0.0"
//- /src/lib.rs
const foo: &'static str = "hi";
"#,
        )
        .with_config(serde_json::json!({
            "semanticStringTokens": semantic_strings
        }))
        .server()
        .wait_until_workspace_is_loaded();

        // Ask for semantic tokens over just the string literal (cols 26..30).
        let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
            text_document: server.doc_id("src/lib.rs"),
            partial_result_params: PartialResultParams::default(),
            work_done_progress_params: WorkDoneProgressParams::default(),
            range: Range::new(Position::new(0, 26), Position::new(0, 30)),
        });

        let tok_res: SemanticTokens = from_value(res).expect("invalid server response");
        // assert_eq! (not assert!(a == b)) so a failure prints both values.
        assert_eq!(tok_res.data.len(), semantic_strings as usize);
    }
}
#[test]
fn completes_items_from_standard_library() {
if skip_slow_tests() {