Ensure that semantic tokens are single-line

This commit is contained in:
Aleksey Kladov 2020-02-27 14:54:31 +01:00
parent 57f0d6cba3
commit 8ed7e751b6
2 changed files with 52 additions and 3 deletions

View File

@@ -1,7 +1,8 @@
//! `LineIndex` maps flat `TextUnit` offsets into `(Line, Column)`
//! representation.
use std::iter;
use ra_syntax::TextUnit;
use ra_syntax::{TextRange, TextUnit};
use rustc_hash::FxHashMap;
use superslice::Ext;
@@ -87,6 +88,19 @@ pub fn offset(&self, line_col: LineCol) -> TextUnit {
self.newlines[line_col.line as usize] + col
}
/// Splits `range` into the per-line sub-ranges it covers.
///
/// Every newline position recorded in `self.newlines` that falls inside
/// `range` becomes a split point, so each yielded `TextRange` lies on a
/// single line. Empty sub-ranges are dropped, so an empty input range
/// yields nothing.
pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
    // Indices delimiting the newline offsets that fall within `range`:
    // `first` is the earliest newline >= range.start(), `last` is one past
    // the latest newline <= range.end() (superslice binary searches).
    let first = self.newlines.lower_bound(&range.start());
    let last = self.newlines.upper_bound(&range.end());
    // Non-decreasing sequence of split points: start, interior newlines, end.
    let endpoints = iter::once(range.start())
        .chain(self.newlines[first..last].iter().copied())
        .chain(iter::once(range.end()));
    // Pair each point with its successor; keep only the non-empty spans.
    endpoints
        .clone()
        .zip(endpoints.skip(1))
        .filter(|&(lo, hi)| lo != hi)
        .map(|(lo, hi)| TextRange::from_to(lo, hi))
}
fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
if let Some(utf16_chars) = self.utf16_lines.get(&line) {
let mut correction = TextUnit::from_usize(0);
@@ -221,4 +235,32 @@ fn test_string() {
assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
}
#[test]
fn test_splitlines() {
    // Shorthand for building a `TextRange` from raw offsets.
    fn r(lo: u32, hi: u32) -> TextRange {
        TextRange::from_to(lo.into(), hi.into())
    }

    // Build a `LineIndex` for `text` and verify that `lines(range)`
    // yields exactly `expected`.
    fn check(text: &str, range: TextRange, expected: &[TextRange]) {
        let line_index = LineIndex::new(text);
        let actual = line_index.lines(range).collect::<Vec<_>>();
        assert_eq!(actual, expected);
    }

    // Multi-line text: each yielded range ends just after its newline.
    check("a\nbb\nccc\n", r(0, 9), &[r(0, 2), r(2, 5), r(5, 9)]);
    // Empty text and empty range produce no lines at all.
    check("", r(0, 0), &[]);
    // A lone newline is a single one-character line.
    check("\n", r(0, 1), &[r(0, 1)]);
}
}

View File

@@ -1078,13 +1078,20 @@ pub fn handle_semantic_tokens(
let _p = profile("handle_semantic_tokens");
let file_id = params.text_document.try_conv_with(&world)?;
let text = world.analysis().file_text(file_id)?;
let line_index = world.analysis().file_line_index(file_id)?;
let mut builder = SemanticTokensBuilder::default();
for highlight_range in world.analysis().highlight(file_id)?.into_iter() {
let (token_type, token_modifiers) = highlight_range.highlight.conv();
builder.push(highlight_range.range.conv_with(&line_index), token_type, token_modifiers);
let (token_index, modifier_bitset) = highlight_range.highlight.conv();
for mut range in line_index.lines(highlight_range.range) {
if text[range].ends_with('\n') {
range = TextRange::from_to(range.start(), range.end() - TextUnit::of_char('\n'));
}
let range = range.conv_with(&line_index);
builder.push(range, token_index, modifier_bitset);
}
}
let tokens = SemanticTokens { data: builder.build(), ..Default::default() };