7657: utf8 r=matklad a=matklad

- Prepare for utf-8 offsets
- reduce code duplication in tests
- Make utf8 default, implement utf16 in terms of it
- Make it easy to add additional context for offset conversion
- Implement utf8 offsets

closes #7453

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2021-02-16 16:34:22 +00:00 committed by GitHub
commit c9672a0539
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 221 additions and 146 deletions

View File

@ -95,7 +95,7 @@ pub use ide_db::{
},
call_info::CallInfo,
label::Label,
line_index::{LineCol, LineIndex},
line_index::{LineCol, LineColUtf16, LineIndex},
search::{ReferenceAccess, SearchScope},
source_change::{FileSystemEdit, SourceChange},
symbol_index::Query,

View File

@ -15,11 +15,19 @@ pub struct LineIndex {
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
pub struct LineColUtf16 {
/// Zero-based
pub line: u32,
/// Zero-based
pub col_utf16: u32,
pub col: u32,
}
/// A zero-based line/column position where the column counts utf-8 bytes
/// from the start of the line (the crate-internal default encoding).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
    /// Zero-based
    pub line: u32,
    /// Zero-based utf8 offset
    pub col: u32,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
@ -92,14 +100,21 @@ impl LineIndex {
let line = partition_point(&self.newlines, |&it| it <= offset) - 1;
let line_start_offset = self.newlines[line];
let col = offset - line_start_offset;
LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 }
LineCol { line: line as u32, col: col.into() }
}
pub fn offset(&self, line_col: LineCol) -> TextSize {
//FIXME: return Result
let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16);
self.newlines[line_col.line as usize] + col
self.newlines[line_col.line as usize] + TextSize::from(line_col.col)
}
/// Converts a utf-8 line/column into its utf-16 counterpart.
///
/// Only the column is re-measured (in utf-16 code units); the line
/// number is carried over unchanged.
pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
    LineColUtf16 {
        line: line_col.line,
        col: self.utf8_to_utf16_col(line_col.line, line_col.col.into()) as u32,
    }
}
/// Converts a utf-16 line/column into its utf-8 counterpart.
///
/// Only the column is re-measured (in utf-8 bytes); the line number is
/// carried over unchanged.
pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
    LineCol {
        line: line_col.line,
        col: self.utf16_to_utf8_col(line_col.line, line_col.col).into(),
    }
}
pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {

View File

@ -3,24 +3,29 @@ use super::*;
#[test]
fn test_line_index() {
let text = "hello\nworld";
let table = [
(00, 0, 0),
(01, 0, 1),
(05, 0, 5),
(06, 1, 0),
(07, 1, 1),
(08, 1, 2),
(10, 1, 4),
(11, 1, 5),
(12, 1, 6),
];
let index = LineIndex::new(text);
assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 });
assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 });
assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 });
assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 });
assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 });
assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 });
assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 });
assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 });
for &(offset, line, col) in &table {
assert_eq!(index.line_col(offset.into()), LineCol { line, col });
}
let text = "\nhello\nworld";
let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
let index = LineIndex::new(text);
assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 });
assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 });
assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 });
assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 });
for &(offset, line, col) in &table {
assert_eq!(index.line_col(offset.into()), LineCol { line, col });
}
}
#[test]

View File

@ -8,7 +8,7 @@ use std::{convert::TryFrom, env, fs, path::PathBuf, process};
use lsp_server::Connection;
use project_model::ProjectManifest;
use rust_analyzer::{cli, config::Config, from_json, Result};
use rust_analyzer::{cli, config::Config, from_json, lsp_ext::supports_utf8, Result};
use vfs::AbsPathBuf;
#[cfg(all(feature = "mimalloc"))]
@ -127,7 +127,11 @@ fn run_server() -> Result<()> {
name: String::from("rust-analyzer"),
version: Some(String::from(env!("REV"))),
}),
offset_encoding: None,
offset_encoding: if supports_utf8(&initialize_params.capabilities) {
Some("utf-8".to_string())
} else {
None
},
};
let initialize_result = serde_json::to_value(initialize_result).unwrap();

View File

@ -97,7 +97,7 @@ impl BenchCmd {
let offset = host
.analysis()
.file_line_index(file_id)?
.offset(LineCol { line: pos.line - 1, col_utf16: pos.column });
.offset(LineCol { line: pos.line - 1, col: pos.column });
let file_position = FilePosition { file_id, offset };
if is_completion {

View File

@ -218,9 +218,9 @@ impl AnalysisStatsCmd {
bar.println(format!(
"{}:{}-{}:{}: {}",
start.line + 1,
start.col_utf16,
start.col,
end.line + 1,
end.col_utf16,
end.col,
ty.display(db)
));
} else {
@ -250,9 +250,9 @@ impl AnalysisStatsCmd {
"{} {}:{}-{}:{}: Expected {}, got {}",
path,
start.line + 1,
start.col_utf16,
start.col,
end.line + 1,
end.col_utf16,
end.col,
mismatch.expected.display(db),
mismatch.actual.display(db)
));

View File

@ -23,7 +23,10 @@ use rustc_hash::FxHashSet;
use serde::{de::DeserializeOwned, Deserialize};
use vfs::AbsPathBuf;
use crate::{caps::completion_item_edit_resolve, diagnostics::DiagnosticsMapConfig};
use crate::{
caps::completion_item_edit_resolve, diagnostics::DiagnosticsMapConfig,
line_index::OffsetEncoding, lsp_ext::supports_utf8,
};
config_data! {
struct ConfigData {
@ -415,6 +418,13 @@ impl Config {
false
)
}
/// The position encoding negotiated with the client: utf-8 when the
/// client advertised support for it, otherwise the LSP default, utf-16.
pub fn offset_encoding(&self) -> OffsetEncoding {
    match supports_utf8(&self.caps) {
        true => OffsetEncoding::Utf8,
        false => OffsetEncoding::Utf16,
    }
}
fn experimental(&self, index: &'static str) -> bool {
try_or!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?, false)

View File

@ -1,12 +1,17 @@
//! Conversion lsp_types types to rust-analyzer specific ones.
use std::convert::TryFrom;
use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineIndex};
use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16};
use ide_db::base_db::{FileId, FilePosition, FileRange};
use syntax::{TextRange, TextSize};
use vfs::AbsPathBuf;
use crate::{from_json, global_state::GlobalStateSnapshot, lsp_ext, Result};
use crate::{
from_json,
global_state::GlobalStateSnapshot,
line_index::{LineIndex, OffsetEncoding},
lsp_ext, Result,
};
pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
let path = url.to_file_path().map_err(|()| "url is not a file")?;
@ -18,8 +23,17 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
}
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize {
let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 };
line_index.offset(line_col)
let line_col = match line_index.encoding {
OffsetEncoding::Utf8 => {
LineCol { line: position.line as u32, col: position.character as u32 }
}
OffsetEncoding::Utf16 => {
let line_col =
LineColUtf16 { line: position.line as u32, col: position.character as u32 };
line_index.index.to_utf8(line_col)
}
};
line_index.index.offset(line_col)
}
pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> TextRange {
@ -37,8 +51,8 @@ pub(crate) fn file_position(
tdpp: lsp_types::TextDocumentPositionParams,
) -> Result<FilePosition> {
let file_id = file_id(world, &tdpp.text_document.uri)?;
let line_index = world.analysis.file_line_index(file_id)?;
let offset = offset(&*line_index, tdpp.position);
let line_index = world.file_line_index(file_id)?;
let offset = offset(&line_index, tdpp.position);
Ok(FilePosition { file_id, offset })
}
@ -48,7 +62,7 @@ pub(crate) fn file_range(
range: lsp_types::Range,
) -> Result<FileRange> {
let file_id = file_id(world, &text_document_identifier.uri)?;
let line_index = world.analysis.file_line_index(file_id)?;
let line_index = world.file_line_index(file_id)?;
let range = text_range(&line_index, range);
Ok(FileRange { file_id, range })
}
@ -78,7 +92,7 @@ pub(crate) fn annotation(
lsp_ext::CodeLensResolveData::Impls(params) => {
let file_id =
world.url_to_file_id(&params.text_document_position_params.text_document.uri)?;
let line_index = world.analysis.file_line_index(file_id)?;
let line_index = world.file_line_index(file_id)?;
Ok(Annotation {
range: text_range(&line_index, code_lens.range),
@ -90,7 +104,7 @@ pub(crate) fn annotation(
}
lsp_ext::CodeLensResolveData::References(params) => {
let file_id = world.url_to_file_id(&params.text_document.uri)?;
let line_index = world.analysis.file_line_index(file_id)?;
let line_index = world.file_line_index(file_id)?;
Ok(Annotation {
range: text_range(&line_index, code_lens.range),

View File

@ -7,7 +7,7 @@ use std::{sync::Arc, time::Instant};
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Change, FileId};
use ide::{Analysis, AnalysisHost, Cancelable, Change, FileId};
use ide_db::base_db::{CrateId, VfsPath};
use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock};
@ -22,7 +22,7 @@ use crate::{
diagnostics::{CheckFixes, DiagnosticCollection},
document::DocumentData,
from_proto,
line_endings::LineEndings,
line_index::{LineEndings, LineIndex},
main_loop::Task,
op_queue::OpQueue,
reload::SourceRootConfig,
@ -271,8 +271,11 @@ impl GlobalStateSnapshot {
file_id_to_url(&self.vfs.read().0, id)
}
pub(crate) fn file_line_endings(&self, id: FileId) -> LineEndings {
self.vfs.read().1[&id]
/// Builds the LSP-aware `LineIndex` for `file_id`: the `ide` line index
/// plus the file's detected line endings (from the vfs side table) and
/// the offset encoding negotiated with the client.
pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancelable<LineIndex> {
    // Read the endings first; the analysis call below is the cancelable part.
    let endings = self.vfs.read().1[&file_id];
    let index = self.analysis.file_line_index(file_id)?;
    Ok(LineIndex { index, endings, encoding: self.config.offset_encoding() })
}
pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {

View File

@ -5,12 +5,11 @@
use std::{
io::Write as _,
process::{self, Stdio},
sync::Arc,
};
use ide::{
AnnotationConfig, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, LineIndex,
Query, RangeInfo, Runnable, RunnableKind, SearchScope, SourceChange, TextEdit,
AnnotationConfig, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, Query,
RangeInfo, Runnable, RunnableKind, SearchScope, SourceChange, TextEdit,
};
use ide_db::SymbolKind;
use itertools::Itertools;
@ -37,7 +36,7 @@ use crate::{
diff::diff,
from_proto,
global_state::{GlobalState, GlobalStateSnapshot},
line_endings::LineEndings,
line_index::{LineEndings, LineIndex},
lsp_ext::{self, InlayHint, InlayHintsParams},
lsp_utils::all_edits_are_disjoint,
to_proto, LspError, Result,
@ -100,7 +99,7 @@ pub(crate) fn handle_syntax_tree(
) -> Result<String> {
let _p = profile::span("handle_syntax_tree");
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(id)?;
let line_index = snap.file_line_index(id)?;
let text_range = params.range.map(|r| from_proto::text_range(&line_index, r));
let res = snap.analysis.syntax_tree(id, text_range)?;
Ok(res)
@ -122,7 +121,7 @@ pub(crate) fn handle_expand_macro(
) -> Result<Option<lsp_ext::ExpandedMacro>> {
let _p = profile::span("handle_expand_macro");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position);
let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?;
@ -135,7 +134,7 @@ pub(crate) fn handle_selection_range(
) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = profile::span("handle_selection_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let res: Result<Vec<lsp_types::SelectionRange>> = params
.positions
.into_iter()
@ -178,7 +177,7 @@ pub(crate) fn handle_matching_brace(
) -> Result<Vec<Position>> {
let _p = profile::span("handle_matching_brace");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let res = params
.positions
.into_iter()
@ -200,8 +199,7 @@ pub(crate) fn handle_join_lines(
) -> Result<Vec<lsp_types::TextEdit>> {
let _p = profile::span("handle_join_lines");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_endings = snap.file_line_endings(file_id);
let line_index = snap.file_line_index(file_id)?;
let mut res = TextEdit::default();
for range in params.ranges {
let range = from_proto::text_range(&line_index, range);
@ -213,7 +211,7 @@ pub(crate) fn handle_join_lines(
}
}
}
let res = to_proto::text_edit_vec(&line_index, line_endings, res);
let res = to_proto::text_edit_vec(&line_index, res);
Ok(res)
}
@ -227,9 +225,8 @@ pub(crate) fn handle_on_enter(
None => return Ok(None),
Some(it) => it,
};
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
let edit = to_proto::snippet_text_edit_vec(&line_index, line_endings, true, edit);
let line_index = snap.file_line_index(position.file_id)?;
let edit = to_proto::snippet_text_edit_vec(&line_index, true, edit);
Ok(Some(edit))
}
@ -240,8 +237,7 @@ pub(crate) fn handle_on_type_formatting(
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile::span("handle_on_type_formatting");
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
let line_index = snap.file_line_index(position.file_id)?;
// in `ide`, the `on_type` invariant is that
// `text.char_at(position) == typed_char`.
@ -269,7 +265,7 @@ pub(crate) fn handle_on_type_formatting(
// This should be a single-file edit
let (_, edit) = edit.source_file_edits.into_iter().next().unwrap();
let change = to_proto::text_edit_vec(&line_index, line_endings, edit);
let change = to_proto::text_edit_vec(&line_index, edit);
Ok(Some(change))
}
@ -279,7 +275,7 @@ pub(crate) fn handle_document_symbol(
) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = profile::span("handle_document_symbol");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
@ -535,7 +531,7 @@ pub(crate) fn handle_runnables(
) -> Result<Vec<lsp_ext::Runnable>> {
let _p = profile::span("handle_runnables");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let offset = params.position.map(|it| from_proto::offset(&line_index, it));
let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
@ -645,14 +641,12 @@ pub(crate) fn handle_completion(
None => return Ok(None),
Some(items) => items,
};
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
let line_index = snap.file_line_index(position.file_id)?;
let items: Vec<CompletionItem> = items
.into_iter()
.flat_map(|item| {
let mut new_completion_items =
to_proto::completion_item(&line_index, line_endings, item.clone());
let mut new_completion_items = to_proto::completion_item(&line_index, item.clone());
if completion_config.enable_imports_on_the_fly {
for new_item in &mut new_completion_items {
@ -693,8 +687,7 @@ pub(crate) fn handle_completion_resolve(
};
let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_endings = snap.file_line_endings(file_id);
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, resolve_data.position.position);
let additional_edits = snap
@ -707,9 +700,7 @@ pub(crate) fn handle_completion_resolve(
resolve_data.import_for_trait_assoc_item,
)?
.into_iter()
.flat_map(|edit| {
edit.into_iter().map(|indel| to_proto::text_edit(&line_index, line_endings, indel))
})
.flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
.collect_vec();
if !all_edits_are_disjoint(&original_completion, &additional_edits) {
@ -738,7 +729,7 @@ pub(crate) fn handle_folding_range(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis.folding_ranges(file_id)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let line_folding_only = snap.config.line_folding_only();
let res = folds
.into_iter()
@ -775,7 +766,7 @@ pub(crate) fn handle_hover(
None => return Ok(None),
Some(info) => info,
};
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_index = snap.file_line_index(position.file_id)?;
let range = to_proto::range(&line_index, info.range);
let hover = lsp_ext::Hover {
hover: lsp_types::Hover {
@ -797,7 +788,7 @@ pub(crate) fn handle_prepare_rename(
let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_index = snap.file_line_index(position.file_id)?;
let range = to_proto::range(&line_index, change.range);
Ok(Some(PrepareRenameResponse::Range(range)))
}
@ -857,8 +848,7 @@ pub(crate) fn handle_formatting(
let file = snap.analysis.file_text(file_id)?;
let crate_ids = snap.analysis.crate_for(file_id)?;
let file_line_index = snap.analysis.file_line_index(file_id)?;
let file_line_endings = snap.file_line_endings(file_id);
let line_index = snap.file_line_index(file_id)?;
let mut rustfmt = match snap.config.rustfmt() {
RustfmtConfig::Rustfmt { extra_args } => {
@ -935,24 +925,19 @@ pub(crate) fn handle_formatting(
let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout);
if file_line_endings != new_line_endings {
if line_index.endings != new_line_endings {
// If line endings are different, send the entire file.
// Diffing would not work here, as the line endings might be the only
// difference.
Ok(Some(to_proto::text_edit_vec(
&file_line_index,
new_line_endings,
&line_index,
TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text),
)))
} else if *file == new_text {
// The document is already formatted correctly -- no edits needed.
Ok(None)
} else {
Ok(Some(to_proto::text_edit_vec(
&file_line_index,
file_line_endings,
diff(&file, &new_text),
)))
Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
}
}
@ -969,7 +954,7 @@ pub(crate) fn handle_code_action(
}
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.range);
let frange = FileRange { file_id, range };
@ -1010,7 +995,7 @@ pub(crate) fn handle_code_action(
fn add_quick_fixes(
snap: &GlobalStateSnapshot,
frange: FileRange,
line_index: &Arc<LineIndex>,
line_index: &LineIndex,
acc: &mut Vec<lsp_ext::CodeAction>,
) -> Result<()> {
let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics(), frange.file_id)?;
@ -1052,7 +1037,7 @@ pub(crate) fn handle_code_action_resolve(
};
let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range);
let frange = FileRange { file_id, range };
@ -1131,7 +1116,7 @@ pub(crate) fn handle_document_highlight(
) -> Result<Option<Vec<DocumentHighlight>>> {
let _p = profile::span("handle_document_highlight");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_index = snap.file_line_index(position.file_id)?;
let refs = match snap
.analysis
@ -1192,7 +1177,7 @@ pub(crate) fn publish_diagnostics(
file_id: FileId,
) -> Result<Vec<Diagnostic>> {
let _p = profile::span("publish_diagnostics");
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let diagnostics: Vec<Diagnostic> = snap
.analysis
@ -1226,7 +1211,7 @@ pub(crate) fn handle_inlay_hints(
) -> Result<Vec<InlayHint>> {
let _p = profile::span("handle_inlay_hints");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
Ok(snap
.analysis
.inlay_hints(file_id, &snap.config.inlay_hints())?
@ -1277,7 +1262,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
for call_item in call_items.into_iter() {
let file_id = call_item.target.file_id;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
res.push(CallHierarchyIncomingCall {
from: item,
@ -1312,7 +1297,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
for call_item in call_items.into_iter() {
let file_id = call_item.target.file_id;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
res.push(CallHierarchyOutgoingCall {
to: item,
@ -1335,7 +1320,7 @@ pub(crate) fn handle_semantic_tokens_full(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
@ -1354,7 +1339,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
@ -1384,7 +1369,7 @@ pub(crate) fn handle_semantic_tokens_range(
let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.analysis.file_line_index(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
let highlights = snap.analysis.highlight_range(frange)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
@ -1432,7 +1417,7 @@ fn show_impl_command_link(
if snap.config.hover().implementations {
if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
let uri = to_proto::url(snap, position.file_id);
let line_index = snap.analysis.file_line_index(position.file_id).ok()?;
let line_index = snap.file_line_index(position.file_id).ok()?;
let position = to_proto::position(&line_index, position.offset);
let locations: Vec<_> = nav_data
.info

View File

@ -29,7 +29,7 @@ mod from_proto;
mod semantic_tokens;
mod markdown;
mod diagnostics;
mod line_endings;
mod line_index;
mod request_metrics;
mod lsp_utils;
mod thread_pool;

View File

@ -1,7 +1,23 @@
//! Enhances `ide::LineIndex` with additional info required to convert offsets
//! into lsp positions.
//!
//! We maintain invariant that all internal strings use `\n` as line separator.
//! This module does line ending conversion and detection (so that we can
//! convert back to `\r\n` on the way out).
use std::sync::Arc;
/// How columns are counted when exchanging positions with the client:
/// utf-8 bytes, or utf-16 code units (the LSP default).
//
// Derives added: a public two-unit-variant enum should be `Debug` for
// diagnostics and is trivially `Copy`/`Eq`, matching the sibling
// `LineEndings` enum in this module.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OffsetEncoding {
    Utf8,
    Utf16,
}
/// `ide::LineIndex` bundled with the extra state needed to turn offsets
/// into LSP positions: the file's line endings and the position encoding
/// negotiated with the client.
pub(crate) struct LineIndex {
    pub(crate) index: Arc<ide::LineIndex>,
    // Detected line endings; internal text always uses `\n`, so this tells
    // us whether to convert back to `\r\n` on the way out (see module docs).
    pub(crate) endings: LineEndings,
    // Column encoding (utf-8 or utf-16) the client asked for.
    pub(crate) encoding: OffsetEncoding,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) enum LineEndings {
Unix,

View File

@ -385,3 +385,7 @@ pub(crate) enum CodeLensResolveData {
Impls(lsp_types::request::GotoImplementationParams),
References(lsp_types::TextDocumentPositionParams),
}
/// Did the client advertise the `"utf-8"` offset encoding in its
/// capabilities? Absent or empty `offset_encoding` means "no".
pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
    match caps.offset_encoding.as_deref() {
        Some(encodings) => encodings.iter().any(|it| it == "utf-8"),
        None => false,
    }
}

View File

@ -1,11 +1,14 @@
//! Utilities for LSP-related boilerplate code.
use std::{error::Error, ops::Range};
use std::{error::Error, ops::Range, sync::Arc};
use ide::LineIndex;
use ide_db::base_db::Canceled;
use lsp_server::Notification;
use crate::{from_proto, global_state::GlobalState};
use crate::{
from_proto,
global_state::GlobalState,
line_index::{LineEndings, LineIndex, OffsetEncoding},
};
pub(crate) fn is_canceled(e: &(dyn Error + 'static)) -> bool {
e.downcast_ref::<Canceled>().is_some()
@ -90,7 +93,13 @@ pub(crate) fn apply_document_changes(
old_text: &mut String,
content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
) {
let mut line_index = LineIndex::new(old_text);
let mut line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(old_text)),
// We don't care about line endings or offset encoding here.
endings: LineEndings::Unix,
encoding: OffsetEncoding::Utf16,
};
// The changes we got must be applied sequentially, but can cross lines so we
// have to keep our line index updated.
// Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
@ -115,7 +124,7 @@ pub(crate) fn apply_document_changes(
match change.range {
Some(range) => {
if !index_valid.covers(range.end.line) {
line_index = LineIndex::new(&old_text);
line_index.index = Arc::new(ide::LineIndex::new(&old_text));
}
index_valid = IndexValid::UpToLineExclusive(range.start.line);
let range = from_proto::text_range(&line_index, range);

View File

@ -7,22 +7,29 @@ use std::{
use ide::{
Annotation, AnnotationKind, Assist, AssistKind, CallInfo, CompletionItem, CompletionItemKind,
Documentation, FileId, FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlPunct,
HlRange, HlTag, Indel, InlayHint, InlayKind, InsertTextFormat, LineIndex, Markup,
NavigationTarget, ReferenceAccess, RenameError, Runnable, Severity, SourceChange, TextEdit,
TextRange, TextSize,
HlRange, HlTag, Indel, InlayHint, InlayKind, InsertTextFormat, Markup, NavigationTarget,
ReferenceAccess, RenameError, Runnable, Severity, SourceChange, TextEdit, TextRange, TextSize,
};
use ide_db::SymbolKind;
use itertools::Itertools;
use serde_json::to_value;
use crate::{
cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot,
line_endings::LineEndings, lsp_ext, semantic_tokens, Result,
cargo_target_spec::CargoTargetSpec,
global_state::GlobalStateSnapshot,
line_index::{LineEndings, LineIndex, OffsetEncoding},
lsp_ext, semantic_tokens, Result,
};
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
let line_col = line_index.line_col(offset);
lsp_types::Position::new(line_col.line, line_col.col_utf16)
let line_col = line_index.index.line_col(offset);
match line_index.encoding {
OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
OffsetEncoding::Utf16 => {
let line_col = line_index.index.to_utf16(line_col);
lsp_types::Position::new(line_col.line, line_col.col)
}
}
}
pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
@ -122,13 +129,9 @@ pub(crate) fn completion_item_kind(
}
}
pub(crate) fn text_edit(
line_index: &LineIndex,
line_endings: LineEndings,
indel: Indel,
) -> lsp_types::TextEdit {
pub(crate) fn text_edit(line_index: &LineIndex, indel: Indel) -> lsp_types::TextEdit {
let range = range(line_index, indel.delete);
let new_text = match line_endings {
let new_text = match line_index.endings {
LineEndings::Unix => indel.insert,
LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
};
@ -137,11 +140,10 @@ pub(crate) fn text_edit(
pub(crate) fn snippet_text_edit(
line_index: &LineIndex,
line_endings: LineEndings,
is_snippet: bool,
indel: Indel,
) -> lsp_ext::SnippetTextEdit {
let text_edit = text_edit(line_index, line_endings, indel);
let text_edit = text_edit(line_index, indel);
let insert_text_format =
if is_snippet { Some(lsp_types::InsertTextFormat::Snippet) } else { None };
lsp_ext::SnippetTextEdit {
@ -153,27 +155,24 @@ pub(crate) fn snippet_text_edit(
pub(crate) fn text_edit_vec(
line_index: &LineIndex,
line_endings: LineEndings,
text_edit: TextEdit,
) -> Vec<lsp_types::TextEdit> {
text_edit.into_iter().map(|indel| self::text_edit(line_index, line_endings, indel)).collect()
text_edit.into_iter().map(|indel| self::text_edit(line_index, indel)).collect()
}
pub(crate) fn snippet_text_edit_vec(
line_index: &LineIndex,
line_endings: LineEndings,
is_snippet: bool,
text_edit: TextEdit,
) -> Vec<lsp_ext::SnippetTextEdit> {
text_edit
.into_iter()
.map(|indel| self::snippet_text_edit(line_index, line_endings, is_snippet, indel))
.map(|indel| self::snippet_text_edit(line_index, is_snippet, indel))
.collect()
}
pub(crate) fn completion_item(
line_index: &LineIndex,
line_endings: LineEndings,
completion_item: CompletionItem,
) -> Vec<lsp_types::CompletionItem> {
fn set_score(res: &mut lsp_types::CompletionItem, label: &str) {
@ -190,19 +189,19 @@ pub(crate) fn completion_item(
for indel in completion_item.text_edit().iter() {
if indel.delete.contains_range(source_range) {
text_edit = Some(if indel.delete == source_range {
self::text_edit(line_index, line_endings, indel.clone())
self::text_edit(line_index, indel.clone())
} else {
assert!(source_range.end() == indel.delete.end());
let range1 = TextRange::new(indel.delete.start(), source_range.start());
let range2 = source_range;
let indel1 = Indel::replace(range1, String::new());
let indel2 = Indel::replace(range2, indel.insert.clone());
additional_text_edits.push(self::text_edit(line_index, line_endings, indel1));
self::text_edit(line_index, line_endings, indel2)
additional_text_edits.push(self::text_edit(line_index, indel1));
self::text_edit(line_index, indel2)
})
} else {
assert!(source_range.intersect(indel.delete).is_none());
let text_edit = self::text_edit(line_index, line_endings, indel.clone());
let text_edit = self::text_edit(line_index, indel.clone());
additional_text_edits.push(text_edit);
}
}
@ -358,7 +357,7 @@ pub(crate) fn semantic_tokens(
let token_index = semantic_tokens::type_index(type_);
let modifier_bitset = mods.0;
for mut text_range in line_index.lines(highlight_range.range) {
for mut text_range in line_index.index.lines(highlight_range.range) {
if text[text_range].ends_with('\n') {
text_range =
TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
@ -565,7 +564,7 @@ pub(crate) fn location(
frange: FileRange,
) -> Result<lsp_types::Location> {
let url = url(snap, frange.file_id);
let line_index = snap.analysis.file_line_index(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
let range = range(&line_index, frange.range);
let loc = lsp_types::Location::new(url, range);
Ok(loc)
@ -577,7 +576,7 @@ pub(crate) fn location_from_nav(
nav: NavigationTarget,
) -> Result<lsp_types::Location> {
let url = url(snap, nav.file_id);
let line_index = snap.analysis.file_line_index(nav.file_id)?;
let line_index = snap.file_line_index(nav.file_id)?;
let range = range(&line_index, nav.full_range);
let loc = lsp_types::Location::new(url, range);
Ok(loc)
@ -590,7 +589,7 @@ pub(crate) fn location_link(
) -> Result<lsp_types::LocationLink> {
let origin_selection_range = match src {
Some(src) => {
let line_index = snap.analysis.file_line_index(src.file_id)?;
let line_index = snap.file_line_index(src.file_id)?;
let range = range(&line_index, src.range);
Some(range)
}
@ -610,7 +609,7 @@ fn location_info(
snap: &GlobalStateSnapshot,
target: NavigationTarget,
) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
let line_index = snap.analysis.file_line_index(target.file_id)?;
let line_index = snap.file_line_index(target.file_id)?;
let target_uri = url(snap, target.file_id);
let target_range = range(&line_index, target.full_range);
@ -648,12 +647,8 @@ pub(crate) fn snippet_text_document_edit(
edit: TextEdit,
) -> Result<lsp_ext::SnippetTextDocumentEdit> {
let text_document = optional_versioned_text_document_identifier(snap, file_id);
let line_index = snap.analysis.file_line_index(file_id)?;
let line_endings = snap.file_line_endings(file_id);
let edits = edit
.into_iter()
.map(|it| snippet_text_edit(&line_index, line_endings, is_snippet, it))
.collect();
let line_index = snap.file_line_index(file_id)?;
let edits = edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
}
@ -674,9 +669,8 @@ pub(crate) fn snippet_text_document_ops(
if !initial_contents.is_empty() {
let text_document =
lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
let range = range(&LineIndex::new(""), TextRange::empty(TextSize::from(0)));
let text_edit = lsp_ext::SnippetTextEdit {
range,
range: lsp_types::Range::default(),
new_text: initial_contents,
insert_text_format: Some(lsp_types::InsertTextFormat::PlainText),
};
@ -867,7 +861,7 @@ pub(crate) fn code_lens(
) -> Result<lsp_types::CodeLens> {
match annotation.kind {
AnnotationKind::Runnable { debug, runnable: run } => {
let line_index = snap.analysis.file_line_index(run.nav.file_id)?;
let line_index = snap.file_line_index(run.nav.file_id)?;
let annotation_range = range(&line_index, annotation.range);
let action = run.action();
@ -883,7 +877,7 @@ pub(crate) fn code_lens(
Ok(lsp_types::CodeLens { range: annotation_range, command: Some(command), data: None })
}
AnnotationKind::HasImpls { position: file_position, data } => {
let line_index = snap.analysis.file_line_index(file_position.file_id)?;
let line_index = snap.file_line_index(file_position.file_id)?;
let annotation_range = range(&line_index, annotation.range);
let url = url(snap, file_position.file_id);
@ -926,7 +920,7 @@ pub(crate) fn code_lens(
})
}
AnnotationKind::HasReferences { position: file_position, data } => {
let line_index = snap.analysis.file_line_index(file_position.file_id)?;
let line_index = snap.file_line_index(file_position.file_id)?;
let annotation_range = range(&line_index, annotation.range);
let url = url(snap, file_position.file_id);
@ -1060,6 +1054,8 @@ pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
#[cfg(test)]
mod tests {
use std::sync::Arc;
use hir::PrefixKind;
use ide::Analysis;
use ide_db::helpers::{insert_use::InsertUseConfig, SnippetCap};
@ -1077,7 +1073,11 @@ mod tests {
}"#;
let (offset, text) = test_utils::extract_offset(fixture);
let line_index = LineIndex::new(&text);
let line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(&text)),
endings: LineEndings::Unix,
encoding: OffsetEncoding::Utf16,
};
let (analysis, file_id) = Analysis::from_single_file(text);
let completions: Vec<(String, Option<String>)> = analysis
.completions(
@ -1095,7 +1095,7 @@ mod tests {
.unwrap()
.into_iter()
.filter(|c| c.label().ends_with("arg"))
.map(|c| completion_item(&line_index, LineEndings::Unix, c))
.map(|c| completion_item(&line_index, c))
.flat_map(|comps| comps.into_iter().map(|c| (c.label, c.sort_text)))
.collect();
expect_test::expect![[r#"
@ -1133,7 +1133,11 @@ fn main() {
let folds = analysis.folding_ranges(file_id).unwrap();
assert_eq!(folds.len(), 4);
let line_index = LineIndex::new(&text);
let line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(&text)),
endings: LineEndings::Unix,
encoding: OffsetEncoding::Utf16,
};
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();

View File

@ -1,5 +1,5 @@
<!---
lsp_ext.rs hash: 34aec6bfeaeb97a
lsp_ext.rs hash: d279d971d4f62cd7
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@ -19,6 +19,12 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde
If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-analyzer/rust-analyzer/issues/4604).
## UTF-8 offsets
rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
See [clangd's protocol extensions documentation](https://clangd.llvm.org/extensions.html#utf-8-offsets) for details.
## `initializationOptions`
For `initializationOptions`, `rust-analyzer` expects the `"rust-analyzer"` section of the configuration.