Switch to upstream positionEncoding
parent 53b6d69e93
commit 956b96a19d
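The hunks below replace rust-analyzer's clangd-style `offsetEncoding` handshake with the standard LSP 3.17 negotiation: the client lists the encodings it can handle in `general.positionEncodings`, and the server picks one and reports it in `ServerCapabilities.positionEncoding`. A minimal sketch of the server side against the lsp-types 0.93.2 API this commit moves to (illustrative only, not code from the commit):

```rust
use lsp_types::{PositionEncodingKind, ServerCapabilities};

// Illustrative only: the encoding the server settled on is announced as a
// standard capability instead of the clangd-style `offsetEncoding` extension.
fn capabilities(client_offers_utf8: bool) -> ServerCapabilities {
    ServerCapabilities {
        // `None` means the protocol default, UTF-16.
        position_encoding: client_offers_utf8.then(|| PositionEncodingKind::UTF8),
        ..Default::default()
    }
}

fn main() {
    assert_eq!(capabilities(true).position_encoding, Some(PositionEncodingKind::UTF8));
    assert_eq!(capabilities(false).position_encoding, None);
}
```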
Cargo.lock (generated, 4 lines changed)

@@ -872,9 +872,9 @@ dependencies = [

 [[package]]
 name = "lsp-types"
-version = "0.93.1"
+version = "0.93.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
+checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"
 dependencies = [
  "bitflags",
  "serde",
@@ -23,7 +23,7 @@ crossbeam-channel = "0.5.5"
 dissimilar = "1.0.4"
 itertools = "0.10.5"
 scip = "0.1.1"
-lsp-types = { version = "0.93.1", features = ["proposed"] }
+lsp-types = { version = "0.93.2", features = ["proposed"] }
 parking_lot = "0.12.1"
 xflags = "0.3.0"
 oorandom = "11.1.3"
@@ -11,7 +11,7 @@

 use lsp_server::Connection;
 use project_model::ProjectManifest;
-use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
+use rust_analyzer::{cli::flags, config::Config, from_json, Result};
 use vfs::AbsPathBuf;

 #[cfg(all(feature = "mimalloc"))]
@@ -191,11 +191,7 @@ fn run_server() -> Result<()> {
            name: String::from("rust-analyzer"),
            version: Some(rust_analyzer::version().to_string()),
        }),
-        offset_encoding: if supports_utf8(config.caps()) {
-            Some("utf-8".to_string())
-        } else {
-            None
-        },
+        offset_encoding: None,
    };

    let initialize_result = serde_json::to_value(initialize_result).unwrap();
@@ -6,19 +6,25 @@
     FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
     FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability,
     ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf,
-    RenameOptions, SaveOptions, SelectionRangeProviderCapability, SemanticTokensFullOptions,
-    SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions,
-    TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
-    TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    PositionEncodingKind, RenameOptions, SaveOptions, SelectionRangeProviderCapability,
+    SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities,
+    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
+    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
     WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
 };
 use serde_json::json;

 use crate::config::{Config, RustfmtConfig};
+use crate::lsp_ext::supports_utf8;
 use crate::semantic_tokens;

 pub fn server_capabilities(config: &Config) -> ServerCapabilities {
     ServerCapabilities {
+        position_encoding: if supports_utf8(config.caps()) {
+            Some(PositionEncodingKind::UTF8)
+        } else {
+            None
+        },
         text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
             open_close: Some(true),
             change: Some(TextDocumentSyncKind::INCREMENTAL),
@@ -20,7 +20,7 @@
     load_cargo::{load_workspace, LoadCargoConfig},
     Result,
 };
-use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
 use crate::to_proto;
 use crate::version::version;
@@ -126,7 +126,7 @@ fn get_range_id(&mut self, id: FileRange) -> Id {
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
             index: line_index,
-            encoding: OffsetEncoding::Utf16,
+            encoding: PositionEncoding::Utf16,
             endings: LineEndings::Unix,
         };
         let range_id = self.add_vertex(lsif::Vertex::Range {
@@ -248,7 +248,7 @@ fn add_file(&mut self, file: StaticIndexedFile) {
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
             index: line_index,
-            encoding: OffsetEncoding::Utf16,
+            encoding: PositionEncoding::Utf16,
             endings: LineEndings::Unix,
         };
         let result = folds
@@ -5,7 +5,7 @@
     time::Instant,
 };

-use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
 use hir::Name;
 use ide::{
     LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
@@ -91,7 +91,7 @@ pub fn run(self) -> Result<()> {

            let line_index = LineIndex {
                index: db.line_index(file_id),
-                encoding: OffsetEncoding::Utf8,
+                encoding: PositionEncoding::Utf8,
                endings: LineEndings::Unix,
            };

@@ -32,7 +32,7 @@
 use crate::{
     caps::completion_item_edit_resolve,
     diagnostics::DiagnosticsMapConfig,
-    line_index::OffsetEncoding,
+    line_index::PositionEncoding,
     lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
 };

@@ -948,11 +948,11 @@ pub fn completion_label_details_support(&self) -> bool {
            .is_some()
    }

-    pub fn offset_encoding(&self) -> OffsetEncoding {
+    pub fn position_encoding(&self) -> PositionEncoding {
        if supports_utf8(&self.caps) {
-            OffsetEncoding::Utf8
+            PositionEncoding::Utf8
        } else {
-            OffsetEncoding::Utf16
+            PositionEncoding::Utf16
        }
    }

@@ -8,7 +8,7 @@
 use vfs::{AbsPath, AbsPathBuf};

 use crate::{
-    global_state::GlobalStateSnapshot, line_index::OffsetEncoding, lsp_ext,
+    global_state::GlobalStateSnapshot, line_index::PositionEncoding, lsp_ext,
     to_proto::url_from_abs_path,
 };

@@ -66,17 +66,17 @@ fn location(
    let uri = url_from_abs_path(&file_name);

    let range = {
-        let offset_encoding = snap.config.offset_encoding();
+        let position_encoding = snap.config.position_encoding();
        lsp_types::Range::new(
-            position(&offset_encoding, span, span.line_start, span.column_start),
-            position(&offset_encoding, span, span.line_end, span.column_end),
+            position(&position_encoding, span, span.line_start, span.column_start),
+            position(&position_encoding, span, span.line_end, span.column_end),
        )
    };
    lsp_types::Location::new(uri, range)
}

fn position(
-    offset_encoding: &OffsetEncoding,
+    position_encoding: &PositionEncoding,
    span: &DiagnosticSpan,
    line_offset: usize,
    column_offset: usize,
@@ -93,9 +93,9 @@ fn position(
        };
    }
    let mut char_offset = 0;
-    let len_func = match offset_encoding {
-        OffsetEncoding::Utf8 => char::len_utf8,
-        OffsetEncoding::Utf16 => char::len_utf16,
+    let len_func = match position_encoding {
+        PositionEncoding::Utf8 => char::len_utf8,
+        PositionEncoding::Utf16 => char::len_utf16,
    };
    for c in line.text.chars() {
        char_offset += 1;
@@ -8,7 +8,7 @@
 use crate::{
     from_json,
     global_state::GlobalStateSnapshot,
-    line_index::{LineIndex, OffsetEncoding},
+    line_index::{LineIndex, PositionEncoding},
     lsp_ext,
     lsp_utils::invalid_params_error,
     Result,
@@ -25,10 +25,10 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {

pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
    let line_col = match line_index.encoding {
-        OffsetEncoding::Utf8 => {
+        PositionEncoding::Utf8 => {
            LineCol { line: position.line as u32, col: position.character as u32 }
        }
-        OffsetEncoding::Utf16 => {
+        PositionEncoding::Utf16 => {
            let line_col =
                LineColUtf16 { line: position.line as u32, col: position.character as u32 };
            line_index.index.to_utf8(line_col)
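Why `offset()` has to branch on the encoding: the same cursor location arrives as a different `character` value depending on what was negotiated, because UTF-8 counts bytes while UTF-16 counts code units. A self-contained illustration in plain Rust (not rust-analyzer code; the string is arbitrary):

```rust
fn main() {
    // "𐐀" (U+10400) is 4 bytes in UTF-8 but 2 code units in UTF-16.
    let line = "𐐀x";

    // Column of a cursor placed right after the first character,
    // expressed in each encoding's units.
    let utf8_col: usize = line.chars().take(1).map(char::len_utf8).sum();
    let utf16_col: usize = line.chars().take(1).map(char::len_utf16).sum();

    assert_eq!(utf8_col, 4);  // what a UTF-8 client sends as `character`
    assert_eq!(utf16_col, 2); // what a UTF-16 client sends as `character`
}
```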
@@ -383,7 +383,7 @@ pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
    pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
        let endings = self.vfs.read().1[&file_id];
        let index = self.analysis.file_line_index(file_id)?;
-        let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
+        let res = LineIndex { index, endings, encoding: self.config.position_encoding() };
        Ok(res)
    }

@@ -7,7 +7,7 @@

 use std::sync::Arc;

-pub enum OffsetEncoding {
+pub enum PositionEncoding {
     Utf8,
     Utf16,
 }
@@ -15,7 +15,7 @@ pub enum OffsetEncoding {
 pub(crate) struct LineIndex {
     pub(crate) index: Arc<ide::LineIndex>,
     pub(crate) endings: LineEndings,
-    pub(crate) encoding: OffsetEncoding,
+    pub(crate) encoding: PositionEncoding,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -3,6 +3,7 @@
 use std::{collections::HashMap, path::PathBuf};

 use lsp_types::request::Request;
+use lsp_types::PositionEncodingKind;
 use lsp_types::{
     notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
     PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
@@ -455,7 +456,15 @@ pub(crate) enum CodeLensResolveData {
}

pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
-    caps.offset_encoding.as_deref().unwrap_or_default().iter().any(|it| it == "utf-8")
+    match &caps.general {
+        Some(general) => general
+            .position_encodings
+            .as_deref()
+            .unwrap_or_default()
+            .iter()
+            .any(|it| it == &PositionEncodingKind::UTF8),
+        _ => false,
+    }
}

pub enum MoveItem {}
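The practical effect of this rework: the clangd-style `offsetEncoding` string array on the client capabilities is no longer consulted, only the standard `general.positionEncodings` field counts, and a client that does not send it falls back to the UTF-16 default. A hedged sketch against lsp-types 0.93.2 (the helper name is hypothetical; it merely mirrors the patched logic rather than calling rust-analyzer's internal function):

```rust
use lsp_types::{ClientCapabilities, GeneralClientCapabilities, PositionEncodingKind};

// Hypothetical mirror of the patched `supports_utf8`, for illustration.
fn offers_utf8(caps: &ClientCapabilities) -> bool {
    caps.general
        .as_ref()
        .and_then(|g| g.position_encodings.as_deref())
        .unwrap_or_default()
        .iter()
        .any(|it| it == &PositionEncodingKind::UTF8)
}

fn main() {
    // No `general` capabilities at all: stay on the UTF-16 default.
    assert!(!offers_utf8(&ClientCapabilities::default()));

    // Client explicitly offers UTF-8 alongside the mandatory UTF-16 fallback.
    let caps = ClientCapabilities {
        general: Some(GeneralClientCapabilities {
            position_encodings: Some(vec![
                PositionEncodingKind::UTF8,
                PositionEncodingKind::UTF16,
            ]),
            ..Default::default()
        }),
        ..Default::default()
    };
    assert!(offers_utf8(&caps));
}
```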
@@ -6,7 +6,7 @@
 use crate::{
     from_proto,
     global_state::GlobalState,
-    line_index::{LineEndings, LineIndex, OffsetEncoding},
+    line_index::{LineEndings, LineIndex, PositionEncoding},
     LspError,
 };

@@ -140,7 +140,7 @@ pub(crate) fn apply_document_changes(
        index: Arc::new(ide::LineIndex::new(old_text)),
        // We don't care about line endings or offset encoding here.
        endings: LineEndings::Unix,
-        encoding: OffsetEncoding::Utf16,
+        encoding: PositionEncoding::Utf16,
    };

    // The changes we got must be applied sequentially, but can cross lines so we
@@ -21,7 +21,7 @@
     cargo_target_spec::CargoTargetSpec,
     config::{CallInfoConfig, Config},
     global_state::GlobalStateSnapshot,
-    line_index::{LineEndings, LineIndex, OffsetEncoding},
+    line_index::{LineEndings, LineIndex, PositionEncoding},
     lsp_ext,
     lsp_utils::invalid_params_error,
     semantic_tokens, Result,
@@ -30,8 +30,8 @@
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
    let line_col = line_index.index.line_col(offset);
    match line_index.encoding {
-        OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
-        OffsetEncoding::Utf16 => {
+        PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+        PositionEncoding::Utf16 => {
            let line_col = line_index.index.to_utf16(line_col);
            lsp_types::Position::new(line_col.line, line_col.col)
        }
@@ -1394,7 +1394,7 @@ fn main() {
        let line_index = LineIndex {
            index: Arc::new(ide::LineIndex::new(text)),
            endings: LineEndings::Unix,
-            encoding: OffsetEncoding::Utf16,
+            encoding: PositionEncoding::Utf16,
        };
        let converted: Vec<lsp_types::FoldingRange> =
            folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
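In the outgoing direction, the `to_utf16` call above re-expresses a native byte-based column in UTF-16 code units before it is packed into an `lsp_types::Position`. A simplified stand-in for that conversion on a single line (hypothetical helper, not rust-analyzer's `LineIndex`):

```rust
// Convert a byte offset within one line into a UTF-16 column.
fn utf16_col(line: &str, byte_offset: usize) -> u32 {
    line[..byte_offset].chars().map(|c| c.len_utf16() as u32).sum()
}

fn main() {
    let line = "é𐐀x";         // 'é': 2 bytes / 1 unit, '𐐀': 4 bytes / 2 units
    let byte_offset_of_x = 6;  // where 'x' starts in UTF-8 bytes
    assert_eq!(utf16_col(line, byte_offset_of_x), 3);
}
```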
@@ -1,5 +1,5 @@
 <!---
-lsp_ext.rs hash: 7b710095d773b978
+lsp_ext.rs hash: 62068e53ac202dc8

 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -19,12 +19,6 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde

 If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).

-## UTF-8 offsets
-
-rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
-
-https://clangd.llvm.org/extensions.html#utf-8-offsets
-
 ## Configuration in `initializationOptions`

 **Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
@@ -13,4 +13,4 @@ serde = { version = "1.0.144", features = ["derive"] }
 crossbeam-channel = "0.5.6"

 [dev-dependencies]
-lsp-types = "0.93.1"
+lsp-types = "0.93.2"