//! Semantic Tokens helpers
use std::ops;

use lsp_types::{
    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
    SemanticTokensEdit,
};
/// Declares the set of semantic token types this server supports.
///
/// `standard` entries re-export token types predefined by the LSP spec;
/// `custom` entries define rust-analyzer-specific types from a string name,
/// optionally with `=> FALLBACK` naming the standard type to substitute when
/// the client does not understand custom token types.
macro_rules! define_semantic_token_types {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal) $(=> $fallback:ident)?),*$(,)?
        }
    ) => {
        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*

        /// All token types advertised in the server capabilities,
        /// standard types first.
        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
            $(SemanticTokenType::$standard,)*
            $($custom),*
        ];

        /// Maps a custom token type to its declared standard fallback.
        ///
        /// Returns `None` for custom types without a fallback and passes
        /// every other (standard) type through unchanged.
        pub(crate) fn standard_fallback_type(token: SemanticTokenType) -> Option<SemanticTokenType> {
            $(
                if token == $custom {
                    // `.or(...)` is only emitted when a fallback was declared.
                    None $(.or(Some(SemanticTokenType::$fallback)))?
                } else
            )*
            { Some(token) }
        }
    };
}
define_semantic_token_types! [
2022-08-23 18:06:32 +02:00
standard {
COMMENT ,
DECORATOR ,
ENUM_MEMBER ,
ENUM ,
FUNCTION ,
INTERFACE ,
KEYWORD ,
MACRO ,
METHOD ,
NAMESPACE ,
NUMBER ,
OPERATOR ,
PARAMETER ,
PROPERTY ,
STRING ,
STRUCT ,
TYPE_PARAMETER ,
VARIABLE ,
}
custom {
( ANGLE , " angle " ) ,
2023-05-11 09:55:30 +02:00
( ARITHMETIC , " arithmetic " ) = > OPERATOR ,
( ATTRIBUTE , " attribute " ) = > DECORATOR ,
( ATTRIBUTE_BRACKET , " attributeBracket " ) = > DECORATOR ,
( BITWISE , " bitwise " ) = > OPERATOR ,
2022-08-23 18:06:32 +02:00
( BOOLEAN , " boolean " ) ,
( BRACE , " brace " ) ,
( BRACKET , " bracket " ) ,
2023-05-11 09:55:30 +02:00
( BUILTIN_ATTRIBUTE , " builtinAttribute " ) = > DECORATOR ,
2022-08-23 18:06:32 +02:00
( BUILTIN_TYPE , " builtinType " ) ,
2023-05-11 09:55:30 +02:00
( CHAR , " character " ) = > STRING ,
2022-08-23 18:06:32 +02:00
( COLON , " colon " ) ,
( COMMA , " comma " ) ,
2023-05-11 09:55:30 +02:00
( COMPARISON , " comparison " ) = > OPERATOR ,
2022-08-23 18:06:32 +02:00
( CONST_PARAMETER , " constParameter " ) ,
2023-05-11 09:55:30 +02:00
( DERIVE , " derive " ) = > DECORATOR ,
( DERIVE_HELPER , " deriveHelper " ) = > DECORATOR ,
2022-08-23 18:06:32 +02:00
( DOT , " dot " ) ,
2023-05-11 09:55:30 +02:00
( ESCAPE_SEQUENCE , " escapeSequence " ) = > STRING ,
( FORMAT_SPECIFIER , " formatSpecifier " ) = > STRING ,
( GENERIC , " generic " ) = > TYPE_PARAMETER ,
2022-08-23 18:06:32 +02:00
( LABEL , " label " ) ,
( LIFETIME , " lifetime " ) ,
2023-05-11 09:55:30 +02:00
( LOGICAL , " logical " ) = > OPERATOR ,
( MACRO_BANG , " macroBang " ) = > MACRO ,
2022-08-23 18:06:32 +02:00
( PARENTHESIS , " parenthesis " ) ,
( PUNCTUATION , " punctuation " ) ,
2023-05-11 09:55:30 +02:00
( SELF_KEYWORD , " selfKeyword " ) = > KEYWORD ,
( SELF_TYPE_KEYWORD , " selfTypeKeyword " ) = > KEYWORD ,
2022-08-23 18:06:32 +02:00
( SEMICOLON , " semicolon " ) ,
( TYPE_ALIAS , " typeAlias " ) ,
2023-05-11 09:55:30 +02:00
( TOOL_MODULE , " toolModule " ) = > DECORATOR ,
2022-08-23 18:06:32 +02:00
( UNION , " union " ) ,
( UNRESOLVED_REFERENCE , " unresolvedReference " ) ,
}
2020-02-14 17:56:28 -05:00
] ;
2023-05-11 09:55:30 +02:00
/// Counts the token trees passed in, expanding to a `usize` expression
/// that is usable in `const` contexts (recursion, no allocation).
macro_rules! count_tts {
    () => { 0usize };
    ($_head:tt $($tail:tt)*) => { 1usize + count_tts!($($tail)*) };
}
/// Declares the set of semantic token modifiers this server supports.
///
/// `standard` entries re-export modifiers predefined by the LSP spec;
/// `custom` entries define rust-analyzer-specific modifiers from a string
/// name. Bit positions in `ModifierSet` follow the declaration order, so
/// the generated `LAST_STANDARD_MOD` marks where custom modifiers begin.
macro_rules! define_semantic_token_modifiers {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal)),*$(,)?
        }
    ) => {
        $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
        $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*

        /// All modifiers advertised in the server capabilities,
        /// standard modifiers first.
        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
            $(SemanticTokenModifier::$standard,)*
            $($custom),*
        ];

        /// Number of standard modifiers; bits at or above this index
        /// belong to custom modifiers.
        const LAST_STANDARD_MOD: usize = count_tts!($($standard)*);
    };
}
define_semantic_token_modifiers! [
2022-08-23 18:06:32 +02:00
standard {
DOCUMENTATION ,
DECLARATION ,
STATIC ,
DEFAULT_LIBRARY ,
}
custom {
( ASYNC , " async " ) ,
( ATTRIBUTE_MODIFIER , " attribute " ) ,
( CALLABLE , " callable " ) ,
( CONSTANT , " constant " ) ,
( CONSUMING , " consuming " ) ,
( CONTROL_FLOW , " controlFlow " ) ,
( CRATE_ROOT , " crateRoot " ) ,
( INJECTED , " injected " ) ,
( INTRA_DOC_LINK , " intraDocLink " ) ,
( LIBRARY , " library " ) ,
2023-05-13 11:43:39 +02:00
( MACRO_MODIFIER , " macro " ) ,
2022-08-23 18:06:32 +02:00
( MUTABLE , " mutable " ) ,
( PUBLIC , " public " ) ,
( REFERENCE , " reference " ) ,
( TRAIT_MODIFIER , " trait " ) ,
( UNSAFE , " unsafe " ) ,
}
2020-02-14 17:56:28 -05:00
] ;
2020-02-26 19:39:32 +01:00
#[ derive(Default) ]
pub ( crate ) struct ModifierSet ( pub ( crate ) u32 ) ;
2020-02-14 17:56:28 -05:00
2023-05-11 09:55:30 +02:00
impl ModifierSet {
pub ( crate ) fn standard_fallback ( & mut self ) {
// Remove all non standard modifiers
self . 0 = self . 0 & ! ( ! 0 u32 < < LAST_STANDARD_MOD )
}
}
2020-02-26 19:39:32 +01:00
impl ops ::BitOrAssign < SemanticTokenModifier > for ModifierSet {
fn bitor_assign ( & mut self , rhs : SemanticTokenModifier ) {
let idx = SUPPORTED_MODIFIERS . iter ( ) . position ( | it | it = = & rhs ) . unwrap ( ) ;
self . 0 | = 1 < < idx ;
}
2020-02-14 17:56:28 -05:00
}
2020-02-24 21:17:20 -05:00
/// Tokens are encoded relative to each other.
2020-02-25 12:42:44 +01:00
///
2021-06-13 21:57:10 -07:00
/// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
2020-02-14 17:56:28 -05:00
pub ( crate ) struct SemanticTokensBuilder {
2020-07-24 17:55:17 -04:00
id : String ,
2020-02-14 17:56:28 -05:00
prev_line : u32 ,
prev_char : u32 ,
data : Vec < SemanticToken > ,
}
impl SemanticTokensBuilder {
2020-11-02 16:31:38 +01:00
pub ( crate ) fn new ( id : String ) -> Self {
2020-07-24 17:55:17 -04:00
SemanticTokensBuilder { id , prev_line : 0 , prev_char : 0 , data : Default ::default ( ) }
}
2020-02-24 21:17:20 -05:00
/// Push a new token onto the builder
2020-11-02 16:31:38 +01:00
pub ( crate ) fn push ( & mut self , range : Range , token_index : u32 , modifier_bitset : u32 ) {
2022-12-30 10:08:07 +00:00
let mut push_line = range . start . line ;
let mut push_char = range . start . character ;
2020-02-14 17:56:28 -05:00
if ! self . data . is_empty ( ) {
push_line - = self . prev_line ;
if push_line = = 0 {
push_char - = self . prev_char ;
}
}
// A token cannot be multiline
let token_len = range . end . character - range . start . character ;
let token = SemanticToken {
delta_line : push_line ,
delta_start : push_char ,
2022-12-30 10:08:07 +00:00
length : token_len ,
2020-02-14 17:56:28 -05:00
token_type : token_index ,
token_modifiers_bitset : modifier_bitset ,
} ;
self . data . push ( token ) ;
2022-12-30 10:08:07 +00:00
self . prev_line = range . start . line ;
self . prev_char = range . start . character ;
2020-02-14 17:56:28 -05:00
}
2020-11-02 16:31:38 +01:00
pub ( crate ) fn build ( self ) -> SemanticTokens {
2020-07-24 17:55:17 -04:00
SemanticTokens { result_id : Some ( self . id ) , data : self . data }
}
}
2020-11-02 16:31:38 +01:00
pub ( crate ) fn diff_tokens ( old : & [ SemanticToken ] , new : & [ SemanticToken ] ) -> Vec < SemanticTokensEdit > {
2020-07-24 17:55:17 -04:00
let offset = new . iter ( ) . zip ( old . iter ( ) ) . take_while ( | & ( n , p ) | n = = p ) . count ( ) ;
let ( _ , old ) = old . split_at ( offset ) ;
let ( _ , new ) = new . split_at ( offset ) ;
let offset_from_end =
new . iter ( ) . rev ( ) . zip ( old . iter ( ) . rev ( ) ) . take_while ( | & ( n , p ) | n = = p ) . count ( ) ;
let ( old , _ ) = old . split_at ( old . len ( ) - offset_from_end ) ;
let ( new , _ ) = new . split_at ( new . len ( ) - offset_from_end ) ;
if old . is_empty ( ) & & new . is_empty ( ) {
vec! [ ]
} else {
// The lsp data field is actually a byte-diff but we
// travel in tokens so `start` and `delete_count` are in multiples of the
// serialized size of `SemanticToken`.
vec! [ SemanticTokensEdit {
start : 5 * offset as u32 ,
delete_count : 5 * old . len ( ) as u32 ,
data : Some ( new . into ( ) ) ,
} ]
2020-02-14 17:56:28 -05:00
}
}
2020-02-26 19:39:32 +01:00
2021-05-17 18:37:06 +03:00
pub ( crate ) fn type_index ( ty : SemanticTokenType ) -> u32 {
SUPPORTED_TYPES . iter ( ) . position ( | it | * it = = ty ) . unwrap ( ) as u32
2020-02-26 19:39:32 +01:00
}
2020-07-24 17:55:17 -04:00
#[cfg(test)]
mod tests {
    use super::*;

    /// Convenience constructor: builds a token from its five raw fields.
    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
        SemanticToken {
            delta_line: t.0,
            delta_start: t.1,
            length: t.2,
            token_type: t.3,
            token_modifiers_bitset: t.4,
        }
    }

    #[test]
    fn test_diff_insert_at_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 10,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_at_beginning() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 0,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_in_middle() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 5,
                delete_count: 0,
                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
            }
        );
    }

    #[test]
    fn test_diff_remove_from_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_beginning() {
        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_middle() {
        let before = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
    }
}