2020-02-06 05:52:32 -06:00
|
|
|
use std::iter::successors;
|
|
|
|
|
2020-02-18 11:35:10 -06:00
|
|
|
use hir::Semantics;
|
2020-02-06 05:52:32 -06:00
|
|
|
use ra_ide_db::RootDatabase;
|
2019-03-20 15:52:55 -05:00
|
|
|
use ra_syntax::{
|
2020-02-26 10:12:26 -06:00
|
|
|
algo::{self, find_covering_element, skip_trivia_token},
|
2019-04-02 02:23:18 -05:00
|
|
|
ast::{self, AstNode, AstToken},
|
2020-02-18 11:35:10 -06:00
|
|
|
Direction, NodeOrToken,
|
2019-10-27 07:18:54 -05:00
|
|
|
SyntaxKind::{self, *},
|
2020-04-24 16:40:41 -05:00
|
|
|
SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T,
|
2019-01-08 13:33:36 -06:00
|
|
|
};
|
|
|
|
|
2020-02-18 11:35:10 -06:00
|
|
|
use crate::FileRange;
|
2019-03-20 15:52:55 -05:00
|
|
|
|
2020-05-30 18:54:54 -05:00
|
|
|
// Feature: Extend Selection
|
|
|
|
//
|
|
|
|
// Extends the current selection to the encompassing syntactic construct
|
|
|
|
// (expression, statement, item, module, etc.). It works with multiple cursors.
|
|
|
|
//
|
|
|
|
// |===
|
|
|
|
// | Editor | Shortcut
|
|
|
|
//
|
|
|
|
// | VS Code | kbd:[Ctrl+Shift+→]
|
|
|
|
// |===
|
2019-01-08 13:33:36 -06:00
|
|
|
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
|
2020-02-18 11:35:10 -06:00
|
|
|
let sema = Semantics::new(db);
|
|
|
|
let src = sema.parse(frange.file_id);
|
|
|
|
try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
|
|
|
|
2019-12-31 12:19:59 -06:00
|
|
|
/// Computes the next, strictly larger selection for `frange`, or `None` when
/// the selection cannot be grown.
///
/// Strategy, in order:
/// 1. empty (cursor-only) selection: pick an initial token — a word inside a
///    comment/string, a whitespace-adjacent construct, or the token itself;
/// 2. selection inside a macro/attribute token tree: grow via the expansion;
/// 3. otherwise: step up to the covering node's parent, with special handling
///    for comment runs and delimiter-separated list items.
fn try_extend_selection(
    sema: &Semantics<RootDatabase>,
    root: &SyntaxNode,
    frange: FileRange,
) -> Option<TextRange> {
    let range = frange.range;

    // Token kinds whose text is further subdivided into "words" in step 1.
    let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING];
    // Node kinds whose children are delimiter-separated list items; items of
    // these lists grow to include the nearby delimiter (see `extend_list_item`).
    let list_kinds = [
        RECORD_FIELD_PAT_LIST,
        MATCH_ARM_LIST,
        RECORD_FIELD_LIST,
        TUPLE_FIELD_LIST,
        RECORD_EXPR_FIELD_LIST,
        VARIANT_LIST,
        USE_TREE_LIST,
        GENERIC_PARAM_LIST,
        GENERIC_ARG_LIST,
        TYPE_BOUND_LIST,
        PARAM_LIST,
        ARG_LIST,
        ARRAY_EXPR,
        TUPLE_EXPR,
        TUPLE_TYPE,
        TUPLE_PAT,
        WHERE_CLAUSE,
    ];

    if range.is_empty() {
        let offset = range.start();
        let mut leaves = root.token_at_offset(offset);
        // Cursor surrounded only by whitespace: select a nearby construct
        // instead of the whitespace itself.
        if leaves.clone().all(|it| it.kind() == WHITESPACE) {
            return Some(extend_ws(root, leaves.next()?, offset));
        }
        let leaf_range = match leaves {
            TokenAtOffset::None => return None,
            TokenAtOffset::Single(l) => {
                // Inside comments/strings, first select just the word under
                // the cursor; fall back to the whole token.
                if string_kinds.contains(&l.kind()) {
                    extend_single_word_in_comment_or_string(&l, offset)
                        .unwrap_or_else(|| l.text_range())
                } else {
                    l.text_range()
                }
            }
            // Cursor exactly between two tokens: prefer the more interesting one.
            TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(),
        };
        return Some(leaf_range);
    };
    let node = match find_covering_element(root, range) {
        NodeOrToken::Token(token) => {
            // A partially selected token is completed first.
            if token.text_range() != range {
                return Some(token.text_range());
            }
            // A fully selected comment grows to the contiguous comment run.
            if let Some(comment) = ast::Comment::cast(token.clone()) {
                if let Some(range) = extend_comments(comment) {
                    return Some(range);
                }
            }
            token.parent()
        }
        NodeOrToken::Node(node) => node,
    };

    // if we are in single token_tree, we maybe live in macro or attr
    if node.kind() == TOKEN_TREE {
        if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
            if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
                return Some(range);
            }
        }
    }

    // A partially selected node is completed first.
    if node.text_range() != range {
        return Some(node.text_range());
    }

    // Jump to the topmost node covering exactly this range, so parent/sibling
    // traversal below operates on the widest equivalent node.
    let node = shallowest_node(&node);

    // List items grow to include their leading/trailing delimiter before the
    // selection jumps to the enclosing list node.
    if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
        if let Some(range) = extend_list_item(&node) {
            return Some(range);
        }
    }

    node.parent().map(|it| it.text_range())
}
|
|
|
|
|
2019-12-31 12:19:59 -06:00
|
|
|
fn extend_tokens_from_range(
|
2020-02-18 11:35:10 -06:00
|
|
|
sema: &Semantics<RootDatabase>,
|
2019-12-31 12:19:59 -06:00
|
|
|
macro_call: ast::MacroCall,
|
|
|
|
original_range: TextRange,
|
|
|
|
) -> Option<TextRange> {
|
2020-01-10 06:36:09 -06:00
|
|
|
let src = find_covering_element(¯o_call.syntax(), original_range);
|
|
|
|
let (first_token, last_token) = match src {
|
|
|
|
NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?),
|
|
|
|
NodeOrToken::Token(it) => (it.clone(), it),
|
|
|
|
};
|
2020-01-08 14:03:50 -06:00
|
|
|
|
2020-02-26 10:12:26 -06:00
|
|
|
let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
|
|
|
|
let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
|
2020-01-10 06:36:09 -06:00
|
|
|
|
2020-04-24 16:40:41 -05:00
|
|
|
while !original_range.contains_range(first_token.text_range()) {
|
2020-02-26 10:12:26 -06:00
|
|
|
first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
|
2020-01-10 06:36:09 -06:00
|
|
|
}
|
2020-04-24 16:40:41 -05:00
|
|
|
while !original_range.contains_range(last_token.text_range()) {
|
2020-02-26 10:12:26 -06:00
|
|
|
last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
|
2020-01-10 06:36:09 -06:00
|
|
|
}
|
2020-01-08 14:03:50 -06:00
|
|
|
|
|
|
|
// compute original mapped token range
|
2020-02-18 11:35:10 -06:00
|
|
|
let extended = {
|
|
|
|
let fst_expanded = sema.descend_into_macros(first_token.clone());
|
|
|
|
let lst_expanded = sema.descend_into_macros(last_token.clone());
|
|
|
|
let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
|
|
|
|
lca = shallowest_node(&lca);
|
|
|
|
if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
|
|
|
|
lca = lca.parent()?;
|
2020-01-08 14:03:50 -06:00
|
|
|
}
|
2020-02-18 11:35:10 -06:00
|
|
|
lca
|
2020-01-08 14:03:50 -06:00
|
|
|
};
|
|
|
|
|
|
|
|
// Compute parent node range
|
2020-02-25 07:59:13 -06:00
|
|
|
let validate = |token: &SyntaxToken| {
|
2020-02-18 11:35:10 -06:00
|
|
|
let expanded = sema.descend_into_macros(token.clone());
|
|
|
|
algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
|
2020-01-08 14:03:50 -06:00
|
|
|
};
|
|
|
|
|
|
|
|
// Find the first and last text range under expanded parent
|
2020-01-10 06:36:09 -06:00
|
|
|
let first = successors(Some(first_token), |token| {
|
2020-02-25 07:59:13 -06:00
|
|
|
let token = token.prev_token()?;
|
2020-02-26 10:12:26 -06:00
|
|
|
skip_trivia_token(token, Direction::Prev)
|
2020-01-10 06:36:09 -06:00
|
|
|
})
|
2020-02-25 07:59:13 -06:00
|
|
|
.take_while(validate)
|
2020-01-10 06:36:09 -06:00
|
|
|
.last()?;
|
2020-02-25 07:59:13 -06:00
|
|
|
|
2020-01-10 06:36:09 -06:00
|
|
|
let last = successors(Some(last_token), |token| {
|
2020-02-25 07:59:13 -06:00
|
|
|
let token = token.next_token()?;
|
2020-02-26 10:12:26 -06:00
|
|
|
skip_trivia_token(token, Direction::Next)
|
2020-01-10 06:36:09 -06:00
|
|
|
})
|
2020-02-25 07:59:13 -06:00
|
|
|
.take_while(validate)
|
2020-01-10 06:36:09 -06:00
|
|
|
.last()?;
|
|
|
|
|
2020-04-24 16:40:41 -05:00
|
|
|
let range = first.text_range().cover(last.text_range());
|
|
|
|
if range.contains_range(original_range) && original_range != range {
|
2019-12-31 12:19:59 -06:00
|
|
|
Some(range)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-06 14:35:19 -06:00
|
|
|
/// Find the shallowest node with same range, which allows us to traverse siblings.
|
2020-02-18 11:35:10 -06:00
|
|
|
fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
|
|
|
|
node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
|
2019-12-31 12:19:59 -06:00
|
|
|
}
|
|
|
|
|
2019-03-20 15:52:55 -05:00
|
|
|
fn extend_single_word_in_comment_or_string(
|
2019-07-19 04:56:47 -05:00
|
|
|
leaf: &SyntaxToken,
|
2020-04-24 16:40:41 -05:00
|
|
|
offset: TextSize,
|
2019-03-20 15:52:55 -05:00
|
|
|
) -> Option<TextRange> {
|
2019-03-30 05:25:53 -05:00
|
|
|
let text: &str = leaf.text();
|
2019-07-20 04:58:27 -05:00
|
|
|
let cursor_position: u32 = (offset - leaf.text_range().start()).into();
|
2019-03-20 15:52:55 -05:00
|
|
|
|
|
|
|
let (before, after) = text.split_at(cursor_position as usize);
|
|
|
|
|
|
|
|
fn non_word_char(c: char) -> bool {
|
|
|
|
!(c.is_alphanumeric() || c == '_')
|
|
|
|
}
|
|
|
|
|
|
|
|
let start_idx = before.rfind(non_word_char)? as u32;
|
2019-06-03 09:27:51 -05:00
|
|
|
let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
|
2019-03-20 15:52:55 -05:00
|
|
|
|
2020-04-24 16:40:41 -05:00
|
|
|
let from: TextSize = (start_idx + 1).into();
|
|
|
|
let to: TextSize = (cursor_position + end_idx).into();
|
2019-03-20 15:52:55 -05:00
|
|
|
|
2020-04-24 16:40:41 -05:00
|
|
|
let range = TextRange::new(from, to);
|
2019-03-20 15:52:55 -05:00
|
|
|
if range.is_empty() {
|
|
|
|
None
|
|
|
|
} else {
|
2019-07-20 04:58:27 -05:00
|
|
|
Some(range + leaf.text_range().start())
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-04-24 16:40:41 -05:00
|
|
|
/// Called when the cursor sits in pure whitespace: picks a selection around
/// the whitespace instead of selecting the whitespace token itself.
///
/// If the whitespace contains a newline before the offset but none after it,
/// the cursor visually belongs to the following line, so the next sibling
/// (plus that line's leading indentation and trailing newline) is selected.
/// Otherwise the whole whitespace token is returned.
fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange {
    let ws_text = ws.text();
    // Split the whitespace text at `offset`; both ranges are rebased to be
    // relative to the start of the token so they can index into `ws_text`.
    let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
    let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
    let ws_suffix = &ws_text.as_str()[suffix];
    let ws_prefix = &ws_text.as_str()[prefix];
    // Newline before the cursor but not after it: the cursor sits at the start
    // of the next line, so extend over the following sibling.
    if ws_text.contains('\n') && !ws_suffix.contains('\n') {
        if let Some(node) = ws.next_sibling_or_token() {
            let start = match ws_prefix.rfind('\n') {
                // Start just after the last newline before the cursor so the
                // selection keeps the line's leading indentation.
                Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32),
                None => node.text_range().start(),
            };
            // Swallow the trailing newline, if any, so whole lines are selected.
            let end = if root.text().char_at(node.text_range().end()) == Some('\n') {
                node.text_range().end() + TextSize::of('\n')
            } else {
                node.text_range().end()
            };
            return TextRange::new(start, end);
        }
    }
    ws.text_range()
}
|
|
|
|
|
2019-12-20 14:14:30 -06:00
|
|
|
fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
|
2019-07-19 04:56:47 -05:00
|
|
|
return if priority(&r) > priority(&l) { r } else { l };
|
|
|
|
fn priority(n: &SyntaxToken) -> usize {
|
2019-03-20 15:52:55 -05:00
|
|
|
match n.kind() {
|
|
|
|
WHITESPACE => 0,
|
2019-05-08 10:35:32 -05:00
|
|
|
IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2,
|
2019-03-20 15:52:55 -05:00
|
|
|
_ => 1,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-27 07:18:54 -05:00
|
|
|
/// Extend list item selection to include nearby delimiter and whitespace.
|
2019-03-20 15:52:55 -05:00
|
|
|
fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
|
2019-03-30 05:25:53 -05:00
|
|
|
fn is_single_line_ws(node: &SyntaxToken) -> bool {
|
|
|
|
node.kind() == WHITESPACE && !node.text().contains('\n')
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
|
|
|
|
2019-10-27 07:18:54 -05:00
|
|
|
fn nearby_delimiter(
|
|
|
|
delimiter_kind: SyntaxKind,
|
|
|
|
node: &SyntaxNode,
|
|
|
|
dir: Direction,
|
|
|
|
) -> Option<SyntaxToken> {
|
2019-03-30 05:25:53 -05:00
|
|
|
node.siblings_with_tokens(dir)
|
2019-03-20 15:52:55 -05:00
|
|
|
.skip(1)
|
2019-03-30 05:25:53 -05:00
|
|
|
.skip_while(|node| match node {
|
2019-07-20 12:04:34 -05:00
|
|
|
NodeOrToken::Node(_) => false,
|
|
|
|
NodeOrToken::Token(it) => is_single_line_ws(it),
|
2019-03-30 05:25:53 -05:00
|
|
|
})
|
2019-03-20 15:52:55 -05:00
|
|
|
.next()
|
2019-07-19 11:05:34 -05:00
|
|
|
.and_then(|it| it.into_token())
|
2019-10-27 07:18:54 -05:00
|
|
|
.filter(|node| node.kind() == delimiter_kind)
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
|
|
|
|
2019-10-27 07:18:54 -05:00
|
|
|
let delimiter = match node.kind() {
|
|
|
|
TYPE_BOUND => T![+],
|
|
|
|
_ => T![,],
|
|
|
|
};
|
2019-12-10 21:18:05 -06:00
|
|
|
|
2019-10-27 07:18:54 -05:00
|
|
|
if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) {
|
|
|
|
// Include any following whitespace when delimiter is after list item.
|
|
|
|
let final_node = delimiter_node
|
2019-03-30 05:25:53 -05:00
|
|
|
.next_sibling_or_token()
|
2019-07-19 11:05:34 -05:00
|
|
|
.and_then(|it| it.into_token())
|
2019-03-20 15:52:55 -05:00
|
|
|
.filter(|node| is_single_line_ws(node))
|
2019-10-27 07:18:54 -05:00
|
|
|
.unwrap_or(delimiter_node);
|
2019-03-20 15:52:55 -05:00
|
|
|
|
2020-04-24 16:40:41 -05:00
|
|
|
return Some(TextRange::new(node.text_range().start(), final_node.text_range().end()));
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
2019-12-10 21:18:05 -06:00
|
|
|
if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) {
|
2020-04-24 16:40:41 -05:00
|
|
|
return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end()));
|
2019-12-10 21:18:05 -06:00
|
|
|
}
|
2019-03-20 15:52:55 -05:00
|
|
|
|
2019-03-30 05:25:53 -05:00
|
|
|
None
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
|
|
|
|
2019-04-02 02:23:18 -05:00
|
|
|
fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
|
2019-07-19 04:56:47 -05:00
|
|
|
let prev = adj_comments(&comment, Direction::Prev);
|
|
|
|
let next = adj_comments(&comment, Direction::Next);
|
2019-03-20 15:52:55 -05:00
|
|
|
if prev != next {
|
2020-04-24 16:40:41 -05:00
|
|
|
Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end()))
|
2019-03-20 15:52:55 -05:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-07-19 04:56:47 -05:00
|
|
|
fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
|
|
|
|
let mut res = comment.clone();
|
2019-03-30 05:25:53 -05:00
|
|
|
for element in comment.syntax().siblings_with_tokens(dir) {
|
|
|
|
let token = match element.as_token() {
|
|
|
|
None => break,
|
|
|
|
Some(token) => token,
|
|
|
|
};
|
2019-07-19 04:56:47 -05:00
|
|
|
if let Some(c) = ast::Comment::cast(token.clone()) {
|
2019-03-30 05:25:53 -05:00
|
|
|
res = c
|
|
|
|
} else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
|
|
|
|
break;
|
2019-03-20 15:52:55 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
res
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use crate::mock_analysis::analysis_and_position;

    use super::*;

    /// Runs extend-selection repeatedly, starting from the empty selection at
    /// the `<|>` cursor marker inside `before`, and asserts that each
    /// successive selection's text equals the corresponding entry of `afters`.
    ///
    /// NOTE(review): fixture indentation inside raw strings below was
    /// reconstructed from upstream; confirm against the expected strings.
    fn do_check(before: &str, afters: &[&str]) {
        let (analysis, position) = analysis_and_position(&before);
        let before = analysis.file_text(position.file_id).unwrap();
        let range = TextRange::empty(position.offset);
        let mut frange = FileRange { file_id: position.file_id, range };

        for &after in afters {
            frange.range = analysis.extend_selection(frange).unwrap();
            let actual = &before[frange.range];
            assert_eq!(after, actual);
        }
    }

    #[test]
    fn test_extend_selection_arith() {
        do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]);
    }

    #[test]
    fn test_extend_selection_list() {
        do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]);
        do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]);
        do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]);
        do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]);
        do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]);
        do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]);

        do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]);
        do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]);
        do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]);

        do_check(r#"fn main() { (1, 2<|>) }"#, &["2", ", 2", "(1, 2)"]);

        do_check(
            r#"
const FOO: [usize; 2] = [
    22,
    <|>33,
]"#,
            &["33", "33,"],
        );

        do_check(
            r#"
const FOO: [usize; 2] = [
    22
    , 33<|>,
]"#,
            &["33", "33,"],
        );
    }

    #[test]
    fn test_extend_selection_start_of_the_line() {
        do_check(
            r#"
impl S {
<|>    fn foo() {

    }
}"#,
            &["    fn foo() {\n\n    }\n"],
        );
    }

    #[test]
    fn test_extend_selection_doc_comments() {
        do_check(
            r#"
struct A;

/// bla
/// bla
struct B {
    <|>
}
"#,
            &["\n    \n", "{\n    \n}", "/// bla\n/// bla\nstruct B {\n    \n}"],
        )
    }

    #[test]
    fn test_extend_selection_comments() {
        do_check(
            r#"
fn bar(){}

// fn foo() {
// 1 + <|>1
// }

// fn foo(){}
"#,
            &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"],
        );

        do_check(
            r#"
// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
// pub enum Direction {
// <|> Next,
// Prev
// }
"#,
            &[
                "//  Next,",
                "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n//  Next,\n// Prev\n// }",
            ],
        );

        do_check(
            r#"
/*
foo
_bar1<|>*/
"#,
            &["_bar1", "/*\nfoo\n_bar1*/"],
        );

        do_check(r#"//!<|>foo_2 bar"#, &["foo_2", "//!foo_2 bar"]);

        do_check(r#"/<|>/foo bar"#, &["//foo bar"]);
    }

    #[test]
    fn test_extend_selection_prefer_idents() {
        do_check(
            r#"
fn main() { foo<|>+bar;}
"#,
            &["foo", "foo+bar"],
        );
        do_check(
            r#"
fn main() { foo+<|>bar;}
"#,
            &["bar", "foo+bar"],
        );
    }

    #[test]
    fn test_extend_selection_prefer_lifetimes() {
        do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]);
        do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]);
    }

    #[test]
    fn test_extend_selection_select_first_word() {
        do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]);
        do_check(
            r#"
impl S {
fn foo() {
// hel<|>lo world
}
}
"#,
            &["hello", "// hello world"],
        );
    }

    #[test]
    fn test_extend_selection_string() {
        do_check(
            r#"
fn bar(){}

" fn f<|>oo() {"
"#,
            &["foo", "\" fn foo() {\""],
        );
    }

    #[test]
    fn test_extend_trait_bounds_list_in_where_clause() {
        do_check(
            r#"
fn foo<R>()
    where
        R: req::Request + 'static,
        R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static,
        R::Result: Serialize + 'static,
"#,
            &[
                "DeserializeOwned",
                "DeserializeOwned + ",
                "DeserializeOwned + panic::UnwindSafe + 'static",
                "R::Params: DeserializeOwned + panic::UnwindSafe + 'static",
                "R::Params: DeserializeOwned + panic::UnwindSafe + 'static,",
            ],
        );
        do_check(r#"fn foo<T>() where T: <|>Copy"#, &["Copy"]);
        do_check(r#"fn foo<T>() where T: <|>Copy + Display"#, &["Copy", "Copy + "]);
        do_check(r#"fn foo<T>() where T: <|>Copy +Display"#, &["Copy", "Copy +"]);
        do_check(r#"fn foo<T>() where T: <|>Copy+Display"#, &["Copy", "Copy+"]);
        do_check(r#"fn foo<T>() where T: Copy + <|>Display"#, &["Display", "+ Display"]);
        do_check(r#"fn foo<T>() where T: Copy + <|>Display + Sync"#, &["Display", "Display + "]);
        do_check(r#"fn foo<T>() where T: Copy +<|>Display"#, &["Display", "+Display"]);
    }

    #[test]
    fn test_extend_trait_bounds_list_inline() {
        do_check(r#"fn foo<T: <|>Copy>() {}"#, &["Copy"]);
        do_check(r#"fn foo<T: <|>Copy + Display>() {}"#, &["Copy", "Copy + "]);
        do_check(r#"fn foo<T: <|>Copy +Display>() {}"#, &["Copy", "Copy +"]);
        do_check(r#"fn foo<T: <|>Copy+Display>() {}"#, &["Copy", "Copy+"]);
        do_check(r#"fn foo<T: Copy + <|>Display>() {}"#, &["Display", "+ Display"]);
        do_check(r#"fn foo<T: Copy + <|>Display + Sync>() {}"#, &["Display", "Display + "]);
        do_check(r#"fn foo<T: Copy +<|>Display>() {}"#, &["Display", "+Display"]);
        do_check(
            r#"fn foo<T: Copy<|> + Display, U: Copy>() {}"#,
            &[
                "Copy",
                "Copy + ",
                "Copy + Display",
                "T: Copy + Display",
                "T: Copy + Display, ",
                "<T: Copy + Display, U: Copy>",
            ],
        );
    }

    #[test]
    fn test_extend_selection_on_tuple_in_type() {
        do_check(
            r#"fn main() { let _: (krate, <|>_crate_def_map, module_id) = (); }"#,
            &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
        );
        // white space variations
        do_check(
            r#"fn main() { let _: (krate,<|>_crate_def_map,module_id) = (); }"#,
            &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
        );
        do_check(
            r#"
fn main() { let _: (
    krate,
    _crate<|>_def_map,
    module_id
) = (); }"#,
            &[
                "_crate_def_map",
                "_crate_def_map,",
                "(\n    krate,\n    _crate_def_map,\n    module_id\n)",
            ],
        );
    }

    #[test]
    fn test_extend_selection_on_tuple_in_rvalue() {
        do_check(
            r#"fn main() { let var = (krate, _crate_def_map<|>, module_id); }"#,
            &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
        );
        // white space variations
        do_check(
            r#"fn main() { let var = (krate,_crate<|>_def_map,module_id); }"#,
            &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
        );
        do_check(
            r#"
fn main() { let var = (
    krate,
    _crate_def_map<|>,
    module_id
); }"#,
            &[
                "_crate_def_map",
                "_crate_def_map,",
                "(\n    krate,\n    _crate_def_map,\n    module_id\n)",
            ],
        );
    }

    #[test]
    fn test_extend_selection_on_tuple_pat() {
        do_check(
            r#"fn main() { let (krate, _crate_def_map<|>, module_id) = var; }"#,
            &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
        );
        // white space variations
        do_check(
            r#"fn main() { let (krate,_crate<|>_def_map,module_id) = var; }"#,
            &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
        );
        do_check(
            r#"
fn main() { let (
    krate,
    _crate_def_map<|>,
    module_id
) = var; }"#,
            &[
                "_crate_def_map",
                "_crate_def_map,",
                "(\n    krate,\n    _crate_def_map,\n    module_id\n)",
            ],
        );
    }

    #[test]
    fn extend_selection_inside_macros() {
        do_check(
            r#"macro_rules! foo { ($item:item) => {$item} }
                foo!{fn hello(na<|>me:usize){}}"#,
            &[
                "name",
                "name:usize",
                "(name:usize)",
                "fn hello(name:usize){}",
                "{fn hello(name:usize){}}",
                "foo!{fn hello(name:usize){}}",
            ],
        );
    }

    #[test]
    fn extend_selection_inside_recur_macros() {
        do_check(
            r#" macro_rules! foo2 { ($item:item) => {$item} }
                macro_rules! foo { ($item:item) => {foo2!($item);} }
                foo!{fn hello(na<|>me:usize){}}"#,
            &[
                "name",
                "name:usize",
                "(name:usize)",
                "fn hello(name:usize){}",
                "{fn hello(name:usize){}}",
                "foo!{fn hello(name:usize){}}",
            ],
        );
    }
}
|