// rust/crates/ide/src/extend_selection.rs
use std::iter::successors;

use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{
    algo::{self, skip_trivia_token},
    ast::{self, AstNode, AstToken},
    Direction, NodeOrToken,
    SyntaxKind::{self, *},
    SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T,
};

use crate::FileRange;
// Feature: Expand and Shrink Selection
//
// Extends or shrinks the current selection to the encompassing syntactic construct
// (expression, statement, item, module, etc). It works with multiple cursors.
//
// This is a standard LSP feature and not a protocol extension.
//
// |===
// | Editor | Shortcut
//
// | VS Code | kbd:[Alt+Shift+→], kbd:[Alt+Shift+←]
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[]
/// Entry point: returns the next-larger selection for `frange`, falling back
/// to the input range when no extension is possible.
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
    let semantics = Semantics::new(db);
    let source_file = semantics.parse(frange.file_id);
    match try_extend_selection(&semantics, source_file.syntax(), frange) {
        Some(extended) => extended,
        None => frange.range,
    }
}
/// Core of selection extension: computes the next-larger range for `frange`,
/// or `None` when the selection cannot be grown any further.
fn try_extend_selection(
    sema: &Semantics<RootDatabase>,
    root: &SyntaxNode,
    frange: FileRange,
) -> Option<TextRange> {
    let range = frange.range;

    // Token kinds whose *contents* support an initial word-level extension.
    let string_kinds = [COMMENT, STRING, BYTE_STRING];
    // Node kinds whose children are delimiter-separated list items; items of
    // these lists get an intermediate "item + delimiter (+ whitespace)" step.
    let list_kinds = [
        RECORD_PAT_FIELD_LIST,
        MATCH_ARM_LIST,
        RECORD_FIELD_LIST,
        TUPLE_FIELD_LIST,
        RECORD_EXPR_FIELD_LIST,
        VARIANT_LIST,
        USE_TREE_LIST,
        GENERIC_PARAM_LIST,
        GENERIC_ARG_LIST,
        TYPE_BOUND_LIST,
        PARAM_LIST,
        ARG_LIST,
        ARRAY_EXPR,
        TUPLE_EXPR,
        TUPLE_TYPE,
        TUPLE_PAT,
        WHERE_CLAUSE,
    ];

    if range.is_empty() {
        // Zero-width cursor: pick the token at the offset.
        let offset = range.start();
        let mut leaves = root.token_at_offset(offset);
        if leaves.clone().all(|it| it.kind() == WHITESPACE) {
            // Cursor is in pure whitespace: select the element the
            // whitespace visually belongs to.
            return Some(extend_ws(root, leaves.next()?, offset));
        }
        let leaf_range = match leaves {
            TokenAtOffset::None => return None,
            TokenAtOffset::Single(l) => {
                if string_kinds.contains(&l.kind()) {
                    // Inside a comment/string: start from the word under the
                    // cursor if there is one, else the whole token.
                    extend_single_word_in_comment_or_string(&l, offset)
                        .unwrap_or_else(|| l.text_range())
                } else {
                    l.text_range()
                }
            }
            // Cursor between two tokens: prefer identifier-like tokens over
            // punctuation and whitespace.
            TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(),
        };
        return Some(leaf_range);
    };

    let node = match root.covering_element(range) {
        NodeOrToken::Token(token) => {
            if token.text_range() != range {
                // Partially-selected token: grow to the whole token first.
                return Some(token.text_range());
            }
            if let Some(comment) = ast::Comment::cast(token.clone()) {
                // Whole comment selected: try growing over adjacent comments.
                if let Some(range) = extend_comments(comment) {
                    return Some(range);
                }
            }
            token.parent()?
        }
        NodeOrToken::Node(node) => node,
    };

    // if we are in single token_tree, we maybe live in macro or attr
    if node.kind() == TOKEN_TREE {
        if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
            if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
                return Some(range);
            }
        }
    }

    if node.text_range() != range {
        return Some(node.text_range());
    }

    // Move to the shallowest node with this exact range so the list-item and
    // parent steps below see real siblings.
    let node = shallowest_node(&node);

    if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
        if let Some(range) = extend_list_item(&node) {
            return Some(range);
        }
    }

    node.parent().map(|it| it.text_range())
}
/// Extends a selection inside a macro call's token tree by mapping the covered
/// tokens into the macro expansion, finding their common ancestor there, and
/// then selecting every original token that still maps under that ancestor.
fn extend_tokens_from_range(
    sema: &Semantics<RootDatabase>,
    macro_call: ast::MacroCall,
    original_range: TextRange,
) -> Option<TextRange> {
    let src = macro_call.syntax().covering_element(original_range);
    let (first_token, last_token) = match src {
        NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?),
        NodeOrToken::Token(it) => (it.clone(), it),
    };

    // Normalize both edges onto non-trivia tokens.
    let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
    let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;

    // Shrink the edges inward until both tokens lie fully inside the original
    // selection (the covering element may overhang it on either side).
    while !original_range.contains_range(first_token.text_range()) {
        first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
    }
    while !original_range.contains_range(last_token.text_range()) {
        last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
    }

    // compute original mapped token range
    let extended = {
        let fst_expanded = sema.descend_into_macros_single(first_token.clone());
        let lst_expanded = sema.descend_into_macros_single(last_token.clone());
        let mut lca =
            algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
        lca = shallowest_node(&lca);
        // If the tokens already span the whole ancestor, step one level up so
        // the extension actually grows the selection.
        if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
            lca = lca.parent()?;
        }
        lca
    };

    // Compute parent node range
    // A token belongs to the extension iff its expanded parent still sits
    // inside (or is) the `extended` ancestor.
    let validate = |token: &SyntaxToken| -> bool {
        let expanded = sema.descend_into_macros_single(token.clone());
        let parent = match expanded.parent() {
            Some(it) => it,
            None => return false,
        };
        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
    };

    // Find the first and last text range under expanded parent
    let first = successors(Some(first_token), |token| {
        let token = token.prev_token()?;
        skip_trivia_token(token, Direction::Prev)
    })
    .take_while(validate)
    .last()?;

    let last = successors(Some(last_token), |token| {
        let token = token.next_token()?;
        skip_trivia_token(token, Direction::Next)
    })
    .take_while(validate)
    .last()?;

    let range = first.text_range().cover(last.text_range());
    // Only report a result that both contains and strictly enlarges the input.
    if range.contains_range(original_range) && original_range != range {
        Some(range)
    } else {
        None
    }
}
2020-01-06 14:35:19 -06:00
/// Find the shallowest node with same range, which allows us to traverse siblings.
fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
    let target = node.text_range();
    let mut shallowest = node.clone();
    // Climb while the parent covers exactly the same text.
    while let Some(parent) = shallowest.parent() {
        if parent.text_range() != target {
            break;
        }
        shallowest = parent;
    }
    shallowest
}
fn extend_single_word_in_comment_or_string(
2019-07-19 04:56:47 -05:00
leaf: &SyntaxToken,
2020-04-24 16:40:41 -05:00
offset: TextSize,
) -> Option<TextRange> {
2019-03-30 05:25:53 -05:00
let text: &str = leaf.text();
2019-07-20 04:58:27 -05:00
let cursor_position: u32 = (offset - leaf.text_range().start()).into();
let (before, after) = text.split_at(cursor_position as usize);
fn non_word_char(c: char) -> bool {
!(c.is_alphanumeric() || c == '_')
}
let start_idx = before.rfind(non_word_char)? as u32;
2019-06-03 09:27:51 -05:00
let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
2020-04-24 16:40:41 -05:00
let from: TextSize = (start_idx + 1).into();
let to: TextSize = (cursor_position + end_idx).into();
2020-04-24 16:40:41 -05:00
let range = TextRange::new(from, to);
if range.is_empty() {
None
} else {
2019-07-20 04:58:27 -05:00
Some(range + leaf.text_range().start())
}
}
2020-04-24 16:40:41 -05:00
fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange {
2019-03-30 05:25:53 -05:00
let ws_text = ws.text();
2020-04-24 16:40:41 -05:00
let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
2021-01-19 16:56:11 -06:00
let ws_suffix = &ws_text[suffix];
let ws_prefix = &ws_text[prefix];
if ws_text.contains('\n') && !ws_suffix.contains('\n') {
2019-03-30 05:25:53 -05:00
if let Some(node) = ws.next_sibling_or_token() {
let start = match ws_prefix.rfind('\n') {
2020-04-24 16:40:41 -05:00
Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32),
2019-07-20 04:58:27 -05:00
None => node.text_range().start(),
};
2019-07-20 04:58:27 -05:00
let end = if root.text().char_at(node.text_range().end()) == Some('\n') {
2020-04-24 16:40:41 -05:00
node.text_range().end() + TextSize::of('\n')
} else {
2019-07-20 04:58:27 -05:00
node.text_range().end()
};
2020-04-24 16:40:41 -05:00
return TextRange::new(start, end);
}
}
2019-07-20 04:58:27 -05:00
ws.text_range()
}
2019-12-20 14:14:30 -06:00
fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
2019-07-19 04:56:47 -05:00
return if priority(&r) > priority(&l) { r } else { l };
fn priority(n: &SyntaxToken) -> usize {
match n.kind() {
WHITESPACE => 0,
2020-12-15 12:23:51 -06:00
IDENT | T![self] | T![super] | T![crate] | LIFETIME_IDENT => 2,
_ => 1,
}
}
}
/// Extend list item selection to include nearby delimiter and whitespace.
fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
2019-03-30 05:25:53 -05:00
fn is_single_line_ws(node: &SyntaxToken) -> bool {
node.kind() == WHITESPACE && !node.text().contains('\n')
}
fn nearby_delimiter(
delimiter_kind: SyntaxKind,
node: &SyntaxNode,
dir: Direction,
) -> Option<SyntaxToken> {
2019-03-30 05:25:53 -05:00
node.siblings_with_tokens(dir)
.skip(1)
.find(|node| match node {
NodeOrToken::Node(_) => true,
NodeOrToken::Token(it) => !is_single_line_ws(it),
2019-03-30 05:25:53 -05:00
})
2019-07-19 11:05:34 -05:00
.and_then(|it| it.into_token())
.filter(|node| node.kind() == delimiter_kind)
}
let delimiter = match node.kind() {
TYPE_BOUND => T![+],
_ => T![,],
};
if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) {
// Include any following whitespace when delimiter is after list item.
let final_node = delimiter_node
2019-03-30 05:25:53 -05:00
.next_sibling_or_token()
2019-07-19 11:05:34 -05:00
.and_then(|it| it.into_token())
.filter(|node| is_single_line_ws(node))
.unwrap_or(delimiter_node);
2020-04-24 16:40:41 -05:00
return Some(TextRange::new(node.text_range().start(), final_node.text_range().end()));
}
if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) {
2020-04-24 16:40:41 -05:00
return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end()));
}
2019-03-30 05:25:53 -05:00
None
}
2019-04-02 02:23:18 -05:00
fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
2019-07-19 04:56:47 -05:00
let prev = adj_comments(&comment, Direction::Prev);
let next = adj_comments(&comment, Direction::Next);
if prev != next {
2020-04-24 16:40:41 -05:00
Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end()))
} else {
None
}
}
2019-07-19 04:56:47 -05:00
fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
let mut res = comment.clone();
2019-03-30 05:25:53 -05:00
for element in comment.syntax().siblings_with_tokens(dir) {
let token = match element.as_token() {
None => break,
Some(token) => token,
};
2019-07-19 04:56:47 -05:00
if let Some(c) = ast::Comment::cast(token.clone()) {
2019-03-30 05:25:53 -05:00
res = c
} else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
break;
}
}
res
}
#[cfg(test)]
mod tests {
    use crate::fixture;

    use super::*;

    /// Runs `extend_selection` repeatedly starting from the `$0` cursor in
    /// `before`, asserting that each step selects exactly the corresponding
    /// string in `afters`.
    fn do_check(before: &str, afters: &[&str]) {
        let (analysis, position) = fixture::position(before);
        let before = analysis.file_text(position.file_id).unwrap();
        let range = TextRange::empty(position.offset);
        let mut frange = FileRange { file_id: position.file_id, range };

        for &after in afters {
            frange.range = analysis.extend_selection(frange).unwrap();
            let actual = &before[frange.range];
            assert_eq!(after, actual);
        }
    }

    #[test]
    fn test_extend_selection_arith() {
        do_check(r#"fn foo() { $01 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]);
    }

    #[test]
    fn test_extend_selection_list() {
        do_check(r#"fn foo($0x: i32) {}"#, &["x", "x: i32"]);
        do_check(r#"fn foo($0x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]);
        do_check(r#"fn foo($0x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]);
        do_check(r#"fn foo(x: i32, $0y: i32) {}"#, &["y", "y: i32", ", y: i32"]);
        do_check(r#"fn foo(x: i32, $0y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]);
        do_check(r#"fn foo(x: i32,$0y: i32) {}"#, &["y", "y: i32", ",y: i32"]);

        do_check(r#"const FOO: [usize; 2] = [ 22$0 , 33];"#, &["22", "22 , "]);
        do_check(r#"const FOO: [usize; 2] = [ 22 , 33$0];"#, &["33", ", 33"]);
        do_check(r#"const FOO: [usize; 2] = [ 22 , 33$0 ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]);

        do_check(r#"fn main() { (1, 2$0) }"#, &["2", ", 2", "(1, 2)"]);
        do_check(
            r#"
const FOO: [usize; 2] = [
22,
$033,
]"#,
            &["33", "33,"],
        );
        do_check(
            r#"
const FOO: [usize; 2] = [
22
, 33$0,
]"#,
            &["33", "33,"],
        );
    }

    #[test]
    fn test_extend_selection_start_of_the_line() {
        do_check(
            r#"
impl S {
$0 fn foo() {
}
}"#,
            &[" fn foo() {\n\n }\n"],
        );
    }

    #[test]
    fn test_extend_selection_doc_comments() {
        do_check(
            r#"
struct A;
/// bla
/// bla
struct B {
$0
}
"#,
            &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"],
        )
    }

    #[test]
    fn test_extend_selection_comments() {
        do_check(
            r#"
fn bar(){}
// fn foo() {
// 1 + $01
// }
// fn foo(){}
"#,
            &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"],
        );
        do_check(
            r#"
// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
// pub enum Direction {
// $0 Next,
// Prev
// }
"#,
            &[
                "// Next,",
                "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }",
            ],
        );
        do_check(
            r#"
/*
foo
_bar1$0*/
"#,
            &["_bar1", "/*\nfoo\n_bar1*/"],
        );

        do_check(r#"//!$0foo_2 bar"#, &["foo_2", "//!foo_2 bar"]);

        do_check(r#"/$0/foo bar"#, &["//foo bar"]);
    }

    #[test]
    fn test_extend_selection_prefer_idents() {
        do_check(
            r#"
fn main() { foo$0+bar;}
"#,
            &["foo", "foo+bar"],
        );
        do_check(
            r#"
fn main() { foo+$0bar;}
"#,
            &["bar", "foo+bar"],
        );
    }

    #[test]
    fn test_extend_selection_prefer_lifetimes() {
        do_check(r#"fn foo<$0'a>() {}"#, &["'a", "<'a>"]);
        do_check(r#"fn foo<'a$0>() {}"#, &["'a", "<'a>"]);
    }

    #[test]
    fn test_extend_selection_select_first_word() {
        do_check(r#"// foo bar b$0az quxx"#, &["baz", "// foo bar baz quxx"]);
        do_check(
            r#"
impl S {
fn foo() {
// hel$0lo world
}
}
"#,
            &["hello", "// hello world"],
        );
    }

    #[test]
    fn test_extend_selection_string() {
        do_check(
            r#"
fn bar(){}
" fn f$0oo() {"
"#,
            &["foo", "\" fn foo() {\""],
        );
    }

    #[test]
    fn test_extend_trait_bounds_list_in_where_clause() {
        do_check(
            r#"
fn foo<R>()
where
R: req::Request + 'static,
R::Params: DeserializeOwned$0 + panic::UnwindSafe + 'static,
R::Result: Serialize + 'static,
"#,
            &[
                "DeserializeOwned",
                "DeserializeOwned + ",
                "DeserializeOwned + panic::UnwindSafe + 'static",
                "R::Params: DeserializeOwned + panic::UnwindSafe + 'static",
                "R::Params: DeserializeOwned + panic::UnwindSafe + 'static,",
            ],
        );
        do_check(r#"fn foo<T>() where T: $0Copy"#, &["Copy"]);
        do_check(r#"fn foo<T>() where T: $0Copy + Display"#, &["Copy", "Copy + "]);
        do_check(r#"fn foo<T>() where T: $0Copy +Display"#, &["Copy", "Copy +"]);
        do_check(r#"fn foo<T>() where T: $0Copy+Display"#, &["Copy", "Copy+"]);
        do_check(r#"fn foo<T>() where T: Copy + $0Display"#, &["Display", "+ Display"]);
        do_check(r#"fn foo<T>() where T: Copy + $0Display + Sync"#, &["Display", "Display + "]);
        do_check(r#"fn foo<T>() where T: Copy +$0Display"#, &["Display", "+Display"]);
    }

    #[test]
    fn test_extend_trait_bounds_list_inline() {
        do_check(r#"fn foo<T: $0Copy>() {}"#, &["Copy"]);
        do_check(r#"fn foo<T: $0Copy + Display>() {}"#, &["Copy", "Copy + "]);
        do_check(r#"fn foo<T: $0Copy +Display>() {}"#, &["Copy", "Copy +"]);
        do_check(r#"fn foo<T: $0Copy+Display>() {}"#, &["Copy", "Copy+"]);
        do_check(r#"fn foo<T: Copy + $0Display>() {}"#, &["Display", "+ Display"]);
        do_check(r#"fn foo<T: Copy + $0Display + Sync>() {}"#, &["Display", "Display + "]);
        do_check(r#"fn foo<T: Copy +$0Display>() {}"#, &["Display", "+Display"]);
        do_check(
            r#"fn foo<T: Copy$0 + Display, U: Copy>() {}"#,
            &[
                "Copy",
                "Copy + ",
                "Copy + Display",
                "T: Copy + Display",
                "T: Copy + Display, ",
                "<T: Copy + Display, U: Copy>",
            ],
        );
    }

    #[test]
    fn test_extend_selection_on_tuple_in_type() {
        do_check(
            r#"fn main() { let _: (krate, $0_crate_def_map, module_id) = (); }"#,
            &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
        );
        // white space variations
        do_check(
            r#"fn main() { let _: (krate,$0_crate_def_map,module_id) = (); }"#,
            &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
        );
        do_check(
            r#"
fn main() { let _: (
krate,
_crate$0_def_map,
module_id
) = (); }"#,
            &[
                "_crate_def_map",
                "_crate_def_map,",
                "(\n krate,\n _crate_def_map,\n module_id\n)",
            ],
        );
    }

    #[test]
    fn test_extend_selection_on_tuple_in_rvalue() {
        do_check(
            r#"fn main() { let var = (krate, _crate_def_map$0, module_id); }"#,
            &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
        );
        // white space variations
        do_check(
            r#"fn main() { let var = (krate,_crate$0_def_map,module_id); }"#,
            &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
        );
        do_check(
            r#"
fn main() { let var = (
krate,
_crate_def_map$0,
module_id
); }"#,
            &[
                "_crate_def_map",
                "_crate_def_map,",
                "(\n krate,\n _crate_def_map,\n module_id\n)",
            ],
        );
    }

    #[test]
    fn test_extend_selection_on_tuple_pat() {
        do_check(
            r#"fn main() { let (krate, _crate_def_map$0, module_id) = var; }"#,
            &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
        );
        // white space variations
        do_check(
            r#"fn main() { let (krate,_crate$0_def_map,module_id) = var; }"#,
            &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
        );
        do_check(
            r#"
fn main() { let (
krate,
_crate_def_map$0,
module_id
) = var; }"#,
            &[
                "_crate_def_map",
                "_crate_def_map,",
                "(\n krate,\n _crate_def_map,\n module_id\n)",
            ],
        );
    }

    #[test]
    fn extend_selection_inside_macros() {
        do_check(
            r#"macro_rules! foo { ($item:item) => {$item} }
foo!{fn hello(na$0me:usize){}}"#,
            &[
                "name",
                "name:usize",
                "(name:usize)",
                "fn hello(name:usize){}",
                "{fn hello(name:usize){}}",
                "foo!{fn hello(name:usize){}}",
            ],
        );
    }

    #[test]
    fn extend_selection_inside_recur_macros() {
        do_check(
            r#" macro_rules! foo2 { ($item:item) => {$item} }
macro_rules! foo { ($item:item) => {foo2!($item);} }
foo!{fn hello(na$0me:usize){}}"#,
            &[
                "name",
                "name:usize",
                "(name:usize)",
                "fn hello(name:usize){}",
                "{fn hello(name:usize){}}",
                "foo!{fn hello(name:usize){}}",
            ],
        );
    }
}