streamline API
parent 62be91b82d
commit d52ee59a71
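In short: the `find_token_at_offset` free function and the `TokenAtOffset` re-export are removed from `ra_syntax::algo`; call sites now use the `token_at_offset` method on `SyntaxNode` directly, and `TokenAtOffset` (together with `WalkEvent`) is re-exported from the `ra_syntax` crate root. A minimal sketch of the call-site migration, assuming a downstream crate that depends on `ra_syntax` at this revision; the helper name is hypothetical and not part of the commit:

    use ra_syntax::{SourceFile, SyntaxToken, TextUnit, TokenAtOffset};

    /// Hypothetical helper (illustration only): the token to the left of `offset`.
    fn token_left_of(file: &SourceFile, offset: TextUnit) -> Option<SyntaxToken> {
        // Before: ra_syntax::algo::find_token_at_offset(file.syntax(), offset).left_biased()
        // After: the method on `SyntaxNode` is called directly.
        let tokens: TokenAtOffset<SyntaxToken> = file.syntax().token_at_offset(offset);
        tokens.left_biased()
    }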
@@ -2,8 +2,9 @@
 use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::{find_covering_element, find_node_at_offset},
     AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    TokenAtOffset,
 };
 use ra_text_edit::TextEditBuilder;

@@ -105,7 +106,7 @@ pub(crate) fn build(self) -> Option<Assist> {
     }

     pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
-        find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
+        self.source_file.syntax().token_at_offset(self.frange.range.start())
     }

     pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
@@ -3211,8 +3211,7 @@ fn foo() -> i32 {
     );
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
@@ -3232,8 +3231,7 @@ fn foo() -> i32 {

     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
@@ -1,6 +1,6 @@
 use hir::source_binder;
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset},
+    algo::{find_covering_element, find_node_at_offset},
     ast, AstNode, Parse, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
@@ -48,7 +48,7 @@ pub(super) fn new(
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
         let token =
-            find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
+            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
@@ -1,10 +1,10 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    algo::find_covering_element,
     ast::{self, AstNode, AstToken},
     Direction, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };

 use crate::{db::RootDatabase, FileRange};
@@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange

     if range.is_empty() {
         let offset = range.start();
-        let mut leaves = find_token_at_offset(root, offset);
+        let mut leaves = root.token_at_offset(offset);
         if leaves.clone().all(|it| it.kind() == WHITESPACE) {
             return Some(extend_ws(root, leaves.next()?, offset));
         }
@@ -1,5 +1,5 @@
 use ra_db::SourceDatabase;
-use ra_syntax::{algo::find_token_at_offset, ast, AstNode};
+use ra_syntax::{ast, AstNode};

 use crate::{db::RootDatabase, FilePosition, NavigationTarget, RangeInfo};

@@ -9,7 +9,7 @@ pub(crate) fn goto_type_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);

-    let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| {
+    let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| {
         token
             .parent()
             .ancestors()
@@ -1,9 +1,11 @@
-use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
+use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};

 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
-    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
@@ -1,11 +1,11 @@
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::find_node_at_offset,
     ast::{self, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
-    SyntaxToken, TextRange, TextUnit,
+    SyntaxToken, TextRange, TextUnit, TokenAtOffset,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};

@@ -14,7 +14,9 @@
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
-    let comment = find_token_at_offset(file.syntax(), position.offset)
+    let comment = file
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;

@@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
 }

 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-    let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
+    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
         TokenAtOffset::Between(l, r) => {
             assert!(r == *token);
             l
@@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let parse = db.parse(position.file_id);
     assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.'));

-    let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset)
+    let whitespace = parse
+        .tree()
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Whitespace::cast)?;

@@ -5,16 +5,9 @@
 use itertools::Itertools;

 use crate::{
-    AstNode, Direction, NodeOrToken, SourceFile, SyntaxElement, SyntaxNode, SyntaxNodePtr,
-    SyntaxToken, TextRange, TextUnit,
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
 };

-pub use rowan::TokenAtOffset;
-
-pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
-    node.token_at_offset(offset)
-}
-
 /// Returns ancestors of the node at the offset, sorted by length. This should
 /// do the right thing at an edge, e.g. when searching for expressions at `{
 /// <|>foo }` we will get the name reference instead of the whole block, which
@@ -24,7 +17,7 @@ pub fn ancestors_at_offset(
     node: &SyntaxNode,
     offset: TextUnit,
 ) -> impl Iterator<Item = SyntaxNode> {
-    find_token_at_offset(node, offset)
+    node.token_at_offset(offset)
         .map(|token| token.parent().ancestors())
         .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
 }
@@ -137,14 +130,14 @@ fn with_children(
     let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
     let new_node =
         rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children);
-    let new_file_node = parent.replace_with(new_node);
-    let file = SourceFile::new(new_file_node);
+    let new_root_node = parent.replace_with(new_node);
+    let new_root_node = SyntaxNode::new_root(new_root_node);

     // FIXME: use a more elegant way to re-fetch the node (#1185), make
     // `range` private afterwards
     let mut ptr = SyntaxNodePtr::new(parent);
     ptr.range = TextRange::offset_len(ptr.range().start(), len);
-    ptr.to_node(file.syntax()).to_owned()
+    ptr.to_node(&new_root_node)
 }

 fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
@@ -44,12 +44,10 @@
     syntax_error::{Location, SyntaxError, SyntaxErrorKind},
     syntax_node::{
         Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
-        WalkEvent,
     },
 };
-pub use ra_parser::SyntaxKind;
-pub use ra_parser::T;
-pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit};
+pub use ra_parser::{SyntaxKind, T};
+pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit, TokenAtOffset, WalkEvent};

 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
@@ -14,7 +14,6 @@
     Parse, SmolStr, SyntaxKind, TextUnit,
 };

-pub use rowan::WalkEvent;
 pub(crate) use rowan::{GreenNode, GreenToken};

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]