Run cargo fix
parent 6605dbaff3
commit 171c176833
crates/ra_syntax/src
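The hunks below all make the same mechanical change: bare 2015-edition paths (such as `use lexer::ptr::Ptr;` or `::algo::generate`) become explicit `crate::` paths, which is the kind of rewrite `cargo fix` applies for the 2018 edition's path rules. A minimal, self-contained sketch of the before/after pattern, using a hypothetical module layout rather than the real crate:

// Hypothetical two-module crate, used only to illustrate the rewrite shown in the hunks below.
mod lexer {
    pub mod ptr {
        pub struct Ptr;
    }
}

// Rust 2015 style (before): `use lexer::ptr::Ptr;` resolved from the crate root implicitly.
// Rust 2018 style (after): the crate root is named explicitly with `crate::`.
use crate::lexer::ptr::Ptr;

fn main() {
    let _ptr = Ptr; // same item, now reached through an explicit `crate::` path
}

The same prefix is applied uniformly throughout the diff, whether the import is a `use` list, a single path, or an inline `::`-prefixed call.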
@@ -1,7 +1,7 @@
 pub mod walk;
 pub mod visit;
 
-use {
+use crate::{
     SyntaxNodeRef, TextUnit, TextRange,
     text_utils::{contains_offset_nonstrict, is_subrange},
 };
@@ -1,5 +1,5 @@
 use std::marker::PhantomData;
-use {SyntaxNodeRef, AstNode};
+use crate::{SyntaxNodeRef, AstNode};
 
 
 pub fn visitor<'a, T>() -> impl Visitor<'a, Output=T> {
@@ -1,4 +1,4 @@
-use {
+use crate::{
     SyntaxNodeRef,
     algo::generate,
 };
@@ -1,7 +1,7 @@
 // This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
 // Do not edit manually
 
-use {
+use crate::{
     ast,
     SyntaxNodeRef, AstNode,
     SyntaxKind::*,
@@ -4,7 +4,7 @@ use std::marker::PhantomData;
 
 use itertools::Itertools;
 
-use {
+use crate::{
     SmolStr, SyntaxNodeRef, SyntaxKind::*,
     yellow::{RefRoot, SyntaxNodeChildren},
 };
@@ -31,7 +31,7 @@ mod type_args;
 mod type_params;
 mod types;
 
-use {
+use crate::{
     token_set::TokenSet,
     parser_api::{Marker, CompletedMarker, Parser},
     SyntaxKind::{self, *},
@@ -1,6 +1,6 @@
-use lexer::ptr::Ptr;
+use crate::lexer::ptr::Ptr;
 
-use SyntaxKind::{self, *};
+use crate::SyntaxKind::{self, *};
 
 pub(crate) fn scan_shebang(ptr: &mut Ptr) -> bool {
     if ptr.at_str("!/") {
@@ -4,7 +4,7 @@ mod numbers;
 mod ptr;
 mod strings;
 
-use {
+use crate::{
     SyntaxKind::{self, *},
     TextUnit,
 };
@@ -1,7 +1,7 @@
-use lexer::classes::*;
-use lexer::ptr::Ptr;
+use crate::lexer::classes::*;
+use crate::lexer::ptr::Ptr;
 
-use SyntaxKind::{self, *};
+use crate::SyntaxKind::{self, *};
 
 pub(crate) fn scan_number(c: char, ptr: &mut Ptr) -> SyntaxKind {
     if c == '0' {
@@ -1,4 +1,4 @@
-use TextUnit;
+use crate::TextUnit;
 
 use std::str::Chars;
 
@@ -1,6 +1,6 @@
-use SyntaxKind::{self, *};
+use crate::SyntaxKind::{self, *};
 
-use lexer::ptr::Ptr;
+use crate::lexer::ptr::Ptr;
 
 pub(crate) fn is_string_literal_start(c: char, c1: Option<char>, c2: Option<char>) -> bool {
     match (c, c1, c2) {
@@ -46,7 +46,7 @@ mod yellow;
 pub mod utils;
 pub mod text_utils;
 
-pub use {
+pub use crate::{
     rowan::{SmolStr, TextRange, TextUnit},
     ast::AstNode,
     lexer::{tokenize, Token},
@@ -55,7 +55,7 @@ pub use {
     reparsing::AtomEdit,
 };
 
-use {
+use crate::{
     yellow::{GreenNode},
 };
 
@@ -1,4 +1,4 @@
-use {
+use crate::{
     token_set::TokenSet,
     parser_impl::ParserImpl,
     SyntaxKind::{self, ERROR},
@@ -8,7 +8,7 @@
 //! `start node`, `finish node`, and `FileBuilder` converts
 //! this stream to a real tree.
 use std::mem;
-use {
+use crate::{
     TextUnit, TextRange, SmolStr,
     lexer::Token,
     parser_impl::Sink,
@@ -1,4 +1,4 @@
-use {lexer::Token, SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit};
+use crate::{lexer::Token, SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit};
 
 use std::ops::{Add, AddAssign};
 
@@ -3,7 +3,7 @@ mod input;
 
 use std::cell::Cell;
 
-use {
+use crate::{
     TextUnit, SmolStr,
     lexer::Token,
     parser_api::Parser,
@@ -13,7 +13,7 @@ use {
     },
 };
 
-use SyntaxKind::{self, EOF, TOMBSTONE};
+use crate::SyntaxKind::{self, EOF, TOMBSTONE};
 
 pub(crate) trait Sink {
     type Tree;
@@ -1,14 +1,14 @@
-use algo;
-use grammar;
-use lexer::{tokenize, Token};
-use yellow::{self, GreenNode, SyntaxNodeRef, SyntaxError};
-use parser_impl;
-use parser_api::Parser;
-use {
+use crate::algo;
+use crate::grammar;
+use crate::lexer::{tokenize, Token};
+use crate::yellow::{self, GreenNode, SyntaxNodeRef, SyntaxError};
+use crate::parser_impl;
+use crate::parser_api::Parser;
+use crate::{
     TextUnit, TextRange,
     SyntaxKind::*,
 };
-use text_utils::replace_range;
+use crate::text_utils::replace_range;
 
 #[derive(Debug, Clone)]
 pub struct AtomEdit {
@@ -1,7 +1,7 @@
 mod generated;
 
 use std::fmt;
-use SyntaxKind::*;
+use crate::SyntaxKind::*;
 
 pub use self::generated::SyntaxKind;
 
@@ -1,4 +1,4 @@
-use {TextRange, TextUnit};
+use crate::{TextRange, TextUnit};
 
 pub fn contains_offset_nonstrict(range: TextRange, offset: TextUnit) -> bool {
     range.start() <= offset && offset <= range.end()
@@ -1,4 +1,4 @@
-use SyntaxKind;
+use crate::SyntaxKind;
 
 #[derive(Clone, Copy)]
 pub(crate) struct TokenSet(pub(crate) u128);
@@ -29,7 +29,7 @@ macro_rules! token_set_union {
 
 #[test]
 fn token_set_works_for_tokens() {
-    use SyntaxKind::*;
+    use crate::SyntaxKind::*;
     let ts = token_set! { EOF, SHEBANG };
     assert!(ts.contains(EOF));
     assert!(ts.contains(SHEBANG));
@@ -1,5 +1,5 @@
 use std::fmt::Write;
-use {
+use crate::{
     algo::walk::{walk, WalkEvent},
     SyntaxKind, File, SyntaxNodeRef
 };
@@ -1,5 +1,5 @@
 use rowan::GreenNodeBuilder;
-use {
+use crate::{
     TextUnit, SmolStr,
     parser_impl::Sink,
     yellow::{GreenNode, SyntaxError, RaTypes},
@@ -6,7 +6,7 @@ use std::{
     hash::{Hash, Hasher},
 };
 use rowan::Types;
-use {SyntaxKind, TextUnit, TextRange, SmolStr};
+use crate::{SyntaxKind, TextUnit, TextRange, SmolStr};
 use self::syntax_text::SyntaxText;
 
 pub use rowan::{TreeRoot};
@@ -70,16 +70,16 @@ impl<'a> SyntaxNodeRef<'a> {
         self.0.leaf_text()
     }
     pub fn ancestors(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
-        ::algo::generate(Some(self), |&node| node.parent())
+        crate::algo::generate(Some(self), |&node| node.parent())
     }
     pub fn descendants(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
-        ::algo::walk::walk(self).filter_map(|event| match event {
-            ::algo::walk::WalkEvent::Enter(node) => Some(node),
-            ::algo::walk::WalkEvent::Exit(_) => None,
+        crate::algo::walk::walk(self).filter_map(|event| match event {
+            crate::algo::walk::WalkEvent::Enter(node) => Some(node),
+            crate::algo::walk::WalkEvent::Exit(_) => None,
         })
     }
     pub fn siblings(self, direction: Direction) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
-        ::algo::generate(Some(self), move |&node| match direction {
+        crate::algo::generate(Some(self), move |&node| match direction {
             Direction::Next => node.next_sibling(),
             Direction::Prev => node.prev_sibling(),
         })
@@ -156,7 +156,7 @@ impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
 
 
 fn has_short_text(kind: SyntaxKind) -> bool {
-    use SyntaxKind::*;
+    use crate::SyntaxKind::*;
     match kind {
         IDENT | LIFETIME | INT_NUMBER | FLOAT_NUMBER => true,
         _ => false,
@@ -2,7 +2,7 @@ use std::{
     fmt, ops,
 };
 
-use {
+use crate::{
     SyntaxNodeRef, TextRange, TextUnit,
     text_utils::{intersect, contains_offset_nonstrict},
 };