2019-09-26 14:08:44 -05:00
|
|
|
//! This module contains functions for editing syntax trees. As the trees are
|
|
|
|
//! immutable, all function here return a fresh copy of the tree, instead of
|
|
|
|
//! doing an in-place modification.
|
2019-09-28 11:50:16 -05:00
|
|
|
use std::{iter, ops::RangeInclusive};
|
2019-09-26 14:08:44 -05:00
|
|
|
|
|
|
|
use arrayvec::ArrayVec;
|
2019-09-30 02:05:12 -05:00
|
|
|
use rustc_hash::FxHashMap;
|
2019-09-26 14:08:44 -05:00
|
|
|
|
|
|
|
use crate::{
|
|
|
|
algo,
|
2019-09-28 12:09:57 -05:00
|
|
|
ast::{
|
|
|
|
self,
|
|
|
|
make::{self, tokens},
|
2019-09-30 01:56:20 -05:00
|
|
|
AstNode, TypeBoundsOwner,
|
2019-09-28 12:09:57 -05:00
|
|
|
},
|
2019-11-23 23:14:57 -06:00
|
|
|
AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind,
|
2019-09-28 11:50:16 -05:00
|
|
|
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
|
2019-10-12 14:07:47 -05:00
|
|
|
SyntaxNode, SyntaxToken, T,
|
2019-09-26 14:08:44 -05:00
|
|
|
};
|
|
|
|
|
2019-11-23 23:14:57 -06:00
|
|
|
impl ast::BinExpr {
|
|
|
|
#[must_use]
|
|
|
|
pub fn replace_op(&self, op: SyntaxKind) -> Option<ast::BinExpr> {
|
|
|
|
let op_node: SyntaxElement = self.op_details()?.0.into();
|
2020-01-15 11:14:49 -06:00
|
|
|
let to_insert: Option<SyntaxElement> = Some(make::token(op).into());
|
2020-01-15 11:30:23 -06:00
|
|
|
Some(replace_children(self, single_node(op_node), to_insert))
|
2019-11-23 23:14:57 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-26 14:08:44 -05:00
|
|
|
impl ast::FnDef {
    /// Returns a copy of this function with `body` as its block body.
    ///
    /// Replaces an existing body, or the trailing `;` of a body-less
    /// declaration; if neither exists, the body is appended at the end.
    #[must_use]
    pub fn with_body(&self, body: ast::Block) -> ast::FnDef {
        let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
        // Pick the element the new body replaces: the old body, or the `;`
        // that stands in for one.
        let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
            old_body.syntax().clone().into()
        } else if let Some(semi) = self.semicolon_token() {
            // `fn f();` -> `fn f() { ... }`: a space must separate `)` and `{`.
            to_insert.push(make::tokens::single_space().into());
            semi.into()
        } else {
            // Nothing to replace: append ` <body>` after the last child instead.
            to_insert.push(make::tokens::single_space().into());
            to_insert.push(body.syntax().clone().into());
            return insert_children(self, InsertPosition::Last, to_insert);
        };
        to_insert.push(body.syntax().clone().into());
        replace_children(self, single_node(old_body_or_semi), to_insert)
    }
}
|
|
|
|
|
2019-09-28 12:09:57 -05:00
|
|
|
impl ast::ItemList {
    /// Returns a copy of this item list with all `items` appended, first
    /// converting the list to multiline form if it currently sits on one line.
    #[must_use]
    pub fn append_items(&self, items: impl Iterator<Item = ast::ImplItem>) -> ast::ItemList {
        let mut res = self.clone();
        if !self.syntax().text().contains_char('\n') {
            res = res.make_multiline();
        }
        items.for_each(|it| res = res.append_item(it));
        res
    }

    /// Returns a copy of this item list with `item` appended after the last
    /// existing item (or right after `{` when the list is empty).
    #[must_use]
    pub fn append_item(&self, item: ast::ImplItem) -> ast::ItemList {
        // Choose the insertion anchor and the indentation to put before the item.
        let (indent, position) = match self.impl_items().last() {
            // Reuse the indentation of the last item.
            Some(it) => (
                leading_indent(it.syntax()).unwrap_or_default().to_string(),
                InsertPosition::After(it.syntax().clone().into()),
            ),
            None => match self.l_curly() {
                // Empty list: indent one level past the list's own indentation.
                // NOTE(review): this space literal looks collapsed in transit —
                // likely meant to be one indent level (4 spaces); confirm in VCS.
                Some(it) => (
                    " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(),
                    InsertPosition::After(it),
                ),
                None => return self.clone(),
            },
        };
        let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
        let to_insert: ArrayVec<[SyntaxElement; 2]> =
            [ws.ws().into(), item.syntax().clone().into()].into();
        insert_children(self, position, to_insert)
    }

    /// The `{` token of the list, if any.
    fn l_curly(&self) -> Option<SyntaxElement> {
        self.syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
    }

    /// Returns a copy of this item list with a newline (plus indentation)
    /// inserted after `{`, turning a single-line list into a multiline one.
    fn make_multiline(&self) -> ast::ItemList {
        let l_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) {
            Some(it) => it,
            None => return self.clone(),
        };
        let sibling = match l_curly.next_sibling_or_token() {
            Some(it) => it,
            None => return self.clone(),
        };
        // Whitespace directly after `{`, if any; bail out early when it already
        // contains a newline — the list is multiline already.
        let existing_ws = match sibling.as_token() {
            None => None,
            Some(tok) if tok.kind() != WHITESPACE => None,
            Some(ws) => {
                if ws.text().contains('\n') {
                    return self.clone();
                }
                Some(ws.clone())
            }
        };

        let indent = leading_indent(self.syntax()).unwrap_or_default();
        let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
        let to_insert = iter::once(ws.ws().into());
        match existing_ws {
            // No whitespace after `{`: insert the newline right after it.
            None => insert_children(self, InsertPosition::After(l_curly), to_insert),
            // Single-line whitespace after `{`: replace it with the newline.
            Some(ws) => replace_children(self, single_node(ws), to_insert),
        }
    }
}
|
|
|
|
|
2019-09-30 01:27:26 -05:00
|
|
|
impl ast::RecordFieldList {
    /// Returns a copy of this field list with `field` appended after the last field.
    #[must_use]
    pub fn append_field(&self, field: &ast::RecordField) -> ast::RecordFieldList {
        self.insert_field(InsertPosition::Last, field)
    }

    /// Returns a copy of this field list with `field` inserted at `position`.
    ///
    /// Takes care of separators: multiline lists get a newline plus
    /// indentation before the new field, single-line lists a plain space, and
    /// commas are added/dropped so the list stays well-formed.
    #[must_use]
    pub fn insert_field(
        &self,
        position: InsertPosition<&'_ ast::RecordField>,
        field: &ast::RecordField,
    ) -> ast::RecordFieldList {
        let is_multiline = self.syntax().text().contains_char('\n');
        // `ws` must outlive the `space` token borrowed from it, hence the
        // two-step binding.
        let ws;
        let space = if is_multiline {
            // NOTE(review): the whitespace after `{}` in this literal looks
            // collapsed in transit — likely one indent level (4 spaces); confirm.
            ws = tokens::WsBuilder::new(&format!(
                "\n{} ",
                leading_indent(self.syntax()).unwrap_or_default()
            ));
            ws.ws()
        } else {
            tokens::single_space()
        };

        // Build `<space> <field> ,`; the trailing comma may be popped (last
        // position, single line) or a leading comma prepended (see after_field!).
        let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
        to_insert.push(space.into());
        to_insert.push(field.syntax().clone().into());
        to_insert.push(tokens::comma().into());

        // Anchor right after the opening `{`; bails out of the whole function
        // when the list has no `{` (broken source).
        macro_rules! after_l_curly {
            () => {{
                let anchor = match self.l_curly() {
                    Some(it) => it,
                    None => return self.clone(),
                };
                InsertPosition::After(anchor)
            }};
        }

        // Anchor after a field's trailing comma, inserting a comma first when
        // the anchor field has none.
        macro_rules! after_field {
            ($anchor:expr) => {
                if let Some(comma) = $anchor
                    .syntax()
                    .siblings_with_tokens(Direction::Next)
                    .find(|it| it.kind() == T![,])
                {
                    InsertPosition::After(comma)
                } else {
                    // No trailing comma on the anchor: separate it from the
                    // new field with one.
                    to_insert.insert(0, tokens::comma().into());
                    InsertPosition::After($anchor.syntax().clone().into())
                }
            };
        }

        // Lower the typed position to a concrete syntax-element anchor.
        let position = match position {
            InsertPosition::First => after_l_curly!(),
            InsertPosition::Last => {
                if !is_multiline {
                    // don't insert comma before curly
                    to_insert.pop();
                }
                match self.fields().last() {
                    Some(it) => after_field!(it),
                    None => after_l_curly!(),
                }
            }
            InsertPosition::Before(anchor) => {
                InsertPosition::Before(anchor.syntax().clone().into())
            }
            InsertPosition::After(anchor) => after_field!(anchor),
        };

        insert_children(self, position, to_insert)
    }

    /// The `{` token of the list, if any.
    fn l_curly(&self) -> Option<SyntaxElement> {
        self.syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
    }
}
|
|
|
|
|
2019-09-30 01:56:20 -05:00
|
|
|
impl ast::TypeParam {
|
2019-09-30 02:08:28 -05:00
|
|
|
#[must_use]
|
2019-09-30 01:56:20 -05:00
|
|
|
pub fn remove_bounds(&self) -> ast::TypeParam {
|
|
|
|
let colon = match self.colon_token() {
|
|
|
|
Some(it) => it,
|
|
|
|
None => return self.clone(),
|
|
|
|
};
|
|
|
|
let end = match self.type_bound_list() {
|
|
|
|
Some(it) => it.syntax().clone().into(),
|
|
|
|
None => colon.clone().into(),
|
|
|
|
};
|
2020-01-15 10:54:25 -06:00
|
|
|
replace_children(self, colon.into()..=end, iter::empty())
|
2019-09-30 01:56:20 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-15 11:48:28 -06:00
|
|
|
impl ast::Path {
|
|
|
|
#[must_use]
|
|
|
|
pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path {
|
|
|
|
if let Some(old) = self.segment() {
|
|
|
|
return replace_children(
|
|
|
|
self,
|
|
|
|
single_node(old.syntax().clone()),
|
|
|
|
iter::once(segment.syntax().clone().into()),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
self.clone()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl ast::PathSegment {
|
|
|
|
#[must_use]
|
|
|
|
pub fn with_type_args(&self, type_args: ast::TypeArgList) -> ast::PathSegment {
|
|
|
|
self._with_type_args(type_args, false)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[must_use]
|
|
|
|
pub fn with_turbo_fish(&self, type_args: ast::TypeArgList) -> ast::PathSegment {
|
|
|
|
self._with_type_args(type_args, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn _with_type_args(&self, type_args: ast::TypeArgList, turbo: bool) -> ast::PathSegment {
|
|
|
|
if let Some(old) = self.type_arg_list() {
|
|
|
|
return replace_children(
|
|
|
|
self,
|
|
|
|
single_node(old.syntax().clone()),
|
|
|
|
iter::once(type_args.syntax().clone().into()),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
|
|
|
|
if turbo {
|
|
|
|
to_insert.push(make::token(T![::]).into());
|
|
|
|
}
|
|
|
|
to_insert.push(type_args.syntax().clone().into());
|
|
|
|
insert_children(self, InsertPosition::Last, to_insert)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-30 02:08:28 -05:00
|
|
|
#[must_use]
|
|
|
|
pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
|
2019-09-28 11:50:16 -05:00
|
|
|
N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap()
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Untyped worker for [`strip_attrs_and_docs`]: repeatedly deletes the first
/// attribute or comment child (plus the whitespace token that directly follows
/// it) until none remain.
fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
    while let Some(start) =
        node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT)
    {
        // Extend the deleted range over the trailing whitespace so no blank
        // gap is left behind.
        let end = match &start.next_sibling_or_token() {
            Some(el) if el.kind() == WHITESPACE => el.clone(),
            Some(_) | None => start.clone(),
        };
        node = algo::replace_children(&node, start..=end, &mut iter::empty());
    }
    node
}
|
|
|
|
|
2019-09-30 02:08:28 -05:00
|
|
|
#[must_use]
|
2019-09-30 02:05:12 -05:00
|
|
|
pub fn replace_descendants<N: AstNode, D: AstNode>(
|
|
|
|
parent: &N,
|
2020-01-15 11:30:23 -06:00
|
|
|
replacement_map: impl IntoIterator<Item = (D, D)>,
|
2019-09-30 02:05:12 -05:00
|
|
|
) -> N {
|
|
|
|
let map = replacement_map
|
2020-01-15 11:30:23 -06:00
|
|
|
.into_iter()
|
2019-09-30 02:05:12 -05:00
|
|
|
.map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
|
2020-01-10 11:26:18 -06:00
|
|
|
.collect::<FxHashMap<SyntaxElement, _>>();
|
|
|
|
let new_syntax = algo::replace_descendants(parent.syntax(), &|n| map.get(n).cloned());
|
2019-09-30 02:05:12 -05:00
|
|
|
N::cast(new_syntax).unwrap()
|
|
|
|
}
|
|
|
|
|
2019-10-12 14:07:47 -05:00
|
|
|
/// An indentation depth, counted in levels of four spaces each (see the
/// `* 4` factors in `IndentLevel`'s methods).
#[derive(Debug, Clone, Copy)]
pub struct IndentLevel(pub u8);
|
|
|
|
|
|
|
|
impl From<u8> for IndentLevel {
    /// Wraps a raw level count in an `IndentLevel`.
    fn from(level: u8) -> IndentLevel {
        IndentLevel(level)
    }
}
|
|
|
|
|
|
|
|
impl IndentLevel {
    /// Computes the indentation level of `node` from the whitespace preceding
    /// its first token: the characters after the last newline, divided by 4.
    pub fn from_node(node: &SyntaxNode) -> IndentLevel {
        let first_token = match node.first_token() {
            Some(it) => it,
            None => return IndentLevel(0),
        };
        for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) {
            let text = ws.syntax().text();
            if let Some(pos) = text.rfind('\n') {
                // NOTE(review): counts every char (tabs included) as one
                // column; the `as u8` cast truncates past level 255.
                let level = text[pos + 1..].chars().count() / 4;
                return IndentLevel(level as u8);
            }
        }
        IndentLevel(0)
    }

    /// Returns a copy of `node` re-indented `self` levels deeper.
    pub fn increase_indent<N: AstNode>(self, node: N) -> N {
        N::cast(self._increase_indent(node.syntax().clone())).unwrap()
    }

    fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
        // Append `4 * self.0` spaces to every whitespace token that contains a
        // newline; same-line gaps are left untouched.
        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
            .descendants_with_tokens()
            .filter_map(|el| el.into_token())
            .filter_map(ast::Whitespace::cast)
            .filter(|ws| {
                let text = ws.syntax().text();
                text.contains('\n')
            })
            .map(|ws| {
                (
                    ws.syntax().clone().into(),
                    // `{:width$}` with an empty string pads with `width` spaces.
                    make::tokens::whitespace(&format!(
                        "{}{:width$}",
                        ws.syntax().text(),
                        "",
                        width = self.0 as usize * 4
                    ))
                    .into(),
                )
            })
            .collect();
        algo::replace_descendants(&node, &|n| replacements.get(n).cloned())
    }

    /// Returns a copy of `node` re-indented `self` levels shallower.
    pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
        N::cast(self._decrease_indent(node.syntax().clone())).unwrap()
    }

    fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
        // Strip `4 * self.0` spaces after each newline inside newline-bearing
        // whitespace tokens; a no-op where the existing indent is shallower.
        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
            .descendants_with_tokens()
            .filter_map(|el| el.into_token())
            .filter_map(ast::Whitespace::cast)
            .filter(|ws| {
                let text = ws.syntax().text();
                text.contains('\n')
            })
            .map(|ws| {
                (
                    ws.syntax().clone().into(),
                    make::tokens::whitespace(
                        &ws.syntax()
                            .text()
                            .replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
                    )
                    .into(),
                )
            })
            .collect();
        algo::replace_descendants(&node, &|n| replacements.get(n).cloned())
    }
}
|
|
|
|
|
|
|
|
// FIXME: replace usages with IndentLevel above
/// Returns the text between the last preceding newline and `node`'s first
/// token — i.e. the indentation string of the line `node` starts on — or
/// `None` when no newline is found before leaving the current line.
fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
    for token in prev_tokens(node.first_token()?) {
        if let Some(ws) = ast::Whitespace::cast(token.clone()) {
            let ws_text = ws.text();
            if let Some(pos) = ws_text.rfind('\n') {
                return Some(ws_text[pos + 1..].into());
            }
        }
        // A non-whitespace token spanning a newline (e.g. a block comment)
        // means we've walked past the start of the line: stop looking.
        if token.text().contains('\n') {
            break;
        }
    }
    None
}
|
|
|
|
|
2019-10-12 14:07:47 -05:00
|
|
|
fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
|
|
|
|
iter::successors(Some(token), |token| token.prev_token())
|
|
|
|
}
|
|
|
|
|
2019-09-26 14:08:44 -05:00
|
|
|
#[must_use]
|
|
|
|
fn insert_children<N: AstNode>(
|
|
|
|
parent: &N,
|
|
|
|
position: InsertPosition<SyntaxElement>,
|
2020-01-15 11:30:23 -06:00
|
|
|
to_insert: impl IntoIterator<Item = SyntaxElement>,
|
2019-09-26 14:08:44 -05:00
|
|
|
) -> N {
|
2020-01-15 11:30:23 -06:00
|
|
|
let new_syntax = algo::insert_children(parent.syntax(), position, &mut to_insert.into_iter());
|
2019-09-26 14:08:44 -05:00
|
|
|
N::cast(new_syntax).unwrap()
|
|
|
|
}
|
|
|
|
|
2020-01-15 10:56:25 -06:00
|
|
|
/// Builds the degenerate one-element range `element..=element`, the shape
/// [`replace_children`] expects for a single-node replacement.
fn single_node(element: impl Into<SyntaxElement>) -> RangeInclusive<SyntaxElement> {
    let start: SyntaxElement = element.into();
    let end = start.clone();
    start..=end
}
|
|
|
|
|
2019-09-26 14:08:44 -05:00
|
|
|
#[must_use]
|
|
|
|
fn replace_children<N: AstNode>(
|
|
|
|
parent: &N,
|
|
|
|
to_replace: RangeInclusive<SyntaxElement>,
|
2020-01-15 11:30:23 -06:00
|
|
|
to_insert: impl IntoIterator<Item = SyntaxElement>,
|
2019-09-26 14:08:44 -05:00
|
|
|
) -> N {
|
2020-01-15 11:30:23 -06:00
|
|
|
let new_syntax =
|
|
|
|
algo::replace_children(parent.syntax(), to_replace, &mut to_insert.into_iter());
|
2019-09-26 14:08:44 -05:00
|
|
|
N::cast(new_syntax).unwrap()
|
|
|
|
}
|
2019-10-12 14:07:47 -05:00
|
|
|
|
|
|
|
#[test]
fn test_increase_indent() {
    // Two identical `_ => ()` arms in a freshly made match-arm list.
    let arm_list = {
        let arm = make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_unit());
        make::match_arm_list(vec![arm.clone(), arm])
    };
    // NOTE(review): the leading whitespace inside these expected-string
    // literals was mangled in transit; reconstructed to match
    // `_increase_indent`'s append-8-spaces behavior — confirm against VCS.
    assert_eq!(
        arm_list.syntax().to_string(),
        "{
    _ => (),
    _ => (),
}"
    );
    // IndentLevel(2) appends 2 * 4 = 8 spaces after every newline.
    let indented = IndentLevel(2).increase_indent(arm_list);
    assert_eq!(
        indented.syntax().to_string(),
        "{
            _ => (),
            _ => (),
        }"
    );
}
|