Merge branch 'master' into add-type-lenses

commit 31aef808d9

Cargo.lock (generated): 22 lines changed
@@ -263,11 +263,6 @@ dependencies = [
  "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
-[[package]]
-name = "colosseum"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
 [[package]]
 name = "console"
 version = "0.7.7"
@@ -484,6 +479,11 @@ dependencies = [
  "yansi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "format-buf"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "fs_extra"
 version = "1.1.0"
@@ -1124,6 +1124,7 @@ name = "ra_assists"
 version = "0.1.0"
 dependencies = [
  "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "once_cell 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1211,6 +1212,7 @@ dependencies = [
 name = "ra_ide_api"
 version = "0.1.0"
 dependencies = [
+ "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1313,7 +1315,7 @@ dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
- "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "test_utils 0.1.0",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1584,11 +1586,9 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.5.6"
+version = "0.6.0-pre.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2150,7 +2150,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
 "checksum clicolors-control 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "73abfd4c73d003a674ce5d2933fca6ce6c42480ea84a5ffe0a2dc39ed56300f9"
 "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
-"checksum colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "370c83b49aedf022ee27942e8ae1d9de1cf40dc9653ee6550e4455d08f6406f9"
 "checksum console 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8ca57c2c14b8a2bf3105bc9d15574aad80babf6a9c44b1058034cdf8bd169628"
 "checksum cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "33f07976bb6821459632d7a18d97ccca005cb5c552f251f822c7c1781c1d7035"
 "checksum crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b"
@@ -2177,6 +2176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum filetime 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2f8c63033fcba1f51ef744505b3cad42510432b904c062afa67ad7ece008429d"
 "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33"
 "checksum flexi_logger 0.13.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d3c4470d1ff8446baa0c13202646722886dde8dc4c5d33cb8242d70ece79d5"
+"checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53"
 "checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
 "checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6"
 "checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0"
@@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
 "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
 "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
-"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be"
+"checksum rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0eeee40f1a2724b7d0d9fa5f73a7804cd2f4c91b37ba9f785d429f31819d60df"
 "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
 "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
@@ -5,6 +5,7 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
 [dependencies]
+format-buf = "1.0.0"
 once_cell = "0.2.0"
 join_to_string = "0.1.3"
 itertools = "0.8.0"
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use hir::db::HirDatabase;
 use join_to_string::join;
 use ra_syntax::{
@@ -19,7 +18,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let mut buf = String::new();
         buf.push_str("\n\nimpl");
         if let Some(type_params) = &type_params {
-            write!(buf, "{}", type_params.syntax()).unwrap();
+            format!(buf, "{}", type_params.syntax());
         }
         buf.push_str(" ");
         buf.push_str(name.text().as_str());
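Note: the hunks above swap `write!(buf, ...).unwrap()` for the `format!` macro from
the newly added format-buf crate, which appends into an existing `String` and cannot
fail, so there is no `Result` to unwrap. A minimal sketch, assuming format-buf 1.0's
macro as used in this diff:

    use format_buf::format;

    fn main() {
        let mut buf = String::from("impl");
        format!(buf, "<{}>", "T"); // appends in place; no unwrap() needed
        assert_eq!(buf, "impl<T>");
    }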
@@ -2,8 +2,9 @@ use hir::db::HirDatabase;
 use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::{find_covering_element, find_node_at_offset},
     AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    TokenAtOffset,
 };
 use ra_text_edit::TextEditBuilder;
 
@@ -105,7 +106,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
     }
 
     pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
-        find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
+        self.source_file.syntax().token_at_offset(self.frange.range.start())
     }
 
     pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
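Note: this is one instance of a repo-wide migration from the free function
`ra_syntax::algo::find_token_at_offset(node, offset)` to the inherent
`node.token_at_offset(offset)` method that rowan 0.6 provides, with `TokenAtOffset`
now re-exported from rowan. A sketch of the new call shape:

    use ra_syntax::{AstNode, SourceFile, TextUnit, TokenAtOffset};

    fn token_under_cursor(file: &SourceFile, offset: TextUnit) {
        // TokenAtOffset is a small enum: None, Single(token), or
        // Between(left, right) when the offset sits on a token boundary.
        match file.syntax().token_at_offset(offset) {
            TokenAtOffset::Single(t) => println!("on {:?}", t.kind()),
            TokenAtOffset::Between(l, r) => println!("between {:?} and {:?}", l.kind(), r.kind()),
            TokenAtOffset::None => {}
        }
    }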
@@ -4,7 +4,10 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
+    algo::{insert_children, replace_children},
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement,
+    SyntaxKind::*,
+    T,
 };
 use ra_text_edit::TextEditBuilder;
 
@@ -38,7 +41,7 @@ impl<N: AstNode> AstEditor<N> {
         position: InsertPosition<SyntaxElement>,
         to_insert: impl Iterator<Item = SyntaxElement>,
     ) -> N {
-        let new_syntax = self.ast().syntax().insert_children(position, to_insert);
+        let new_syntax = insert_children(self.ast().syntax(), position, to_insert);
         N::cast(new_syntax).unwrap()
     }
 
@@ -48,7 +51,7 @@ impl<N: AstNode> AstEditor<N> {
         to_delete: RangeInclusive<SyntaxElement>,
         to_insert: impl Iterator<Item = SyntaxElement>,
     ) -> N {
-        let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
+        let new_syntax = replace_children(self.ast().syntax(), to_delete, to_insert);
         N::cast(new_syntax).unwrap()
     }
 
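Note: tree editing moves off `SyntaxNode` methods into free functions in
`ra_syntax::algo` (their definitions appear later in this diff). Because rowan trees
are immutable, both functions return a freshly built node. A hypothetical append
helper as a usage sketch:

    use ra_syntax::{algo, InsertPosition, SyntaxElement, SyntaxNode};

    fn append(parent: &SyntaxNode, elements: Vec<SyntaxElement>) -> SyntaxNode {
        // insert_children re-creates `parent` with `elements` added at the end.
        algo::insert_children(parent, InsertPosition::Last, elements.into_iter())
    }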
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use hir::db::HirDatabase;
 use ra_syntax::{
     ast::{self, AstNode},
@@ -37,7 +36,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
             buf.push_str("let var_name = ");
            TextUnit::of_str("let ")
        };
-        write!(buf, "{}", expr.syntax()).unwrap();
+        format!(buf, "{}", expr.syntax());
         let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
         let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
             Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())
@@ -2,7 +2,7 @@ use hir::db::HirDatabase;
 use ra_syntax::{
     ast,
     ast::{AstNode, AstToken, IfExpr, MatchArm},
-    SyntaxElement, TextUnit,
+    TextUnit,
 };
 
 use crate::{Assist, AssistCtx, AssistId};
@@ -18,10 +18,10 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
 
     ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
         edit.target(guard.syntax().text_range());
-        let offseting_amount = match &space_before_guard {
-            Some(SyntaxElement::Token(tok)) => {
+        let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) {
+            Some(tok) => {
                 if let Some(_) = ast::Whitespace::cast(tok.clone()) {
-                    let ele = space_before_guard.unwrap().text_range();
+                    let ele = tok.text_range();
                     edit.delete(ele);
                     ele.len()
                 } else {
@@ -266,9 +266,14 @@ impl SourceAnalyzer {
         self.infer.as_ref()?.field_resolution(expr_id)
     }
 
-    pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> {
+    pub fn resolve_struct_literal(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> {
         let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?;
-        self.infer.as_ref()?.variant_resolution(expr_id)
+        self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
     }
 
+    pub fn resolve_struct_pattern(&self, struct_pat: &ast::StructPat) -> Option<crate::VariantDef> {
+        let pat_id = self.body_source_map.as_ref()?.node_pat(&struct_pat.clone().into())?;
+        self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
+    }
+
     pub fn resolve_macro_call(
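Note: `resolve_variant` is split into `resolve_struct_literal` (expressions) and the
new `resolve_struct_pattern` (patterns), matching the expression/pattern split in
inference below. A usage sketch (the analyzer and AST nodes are assumed to come from
the surrounding ra_ide_api context):

    fn variants(
        analyzer: &hir::SourceAnalyzer,
        lit: &ra_syntax::ast::StructLit,
        pat: &ra_syntax::ast::StructPat,
    ) {
        let from_expr = analyzer.resolve_struct_literal(lit); // Option<VariantDef>
        let from_pat = analyzer.resolve_struct_pattern(pat);  // Option<VariantDef>
        let _ = (from_expr, from_pat);
    }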
@@ -472,7 +472,7 @@ impl Ty {
 
     /// Returns the type parameters of this type if it has some (i.e. is an ADT
     /// or function); so if `self` is `Option<u32>`, this returns the `u32`.
-    fn substs(&self) -> Option<Substs> {
+    pub fn substs(&self) -> Option<Substs> {
         match self {
             Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()),
             _ => None,
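Note: `Ty::substs` is made `pub` so the completion code later in this diff can read a
type's parameter substitutions directly instead of matching on `Ty::Apply` itself. A
sketch of the call shape used at both completion call sites:

    use hir::{Substs, Ty};

    fn parameters_of(ty: &Ty) -> Substs {
        // Ty::Apply covers ADTs and functions; anything else has no
        // parameters, so fall back to the empty substitution.
        ty.substs().unwrap_or_else(Substs::empty)
    }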
@@ -113,7 +113,8 @@ pub struct InferenceResult {
     method_resolutions: FxHashMap<ExprId, Function>,
     /// For each field access expr, records the field it resolves to.
     field_resolutions: FxHashMap<ExprId, StructField>,
-    variant_resolutions: FxHashMap<ExprId, VariantDef>,
+    /// For each struct literal, records the variant it resolves to.
+    variant_resolutions: FxHashMap<ExprOrPatId, VariantDef>,
     /// For each associated item record what it resolves to
     assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
     diagnostics: Vec<InferenceDiagnostic>,
@@ -128,8 +129,11 @@ impl InferenceResult {
     pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> {
         self.field_resolutions.get(&expr).copied()
     }
-    pub fn variant_resolution(&self, expr: ExprId) -> Option<VariantDef> {
-        self.variant_resolutions.get(&expr).copied()
+    pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantDef> {
+        self.variant_resolutions.get(&id.into()).copied()
     }
+    pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantDef> {
+        self.variant_resolutions.get(&id.into()).copied()
+    }
     pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<ImplItem> {
         self.assoc_resolutions.get(&id.into()).copied()
@@ -218,8 +222,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         self.result.field_resolutions.insert(expr, field);
     }
 
-    fn write_variant_resolution(&mut self, expr: ExprId, variant: VariantDef) {
-        self.result.variant_resolutions.insert(expr, variant);
+    fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantDef) {
+        self.result.variant_resolutions.insert(id, variant);
     }
 
     fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: ImplItem) {
@@ -678,8 +682,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         subpats: &[FieldPat],
         expected: &Ty,
         default_bm: BindingMode,
+        id: PatId,
     ) -> Ty {
         let (ty, def) = self.resolve_variant(path);
+        if let Some(variant) = def {
+            self.write_variant_resolution(id.into(), variant);
+        }
 
         self.unify(&ty, expected);
 
@@ -762,7 +770,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm)
             }
             Pat::Struct { path: ref p, args: ref fields } => {
-                self.infer_struct_pat(p.as_ref(), fields, expected, default_bm)
+                self.infer_struct_pat(p.as_ref(), fields, expected, default_bm, pat)
             }
             Pat::Path(path) => {
                 // FIXME use correct resolver for the surrounding expression
@@ -1064,7 +1072,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             Expr::StructLit { path, fields, spread } => {
                 let (ty, def_id) = self.resolve_variant(path.as_ref());
                 if let Some(variant) = def_id {
-                    self.write_variant_resolution(tgt_expr, variant);
+                    self.write_variant_resolution(tgt_expr.into(), variant);
                 }
 
                 let substs = ty.substs().unwrap_or_else(Substs::empty);
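Note: keying `variant_resolutions` by `ExprOrPatId` lets a single table serve both
struct literals (expressions) and struct patterns; the `.into()` calls above rely on
`From` conversions. A self-contained sketch of that scheme (the real ids live in
ra_hir; these newtypes are stand-ins):

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct ExprId(u32);
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct PatId(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    enum ExprOrPatId {
        Expr(ExprId),
        Pat(PatId),
    }

    impl From<ExprId> for ExprOrPatId {
        fn from(e: ExprId) -> Self { ExprOrPatId::Expr(e) }
    }

    impl From<PatId> for ExprOrPatId {
        fn from(p: PatId) -> Self { ExprOrPatId::Pat(p) }
    }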
@@ -3211,8 +3211,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     );
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
@@ -3232,8 +3231,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
 
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
@@ -5,6 +5,7 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
 [dependencies]
+format-buf = "1.0.0"
 itertools = "0.8.0"
 join_to_string = "0.1.3"
 log = "0.4.5"
@@ -4,6 +4,7 @@ mod presentation;
 
 mod complete_dot;
 mod complete_struct_literal;
+mod complete_struct_pattern;
 mod complete_pattern;
 mod complete_fn_param;
 mod complete_keyword;
@@ -65,6 +66,7 @@ pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Opti
     complete_scope::complete_scope(&mut acc, &ctx);
     complete_dot::complete_dot(&mut acc, &ctx);
     complete_struct_literal::complete_struct_literal(&mut acc, &ctx);
+    complete_struct_pattern::complete_struct_pattern(&mut acc, &ctx);
     complete_pattern::complete_pattern(&mut acc, &ctx);
     complete_postfix::complete_postfix(&mut acc, &ctx);
     Some(acc)
@@ -1,23 +1,22 @@
-use hir::{Substs, Ty};
+use hir::Substs;
 
 use crate::completion::{CompletionContext, Completions};
 
 /// Complete fields in fields literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
-        Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
+        Some((
+            ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
+            ctx.analyzer.resolve_struct_literal(it)?,
+        ))
     }) {
         Some(it) => it,
         _ => return,
     };
 
-    let ty_substs = match ty {
-        Ty::Apply(it) => it.parameters,
-        _ => Substs::empty(),
-    };
+    let substs = &ty.substs().unwrap_or_else(Substs::empty);
 
     for field in variant.fields(ctx.db) {
-        acc.add_field(ctx, field, &ty_substs);
+        acc.add_field(ctx, field, substs);
     }
 }
 
crates/ra_ide_api/src/completion/complete_struct_pattern.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
+use hir::Substs;
+
+use crate::completion::{CompletionContext, Completions};
+
+pub(super) fn complete_struct_pattern(acc: &mut Completions, ctx: &CompletionContext) {
+    let (ty, variant) = match ctx.struct_lit_pat.as_ref().and_then(|it| {
+        Some((
+            ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
+            ctx.analyzer.resolve_struct_pattern(it)?,
+        ))
+    }) {
+        Some(it) => it,
+        _ => return,
+    };
+    let substs = &ty.substs().unwrap_or_else(Substs::empty);
+
+    for field in variant.fields(ctx.db) {
+        acc.add_field(ctx, field, substs);
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::completion::{do_completion, CompletionItem, CompletionKind};
+    use insta::assert_debug_snapshot_matches;
+
+    fn complete(code: &str) -> Vec<CompletionItem> {
+        do_completion(code, CompletionKind::Reference)
+    }
+
+    #[test]
+    fn test_struct_pattern_field() {
+        let completions = complete(
+            r"
+            struct S { foo: u32 }
+
+            fn process(f: S) {
+                match f {
+                    S { f<|>: 92 } => (),
+                }
+            }
+            ",
+        );
+        assert_debug_snapshot_matches!(completions, @r###"
+       ⋮[
+       ⋮    CompletionItem {
+       ⋮        label: "foo",
+       ⋮        source_range: [117; 118),
+       ⋮        delete: [117; 118),
+       ⋮        insert: "foo",
+       ⋮        kind: Field,
+       ⋮        detail: "u32",
+       ⋮    },
+       ⋮]
+        "###);
+    }
+
+    #[test]
+    fn test_struct_pattern_enum_variant() {
+        let completions = complete(
+            r"
+            enum E {
+                S { foo: u32, bar: () }
+            }
+
+            fn process(e: E) {
+                match e {
+                    E::S { <|> } => (),
+                }
+            }
+            ",
+        );
+        assert_debug_snapshot_matches!(completions, @r###"
+       ⋮[
+       ⋮    CompletionItem {
+       ⋮        label: "bar",
+       ⋮        source_range: [161; 161),
+       ⋮        delete: [161; 161),
+       ⋮        insert: "bar",
+       ⋮        kind: Field,
+       ⋮        detail: "()",
+       ⋮    },
+       ⋮    CompletionItem {
+       ⋮        label: "foo",
+       ⋮        source_range: [161; 161),
+       ⋮        delete: [161; 161),
+       ⋮        insert: "foo",
+       ⋮        kind: Field,
+       ⋮        detail: "u32",
+       ⋮    },
+       ⋮]
+        "###);
+    }
+}
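Note: in these tests, `<|>` marks the cursor position in the fixture, and
`assert_debug_snapshot_matches!` compares against the inline insta snapshot (the
⋮-prefixed block). A hypothetical extra test in the same style, asserting on the
Debug rendering instead of a full snapshot:

    #[test]
    fn completes_bar_field() {
        let completions = complete(
            r"
            struct Foo { bar: u32 }

            fn f(foo: Foo) {
                match foo {
                    Foo { b<|> } => (),
                }
            }
            ",
        );
        // The snapshot tests above pin exact output; for a quick check the
        // Debug rendering of the completion list is enough.
        assert!(format!("{:?}", completions).contains("\"bar\""));
    }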
@@ -1,6 +1,6 @@
 use hir::source_binder;
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset},
+    algo::{find_covering_element, find_node_at_offset},
     ast, AstNode, Parse, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
@@ -21,6 +21,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) function_syntax: Option<ast::FnDef>,
     pub(super) use_item_syntax: Option<ast::UseItem>,
     pub(super) struct_lit_syntax: Option<ast::StructLit>,
+    pub(super) struct_lit_pat: Option<ast::StructPat>,
     pub(super) is_param: bool,
     /// If a name-binding or reference to a const in a pattern.
     /// Irrefutable patterns (like let) are excluded.
@@ -48,7 +49,7 @@ impl<'a> CompletionContext<'a> {
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
         let token =
-            find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
+            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
@@ -60,6 +61,7 @@ impl<'a> CompletionContext<'a> {
             function_syntax: None,
             use_item_syntax: None,
             struct_lit_syntax: None,
+            struct_lit_pat: None,
             is_param: false,
             is_pat_binding: false,
             is_trivial_path: false,
@@ -106,8 +108,7 @@ impl<'a> CompletionContext<'a> {
         // Otherwise, see if this is a declaration. We can use heuristics to
         // suggest declaration names, see `CompletionKind::Magic`.
         if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
-            if is_node::<ast::BindPat>(name.syntax()) {
-                let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
+            if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                 let parent = bind_pat.syntax().parent();
                 if parent.clone().and_then(ast::MatchArm::cast).is_some()
                     || parent.and_then(ast::Condition::cast).is_some()
@@ -119,6 +120,10 @@ impl<'a> CompletionContext<'a> {
                 self.is_param = true;
                 return;
             }
+            if name.syntax().ancestors().find_map(ast::FieldPatList::cast).is_some() {
+                self.struct_lit_pat =
+                    find_node_at_offset(original_parse.tree().syntax(), self.offset);
+            }
         }
     }
 
@@ -235,7 +240,7 @@ fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Op
 }
 
 fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
-    match node.ancestors().filter_map(N::cast).next() {
+    match node.ancestors().find_map(N::cast) {
         None => false,
         Some(n) => n.syntax().text_range() == node.text_range(),
     }
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner};
 
 pub(crate) trait ShortLabel {
@@ -73,7 +72,7 @@ where
     let mut buf = short_label_from_node(node, prefix)?;
 
     if let Some(type_ref) = node.ascribed_type() {
-        write!(buf, ": {}", type_ref.syntax()).unwrap();
+        format!(buf, ": {}", type_ref.syntax());
     }
 
     Some(buf)
@@ -1,10 +1,10 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    algo::find_covering_element,
     ast::{self, AstNode, AstToken},
-    Direction, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
 use crate::{db::RootDatabase, FileRange};
@@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 
     if range.is_empty() {
         let offset = range.start();
-        let mut leaves = find_token_at_offset(root, offset);
+        let mut leaves = root.token_at_offset(offset);
         if leaves.clone().all(|it| it.kind() == WHITESPACE) {
             return Some(extend_ws(root, leaves.next()?, offset));
         }
@@ -53,7 +53,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         return Some(leaf_range);
     };
     let node = match find_covering_element(root, range) {
-        SyntaxElement::Token(token) => {
+        NodeOrToken::Token(token) => {
             if token.text_range() != range {
                 return Some(token.text_range());
             }
@@ -64,7 +64,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
             }
             token.parent()
         }
-        SyntaxElement::Node(node) => node,
+        NodeOrToken::Node(node) => node,
     };
     if node.text_range() != range {
         return Some(node.text_range());
@@ -153,8 +153,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
     node.siblings_with_tokens(dir)
         .skip(1)
         .skip_while(|node| match node {
-            SyntaxElement::Node(_) => false,
-            SyntaxElement::Token(it) => is_single_line_ws(it),
+            NodeOrToken::Node(_) => false,
+            NodeOrToken::Token(it) => is_single_line_ws(it),
         })
         .next()
         .and_then(|it| it.into_token())
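Note: `SyntaxElement` is now a type alias for rowan's `NodeOrToken<SyntaxNode,
SyntaxToken>` (see the syntax_node.rs hunk at the end of this diff), which is why the
match arms here and in the following files switch from `SyntaxElement::*` to
`NodeOrToken::*`. Sketch:

    use ra_syntax::{NodeOrToken, SyntaxElement};

    fn describe(element: &SyntaxElement) -> String {
        match element {
            NodeOrToken::Node(node) => format!("node {:?}", node.kind()),
            NodeOrToken::Token(token) => format!("token {:?}", token.kind()),
        }
    }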
@@ -2,7 +2,7 @@ use rustc_hash::FxHashSet;
 
 use ra_syntax::{
     ast::{self, AstNode, AstToken, VisibilityOwner},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, *},
     SyntaxNode, TextRange,
 };
@@ -31,8 +31,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(element.kind()) {
             let is_multiline = match &element {
-                SyntaxElement::Node(node) => node.text().contains_char('\n'),
-                SyntaxElement::Token(token) => token.text().contains('\n'),
+                NodeOrToken::Node(node) => node.text().contains_char('\n'),
+                NodeOrToken::Token(token) => token.text().contains('\n'),
             };
             if is_multiline {
                 res.push(Fold { range: element.text_range(), kind });
@@ -41,7 +41,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         }
 
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 // Fold groups of comments
                 if let Some(comment) = ast::Comment::cast(token) {
                     if !visited_comments.contains(&comment) {
@@ -53,7 +53,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
                     }
                 }
             }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                 // Fold groups of imports
                 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
                     if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
@@ -108,7 +108,7 @@ fn contiguous_range_for_group_unless(
     let mut last = first.clone();
     for element in first.siblings_with_tokens(Direction::Next) {
         let node = match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(ws) = ast::Whitespace::cast(token) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines
@@ -119,7 +119,7 @@ fn contiguous_range_for_group_unless(
                 // group ends here
                 break;
             }
            SyntaxElement::Node(node) => node,
+            NodeOrToken::Node(node) => node,
         };
 
         // Stop if we find a node that doesn't belong to the group
@@ -154,7 +154,7 @@ fn contiguous_range_for_comment(
     let mut last = first.clone();
     for element in first.syntax().siblings_with_tokens(Direction::Next) {
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(ws) = ast::Whitespace::cast(token.clone()) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines
@@ -173,7 +173,7 @@ fn contiguous_range_for_comment(
             // * A comment of a different flavor was reached
             break;
             }
-            SyntaxElement::Node(_) => break,
+            NodeOrToken::Node(_) => break,
         };
     }
 
@@ -1,5 +1,5 @@
 use ra_db::SourceDatabase;
-use ra_syntax::{algo::find_token_at_offset, ast, AstNode};
+use ra_syntax::{ast, AstNode};
 
 use crate::{db::RootDatabase, FilePosition, NavigationTarget, RangeInfo};
 
@@ -9,7 +9,7 @@ pub(crate) fn goto_type_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
 
-    let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| {
+    let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| {
         token
             .parent()
             .ancestors()
@@ -3,7 +3,7 @@ use ra_fmt::{compute_ws, extract_trivial_expression};
 use ra_syntax::{
     algo::{find_covering_element, non_trivia_sibling},
     ast::{self, AstNode, AstToken},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, WHITESPACE},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
 };
@@ -23,8 +23,8 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
     };
 
     let node = match find_covering_element(file.syntax(), range) {
-        SyntaxElement::Node(node) => node,
-        SyntaxElement::Token(token) => token.parent(),
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
     };
     let mut edit = TextEditBuilder::default();
     for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
@@ -1,9 +1,11 @@
-use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
+use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
 
 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
-    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
@@ -1,7 +1,7 @@
 use crate::db::RootDatabase;
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo, AstNode, SourceFile, SyntaxElement,
+    algo, AstNode, NodeOrToken, SourceFile,
     SyntaxKind::{RAW_STRING, STRING},
     SyntaxToken, TextRange,
 };
@@ -16,8 +16,8 @@ pub(crate) fn syntax_tree(
     let parse = db.parse(file_id);
     if let Some(text_range) = text_range {
         let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
-            SyntaxElement::Node(node) => node,
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => {
                 if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                     return tree;
                 }
@@ -1,11 +1,11 @@
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::find_node_at_offset,
     ast::{self, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
-    SyntaxToken, TextRange, TextUnit,
+    SyntaxToken, TextRange, TextUnit, TokenAtOffset,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
 
@@ -14,7 +14,9 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
-    let comment = find_token_at_offset(file.syntax(), position.offset)
+    let comment = file
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
 
@@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
 }
 
 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-    let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
+    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
         TokenAtOffset::Between(l, r) => {
             assert!(r == *token);
             l
@@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let parse = db.parse(position.file_id);
     assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.'));
 
-    let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset)
+    let whitespace = parse
+        .tree()
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Whitespace::cast)?;
 
@@ -1,12 +1,13 @@
-use crate::subtree_source::SubtreeTokenSource;
-use crate::ExpandError;
 use ra_parser::{ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
+use crate::subtree_source::SubtreeTokenSource;
+use crate::ExpandError;
+
 /// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Default)]
 pub struct TokenMap {
@@ -200,7 +201,7 @@ fn convert_tt(
         }
 
         match child {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(doc_tokens) = convert_doc_comment(&token) {
                     token_trees.extend(doc_tokens);
                 } else if token.kind().is_trivia() {
@@ -210,7 +211,7 @@ fn convert_tt(
                 let char = token.text().chars().next().unwrap();
 
                 let spacing = match child_iter.peek() {
-                    Some(SyntaxElement::Token(token)) => {
+                    Some(NodeOrToken::Token(token)) => {
                         if token.kind().is_punct() {
                             tt::Spacing::Joint
                         } else {
@@ -241,7 +242,7 @@ fn convert_tt(
                     token_trees.push(child);
                 }
             }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                 let child = convert_tt(token_map, global_offset, &node)?.into();
                 token_trees.push(child);
             }
@@ -1,4 +1,4 @@
-use ra_syntax::{ast, AstNode};
+use ra_syntax::{ast, AstNode, NodeOrToken};
 
 use super::*;
 
@@ -118,11 +118,11 @@ pub fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
             match event {
                 WalkEvent::Enter(element) => {
                     match element {
-                        ra_syntax::SyntaxElement::Node(node) => {
+                        NodeOrToken::Node(node) => {
                             indent!();
                             writeln!(buf, "{:?}", node.kind()).unwrap();
                         }
-                        ra_syntax::SyntaxElement::Token(token) => match token.kind() {
+                        NodeOrToken::Token(token) => match token.kind() {
                             ra_syntax::SyntaxKind::WHITESPACE => {}
                             _ => {
                                 indent!();
@@ -10,7 +10,6 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 [dependencies]
 unicode-xid = "0.1.0"
 itertools = "0.8.0"
-rowan = "0.5.6"
+rowan = "0.6.0-pre.1"
 
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations
@@ -1,18 +1,12 @@
 pub mod visit;
 
 use std::ops::RangeInclusive;
 
 use itertools::Itertools;
 
-use crate::{AstNode, Direction, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit};
-
-pub use rowan::TokenAtOffset;
-
-pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
-    match node.0.token_at_offset(offset) {
-        TokenAtOffset::None => TokenAtOffset::None,
-        TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
-        TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
-    }
-}
+use crate::{
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+};
 
 /// Returns ancestors of the node at the offset, sorted by length. This should
 /// do the right thing at an edge, e.g. when searching for expressions at `{
@@ -23,7 +17,7 @@ pub fn ancestors_at_offset(
     node: &SyntaxNode,
     offset: TextUnit,
 ) -> impl Iterator<Item = SyntaxNode> {
-    find_token_at_offset(node, offset)
+    node.token_at_offset(offset)
         .map(|token| token.parent().ancestors())
         .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
 }
@@ -44,20 +38,118 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
 /// Finds the first sibling in the given direction which is not `trivia`
 pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
     return match element {
-        SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
-        SyntaxElement::Token(token) => {
-            token.siblings_with_tokens(direction).skip(1).find(not_trivia)
-        }
+        NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+        NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
     };
 
     fn not_trivia(element: &SyntaxElement) -> bool {
         match element {
-            SyntaxElement::Node(_) => true,
-            SyntaxElement::Token(token) => !token.kind().is_trivia(),
+            NodeOrToken::Node(_) => true,
+            NodeOrToken::Token(token) => !token.kind().is_trivia(),
         }
     }
 }
 
 pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
-    SyntaxElement::new(root.0.covering_node(range))
+    root.covering_element(range)
 }
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum InsertPosition<T> {
+    First,
+    Last,
+    Before(T),
+    After(T),
+}
+
+/// Adds specified children (tokens or nodes) to the current node at the
+/// specific position.
+///
+/// This is a type-unsafe low-level editing API, if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn insert_children(
+    parent: &SyntaxNode,
+    position: InsertPosition<SyntaxElement>,
+    to_insert: impl Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut delta = TextUnit::default();
+    let to_insert = to_insert.map(|element| {
+        delta += element.text_range().len();
+        to_green_element(element)
+    });
+
+    let old_children = parent.green().children();
+
+    let new_children = match &position {
+        InsertPosition::First => {
+            to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
+        }
+        InsertPosition::Last => old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>(),
+        InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
+            let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
+            let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
+            let (before, after) = old_children.split_at(split_at);
+            before
+                .iter()
+                .cloned()
+                .chain(to_insert)
+                .chain(after.iter().cloned())
+                .collect::<Box<[_]>>()
+        }
+    };
+
+    with_children(parent, new_children)
+}
+
+/// Replaces all nodes in `to_delete` with nodes from `to_insert`
+///
+/// This is a type-unsafe low-level editing API, if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn replace_children(
+    parent: &SyntaxNode,
+    to_delete: RangeInclusive<SyntaxElement>,
+    to_insert: impl Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let start = position_of_child(parent, to_delete.start().clone());
+    let end = position_of_child(parent, to_delete.end().clone());
+    let old_children = parent.green().children();
+
+    let new_children = old_children[..start]
+        .iter()
+        .cloned()
+        .chain(to_insert.map(to_green_element))
+        .chain(old_children[end + 1..].iter().cloned())
+        .collect::<Box<[_]>>();
+    with_children(parent, new_children)
+}
+
+fn with_children(
+    parent: &SyntaxNode,
+    new_children: Box<[NodeOrToken<rowan::GreenNode, rowan::GreenToken>]>,
+) -> SyntaxNode {
+    let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
+    let new_node =
+        rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children);
+    let new_root_node = parent.replace_with(new_node);
+    let new_root_node = SyntaxNode::new_root(new_root_node);
+
+    // FIXME: use a more elegant way to re-fetch the node (#1185), make
+    // `range` private afterwards
+    let mut ptr = SyntaxNodePtr::new(parent);
+    ptr.range = TextRange::offset_len(ptr.range().start(), len);
+    ptr.to_node(&new_root_node)
+}
+
+fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
+    parent
+        .children_with_tokens()
+        .position(|it| it == child)
+        .expect("element is not a child of current element")
+}
+
+fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
+    match element {
+        NodeOrToken::Node(it) => it.green().clone().into(),
+        NodeOrToken::Token(it) => it.green().clone().into(),
+    }
+}
@@ -2,7 +2,7 @@
 
 use crate::{
     ast::{self, child_opt, children, AstChildren, AstNode},
-    SmolStr, SyntaxElement,
+    SmolStr,
     SyntaxKind::*,
     SyntaxToken, T,
 };
@@ -229,14 +229,11 @@ pub enum LiteralKind {
 
 impl ast::Literal {
     pub fn token(&self) -> SyntaxToken {
-        let elem = self
-            .syntax()
+        self.syntax()
             .children_with_tokens()
-            .find(|e| e.kind() != ATTR && !e.kind().is_trivia());
-        match elem {
-            Some(SyntaxElement::Token(token)) => token,
-            _ => unreachable!(),
-        }
+            .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+            .and_then(|e| e.into_token())
+            .unwrap()
     }
 
     pub fn kind(&self) -> LiteralKind {
@@ -24,10 +24,7 @@ impl ast::NameRef {
 }
 
 fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
-    match node.0.green().children().first() {
-        Some(rowan::GreenElement::Token(it)) => it.text(),
-        _ => panic!(),
-    }
+    node.green().children().first().and_then(|it| it.as_token()).unwrap().text()
 }
 
 impl ast::Attr {
@@ -20,7 +20,6 @@
 //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
 
 mod syntax_node;
-mod syntax_text;
 mod syntax_error;
 mod parsing;
 mod validation;
@@ -38,19 +37,17 @@ use ra_text_edit::AtomTextEdit;
 use crate::syntax_node::GreenNode;
 
 pub use crate::{
+    algo::InsertPosition,
     ast::{AstNode, AstToken},
     parsing::{classify_literal, tokenize, Token},
     ptr::{AstPtr, SyntaxNodePtr},
     syntax_error::{Location, SyntaxError, SyntaxErrorKind},
     syntax_node::{
-        Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
-        WalkEvent,
+        Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
     },
-    syntax_text::SyntaxText,
 };
-pub use ra_parser::SyntaxKind;
-pub use ra_parser::T;
-pub use rowan::{SmolStr, TextRange, TextUnit};
+pub use ra_parser::{SyntaxKind, T};
+pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit, TokenAtOffset, WalkEvent};
 
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
@@ -76,7 +73,7 @@ impl<T> Parse<T> {
     }
 
     pub fn syntax_node(&self) -> SyntaxNode {
-        SyntaxNode::new(self.green.clone())
+        SyntaxNode::new_root(self.green.clone())
     }
 }
 
@@ -146,18 +143,17 @@ impl Parse<SourceFile> {
 pub use crate::ast::SourceFile;
 
 impl SourceFile {
-    fn new(green: GreenNode) -> SourceFile {
-        let root = SyntaxNode::new(green);
+    pub fn parse(text: &str) -> Parse<SourceFile> {
+        let (green, mut errors) = parsing::parse_text(text);
+        let root = SyntaxNode::new_root(green.clone());
+
         if cfg!(debug_assertions) {
             validation::validate_block_structure(&root);
         }
-        assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
-        SourceFile::cast(root).unwrap()
-    }
-
-    pub fn parse(text: &str) -> Parse<SourceFile> {
-        let (green, mut errors) = parsing::parse_text(text);
-        errors.extend(validation::validate(&SourceFile::new(green.clone())));
+
+        errors.extend(validation::validate(&root));
+
+        assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
         Parse { green, errors: Arc::new(errors), _ty: PhantomData }
     }
 }
@@ -267,8 +263,8 @@ fn api_walkthrough() {
         match event {
             WalkEvent::Enter(node) => {
                 let text = match &node {
-                    SyntaxElement::Node(it) => it.text().to_string(),
-                    SyntaxElement::Token(it) => it.text().to_string(),
+                    NodeOrToken::Node(it) => it.text().to_string(),
+                    NodeOrToken::Token(it) => it.text().to_string(),
                 };
                 buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
                 indent += 2;
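Note: `SourceFile::new` is folded into `SourceFile::parse`, which now builds the root
with `SyntaxNode::new_root`, validates it, and returns `Parse<SourceFile>`. A minimal
sketch using `tree()` as seen elsewhere in this diff:

    use ra_syntax::{AstNode, SourceFile};

    fn demo() {
        let parse = SourceFile::parse("fn main() {}");
        let file = parse.tree();
        assert_eq!(file.syntax().text().to_string(), "fn main() {}");
    }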
@@ -16,7 +16,7 @@ use crate::{
         text_token_source::TextTokenSource,
         text_tree_sink::TextTreeSink,
     },
-    syntax_node::{GreenNode, GreenToken, SyntaxElement, SyntaxNode},
+    syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
     SyntaxError,
     SyntaxKind::*,
     TextRange, TextUnit, T,
@@ -70,7 +70,8 @@ fn reparse_token<'node>(
                 }
             }
 
-            let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into());
+            let new_token =
+                GreenToken::new(rowan::cursor::SyntaxKind(token.kind().into()), text.into());
             Some((token.replace_with(new_token), token.text_range()))
         }
         _ => None,
@@ -98,8 +99,8 @@ fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String {
     let edit =
         AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone());
     let text = match element {
-        SyntaxElement::Token(token) => token.text().to_string(),
-        SyntaxElement::Node(node) => node.text().to_string(),
+        NodeOrToken::Token(token) => token.text().to_string(),
+        NodeOrToken::Node(node) => node.text().to_string(),
    };
    edit.apply(text)
 }
@@ -114,8 +115,8 @@ fn is_contextual_kw(text: &str) -> bool {
 fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
     let node = algo::find_covering_element(node, range);
     let mut ancestors = match node {
-        SyntaxElement::Token(it) => it.parent().ancestors(),
-        SyntaxElement::Node(it) => it.ancestors(),
+        NodeOrToken::Token(it) => it.parent().ancestors(),
+        NodeOrToken::Node(it) => it.ancestors(),
     };
     ancestors.find_map(|node| {
         let first_child = node.first_child_or_token().map(|it| it.kind());
@ -6,487 +6,37 @@
|
||||
//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
|
||||
//! modules just wraps its API.
|
||||
|
||||
use std::{fmt, iter::successors, ops::RangeInclusive};
|
||||
|
||||
use ra_parser::ParseError;
|
||||
use rowan::GreenNodeBuilder;
|
||||
use rowan::{GreenNodeBuilder, Language};
|
||||
|
||||
use crate::{
|
||||
syntax_error::{SyntaxError, SyntaxErrorKind},
|
||||
AstNode, Parse, SmolStr, SourceFile, SyntaxKind, SyntaxNodePtr, SyntaxText, TextRange,
|
||||
TextUnit,
|
||||
Parse, SmolStr, SyntaxKind, TextUnit,
|
||||
};
|
||||
|
||||
pub use rowan::WalkEvent;
|
||||
pub(crate) use rowan::{GreenNode, GreenToken};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub enum InsertPosition<T> {
|
||||
First,
|
||||
Last,
|
||||
Before(T),
|
||||
After(T),
|
||||
}
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum RustLanguage {}
|
||||
impl Language for RustLanguage {
|
||||
type Kind = SyntaxKind;
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone)]
|
||||
pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode);
|
||||
fn kind_from_raw(raw: rowan::cursor::SyntaxKind) -> SyntaxKind {
|
||||
SyntaxKind::from(raw.0)
|
||||
}
|
||||
|
||||
impl fmt::Debug for SyntaxNode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if f.alternate() {
|
||||
let mut level = 0;
|
||||
for event in self.preorder_with_tokens() {
|
||||
match event {
|
||||
WalkEvent::Enter(element) => {
|
||||
for _ in 0..level {
|
||||
write!(f, " ")?;
|
||||
}
|
||||
match element {
|
||||
SyntaxElement::Node(node) => writeln!(f, "{:?}", node)?,
|
||||
SyntaxElement::Token(token) => writeln!(f, "{:?}", token)?,
|
||||
}
|
||||
level += 1;
|
||||
}
|
||||
WalkEvent::Leave(_) => level -= 1,
|
||||
}
|
||||
}
|
||||
assert_eq!(level, 0);
|
||||
Ok(())
|
||||
} else {
|
||||
write!(f, "{:?}@{:?}", self.kind(), self.text_range())
|
||||
}
|
||||
fn kind_to_raw(kind: SyntaxKind) -> rowan::cursor::SyntaxKind {
|
||||
rowan::cursor::SyntaxKind(kind.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for SyntaxNode {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.text(), fmt)
|
||||
}
|
||||
}
|
||||
pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
|
||||
pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
|
||||
pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
|
||||
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
|
||||
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum Direction {
|
||||
Next,
|
||||
Prev,
|
||||
}
|
||||
|
||||
impl SyntaxNode {
|
||||
pub(crate) fn new(green: GreenNode) -> SyntaxNode {
|
||||
let inner = rowan::cursor::SyntaxNode::new_root(green);
|
||||
SyntaxNode(inner)
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> SyntaxKind {
|
||||
self.0.kind().0.into()
|
||||
}
|
||||
|
||||
pub fn text_range(&self) -> TextRange {
|
||||
self.0.text_range()
|
||||
}
|
||||
|
||||
pub fn text(&self) -> SyntaxText {
|
||||
SyntaxText::new(self.clone())
|
||||
}
|
||||
|
||||
pub fn parent(&self) -> Option<SyntaxNode> {
|
||||
self.0.parent().map(SyntaxNode)
|
||||
}
|
||||
|
||||
pub fn first_child(&self) -> Option<SyntaxNode> {
|
||||
self.0.first_child().map(SyntaxNode)
|
    }

    pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
        self.0.first_child_or_token().map(SyntaxElement::new)
    }

    pub fn last_child(&self) -> Option<SyntaxNode> {
        self.0.last_child().map(SyntaxNode)
    }

    pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
        self.0.last_child_or_token().map(SyntaxElement::new)
    }

    pub fn next_sibling(&self) -> Option<SyntaxNode> {
        self.0.next_sibling().map(SyntaxNode)
    }

    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
        self.0.next_sibling_or_token().map(SyntaxElement::new)
    }

    pub fn prev_sibling(&self) -> Option<SyntaxNode> {
        self.0.prev_sibling().map(SyntaxNode)
    }

    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
        self.0.prev_sibling_or_token().map(SyntaxElement::new)
    }

    pub fn children(&self) -> SyntaxNodeChildren {
        SyntaxNodeChildren(self.0.children())
    }

    pub fn children_with_tokens(&self) -> SyntaxElementChildren {
        SyntaxElementChildren(self.0.children_with_tokens())
    }

    pub fn first_token(&self) -> Option<SyntaxToken> {
        self.0.first_token().map(SyntaxToken)
    }

    pub fn last_token(&self) -> Option<SyntaxToken> {
        self.0.last_token().map(SyntaxToken)
    }

    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
        successors(Some(self.clone()), |node| node.parent())
    }

    pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
        self.preorder().filter_map(|event| match event {
            WalkEvent::Enter(node) => Some(node),
            WalkEvent::Leave(_) => None,
        })
    }

    pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> {
        self.preorder_with_tokens().filter_map(|event| match event {
            WalkEvent::Enter(it) => Some(it),
            WalkEvent::Leave(_) => None,
        })
    }

    pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> {
        successors(Some(self.clone()), move |node| match direction {
            Direction::Next => node.next_sibling(),
            Direction::Prev => node.prev_sibling(),
        })
    }

    pub fn siblings_with_tokens(
        &self,
        direction: Direction,
    ) -> impl Iterator<Item = SyntaxElement> {
        let me: SyntaxElement = self.clone().into();
        successors(Some(me), move |el| match direction {
            Direction::Next => el.next_sibling_or_token(),
            Direction::Prev => el.prev_sibling_or_token(),
        })
    }

    pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<SyntaxNode>> {
        self.0.preorder().map(|event| match event {
            WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)),
            WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)),
        })
    }

    pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> {
        self.0.preorder_with_tokens().map(|event| match event {
            WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)),
            WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)),
        })
    }

    pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
        self.0.replace_with(replacement)
    }

    /// Adds specified children (tokens or nodes) to the current node at the
    /// specific position.
    ///
    /// This is a type-unsafe low-level editing API; if you need to use it,
    /// prefer to create a type-safe abstraction on top of it instead.
    pub fn insert_children(
        &self,
        position: InsertPosition<SyntaxElement>,
        to_insert: impl Iterator<Item = SyntaxElement>,
    ) -> SyntaxNode {
        let mut delta = TextUnit::default();
        let to_insert = to_insert.map(|element| {
            delta += element.text_len();
            to_green_element(element)
        });

        let old_children = self.0.green().children();

        let new_children = match &position {
            InsertPosition::First => {
                to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
            }
            InsertPosition::Last => {
                old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>()
            }
            InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
                let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
                let split_at = self.position_of_child(anchor.clone()) + take_anchor;
                let (before, after) = old_children.split_at(split_at);
                before
                    .iter()
                    .cloned()
                    .chain(to_insert)
                    .chain(after.iter().cloned())
                    .collect::<Box<[_]>>()
            }
        };

        self.with_children(new_children)
    }
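    // Note: syntax trees here are immutable, so `insert_children` does not
    // mutate `self`; it builds a new green tree and returns the corresponding
    // new `SyntaxNode`. A minimal, hypothetical usage sketch (`list` and
    // `new_comma` are illustrative names, not bindings from this file):
    //
    //     let updated: SyntaxNode =
    //         list.insert_children(InsertPosition::Last, std::iter::once(new_comma.into()));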
    /// Replaces all nodes in `to_delete` with nodes from `to_insert`.
    ///
    /// This is a type-unsafe low-level editing API; if you need to use it,
    /// prefer to create a type-safe abstraction on top of it instead.
    pub fn replace_children(
        &self,
        to_delete: RangeInclusive<SyntaxElement>,
        to_insert: impl Iterator<Item = SyntaxElement>,
    ) -> SyntaxNode {
        let start = self.position_of_child(to_delete.start().clone());
        let end = self.position_of_child(to_delete.end().clone());
        let old_children = self.0.green().children();

        let new_children = old_children[..start]
            .iter()
            .cloned()
            .chain(to_insert.map(to_green_element))
            .chain(old_children[end + 1..].iter().cloned())
            .collect::<Box<[_]>>();
        self.with_children(new_children)
    }
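    // A hedged usage sketch: replacing an inclusive range of children with
    // fresh elements (`first`, `last`, and `replacement` are hypothetical
    // bindings, not from this file):
    //
    //     let updated = node.replace_children(first..=last, replacement.into_iter());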
    fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode {
        let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
        let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
        let new_file_node = self.replace_with(new_node);
        let file = SourceFile::new(new_file_node);

        // FIXME: use a more elegant way to re-fetch the node (#1185), make
        // `range` private afterwards
        let mut ptr = SyntaxNodePtr::new(self);
        ptr.range = TextRange::offset_len(ptr.range().start(), len);
        ptr.to_node(file.syntax()).to_owned()
    }

    fn position_of_child(&self, child: SyntaxElement) -> usize {
        self.children_with_tokens()
            .position(|it| it == child)
            .expect("element is not a child of current element")
    }
}
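// Taken together, the methods above form the tree-walking API: `ancestors`
// climbs towards the root, `descendants` yields the subtree in preorder, and
// the `*_with_tokens` variants include leaf tokens. A hypothetical sketch of
// finding the outermost `FN_DEF` node covering an offset (`root` and `offset`
// are illustrative bindings):
//
//     let fn_node = root
//         .descendants()
//         .filter(|node| node.kind() == FN_DEF)
//         .find(|node| node.text_range().contains(offset));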
fn to_green_element(element: SyntaxElement) -> rowan::GreenElement {
    match element {
        SyntaxElement::Node(node) => node.0.green().clone().into(),
        SyntaxElement::Token(tok) => {
            GreenToken::new(rowan::SyntaxKind(tok.kind() as u16), tok.text().clone()).into()
        }
    }
}

#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken);

impl fmt::Debug for SyntaxToken {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}@{:?}", self.kind(), self.text_range())?;
        if self.text().len() < 25 {
            return write!(fmt, " {:?}", self.text());
        }
        let text = self.text().as_str();
        for idx in 21..25 {
            if text.is_char_boundary(idx) {
                let text = format!("{} ...", &text[..idx]);
                return write!(fmt, " {:?}", text);
            }
        }
        unreachable!()
    }
}
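// The `21..25` loop above backs the cut off to the nearest UTF-8 character
// boundary: a `char` occupies at most four bytes, so at least one of the byte
// positions 21, 22, 23, 24 must be a boundary, which is why the
// `unreachable!()` can never be hit.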
impl fmt::Display for SyntaxToken {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(self.text(), fmt)
    }
}

impl SyntaxToken {
    pub fn kind(&self) -> SyntaxKind {
        self.0.kind().0.into()
    }

    pub fn text(&self) -> &SmolStr {
        self.0.text()
    }

    pub fn text_range(&self) -> TextRange {
        self.0.text_range()
    }

    pub fn parent(&self) -> SyntaxNode {
        SyntaxNode(self.0.parent())
    }

    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
        self.0.next_sibling_or_token().map(SyntaxElement::new)
    }

    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
        self.0.prev_sibling_or_token().map(SyntaxElement::new)
    }

    pub fn siblings_with_tokens(
        &self,
        direction: Direction,
    ) -> impl Iterator<Item = SyntaxElement> {
        let me: SyntaxElement = self.clone().into();
        successors(Some(me), move |el| match direction {
            Direction::Next => el.next_sibling_or_token(),
            Direction::Prev => el.prev_sibling_or_token(),
        })
    }

    pub fn next_token(&self) -> Option<SyntaxToken> {
        self.0.next_token().map(SyntaxToken)
    }

    pub fn prev_token(&self) -> Option<SyntaxToken> {
        self.0.prev_token().map(SyntaxToken)
    }

    pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode {
        self.0.replace_with(new_token)
    }
}
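// Note the two traversal granularities: `next_sibling_or_token` stays on one
// level of the tree, while `next_token` crosses node boundaries and walks the
// leaf tokens of the whole tree in source order. A hypothetical sketch of
// skipping whitespace trivia (assuming the usual `WHITESPACE` kind):
//
//     let mut tok = token.clone();
//     while tok.kind() == WHITESPACE {
//         match tok.next_token() {
//             Some(next) => tok = next,
//             None => break,
//         }
//     }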
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub enum SyntaxElement {
    Node(SyntaxNode),
    Token(SyntaxToken),
}

impl From<SyntaxNode> for SyntaxElement {
    fn from(node: SyntaxNode) -> Self {
        SyntaxElement::Node(node)
    }
}

impl From<SyntaxToken> for SyntaxElement {
    fn from(token: SyntaxToken) -> Self {
        SyntaxElement::Token(token)
    }
}

impl fmt::Display for SyntaxElement {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt),
            SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt),
        }
    }
}

impl SyntaxElement {
    pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self {
        match el {
            rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)),
            rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)),
        }
    }

    pub fn kind(&self) -> SyntaxKind {
        match self {
            SyntaxElement::Node(it) => it.kind(),
            SyntaxElement::Token(it) => it.kind(),
        }
    }

    pub fn as_node(&self) -> Option<&SyntaxNode> {
        match self {
            SyntaxElement::Node(node) => Some(node),
            SyntaxElement::Token(_) => None,
        }
    }

    pub fn into_node(self) -> Option<SyntaxNode> {
        match self {
            SyntaxElement::Node(node) => Some(node),
            SyntaxElement::Token(_) => None,
        }
    }

    pub fn as_token(&self) -> Option<&SyntaxToken> {
        match self {
            SyntaxElement::Node(_) => None,
            SyntaxElement::Token(token) => Some(token),
        }
    }

    pub fn into_token(self) -> Option<SyntaxToken> {
        match self {
            SyntaxElement::Node(_) => None,
            SyntaxElement::Token(token) => Some(token),
        }
    }

    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
        match self {
            SyntaxElement::Node(it) => it.next_sibling_or_token(),
            SyntaxElement::Token(it) => it.next_sibling_or_token(),
        }
    }

    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
        match self {
            SyntaxElement::Node(it) => it.prev_sibling_or_token(),
            SyntaxElement::Token(it) => it.prev_sibling_or_token(),
        }
    }

    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
        match self {
            SyntaxElement::Node(it) => it.clone(),
            SyntaxElement::Token(it) => it.parent(),
        }
        .ancestors()
    }

    pub fn text_range(&self) -> TextRange {
        match self {
            SyntaxElement::Node(it) => it.text_range(),
            SyntaxElement::Token(it) => it.text_range(),
        }
    }

    fn text_len(&self) -> TextUnit {
        match self {
            SyntaxElement::Node(node) => node.0.green().text_len(),
            SyntaxElement::Token(token) => TextUnit::of_str(token.0.text()),
        }
    }
}
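// The `as_*` accessors borrow while the `into_*` ones consume; both return
// `Option`, so call sites can branch without a full `match`. A hypothetical
// sketch (`element` is an illustrative binding):
//
//     if let Some(node) = element.as_node() {
//         println!("node {:?}", node.kind());
//     } else if let Some(token) = element.as_token() {
//         println!("token {:?}", token.text());
//     }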
#[derive(Clone, Debug)]
pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren);

impl Iterator for SyntaxNodeChildren {
    type Item = SyntaxNode;
    fn next(&mut self) -> Option<SyntaxNode> {
        self.0.next().map(SyntaxNode)
    }
}

#[derive(Clone, Debug)]
pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren);

impl Iterator for SyntaxElementChildren {
    type Item = SyntaxElement;
    fn next(&mut self) -> Option<SyntaxElement> {
        self.0.next().map(SyntaxElement::new)
    }
}

pub use rowan::{Direction, NodeOrToken};

pub struct SyntaxTreeBuilder {
    errors: Vec<SyntaxError>,

@ -507,19 +57,21 @@ impl SyntaxTreeBuilder {

    pub fn finish(self) -> Parse<SyntaxNode> {
        let (green, errors) = self.finish_raw();
        let node = SyntaxNode::new(green);
        let node = SyntaxNode::new_root(green);
        if cfg!(debug_assertions) {
            crate::validation::validate_block_structure(&node);
        }
        Parse::new(node.0.green().clone(), errors)
        Parse::new(node.green().clone(), errors)
    }

    pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
        self.inner.token(rowan::SyntaxKind(kind.into()), text)
        let kind = RustLanguage::kind_to_raw(kind);
        self.inner.token(kind, text)
    }

    pub fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(rowan::SyntaxKind(kind.into()))
        let kind = RustLanguage::kind_to_raw(kind);
        self.inner.start_node(kind)
    }
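    // A hedged sketch of driving the builder by hand; in practice the parser's
    // tree sink calls these methods. It assumes a `Default` impl for
    // `SyntaxTreeBuilder` and uses the usual `FN_DEF`/`FN_KW` kinds purely for
    // illustration:
    //
    //     let mut builder = SyntaxTreeBuilder::default();
    //     builder.start_node(FN_DEF);
    //     builder.token(FN_KW, "fn".into());
    //     builder.finish_node();
    //     let parse = builder.finish();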
    pub fn finish_node(&mut self) {

@ -1,178 +0,0 @@
use std::{
    fmt,
    ops::{self, Bound},
};

use crate::{SmolStr, SyntaxElement, SyntaxNode, TextRange, TextUnit};

#[derive(Clone)]
pub struct SyntaxText {
    node: SyntaxNode,
    range: TextRange,
}

impl SyntaxText {
    pub(crate) fn new(node: SyntaxNode) -> SyntaxText {
        let range = node.text_range();
        SyntaxText { node, range }
    }

    pub fn try_fold_chunks<T, F, E>(&self, init: T, mut f: F) -> Result<T, E>
    where
        F: FnMut(T, &str) -> Result<T, E>,
    {
        self.node.descendants_with_tokens().try_fold(init, move |acc, element| {
            let res = match element {
                SyntaxElement::Token(token) => {
                    let range = match self.range.intersection(&token.text_range()) {
                        None => return Ok(acc),
                        Some(it) => it,
                    };
                    let slice = if range == token.text_range() {
                        token.text()
                    } else {
                        let range = range - token.text_range().start();
                        &token.text()[range]
                    };
                    f(acc, slice)?
                }
                SyntaxElement::Node(_) => acc,
            };
            Ok(res)
        })
    }
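    // `SyntaxText` is not one contiguous string: the text lives in the leaf
    // tokens, so `try_fold_chunks` visits each token intersecting `self.range`
    // and passes the in-range slice to `f`; everything below builds on it. A
    // hypothetical sketch counting characters without allocating (`text` is an
    // illustrative binding):
    //
    //     let n_chars = text
    //         .try_fold_chunks(0usize, |acc, chunk| Ok::<usize, ()>(acc + chunk.chars().count()))
    //         .unwrap();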
    pub fn try_for_each_chunk<F: FnMut(&str) -> Result<(), E>, E>(
        &self,
        mut f: F,
    ) -> Result<(), E> {
        self.try_fold_chunks((), move |(), chunk| f(chunk))
    }

    pub fn for_each_chunk<F: FnMut(&str)>(&self, mut f: F) {
        enum Void {}
        match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) {
            Ok(()) => (),
            Err(void) => match void {},
        }
    }
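    // `Void` is an uninhabited type: no value of it can ever exist, so the
    // `Err` arm above is statically unreachable and `match void {}` proves
    // that to the compiler. It lets the infallible `for_each_chunk` reuse the
    // fallible `try_for_each_chunk` at no runtime cost.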
    pub fn to_smol_string(&self) -> SmolStr {
        self.to_string().into()
    }

    pub fn contains_char(&self, c: char) -> bool {
        self.try_for_each_chunk(|chunk| if chunk.contains(c) { Err(()) } else { Ok(()) }).is_err()
    }

    pub fn find_char(&self, c: char) -> Option<TextUnit> {
        let mut acc: TextUnit = 0.into();
        let res = self.try_for_each_chunk(|chunk| {
            if let Some(pos) = chunk.find(c) {
                let pos: TextUnit = (pos as u32).into();
                return Err(acc + pos);
            }
            acc += TextUnit::of_str(chunk);
            Ok(())
        });
        found(res)
    }
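    // `contains_char` and `find_char` deliberately invert the usual reading of
    // `Result`: returning `Err` stops the chunk walk early, so `Err` means
    // "found" and carries the answer, which the `found` helper below turns
    // back into an `Option`.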
    pub fn len(&self) -> TextUnit {
        self.range.len()
    }

    pub fn is_empty(&self) -> bool {
        self.range.is_empty()
    }

    pub fn slice(&self, range: impl ops::RangeBounds<TextUnit>) -> SyntaxText {
        let start = match range.start_bound() {
            Bound::Included(&b) => b,
            Bound::Excluded(_) => panic!("utf-aware slicing can't work this way"),
            Bound::Unbounded => 0.into(),
        };
        let end = match range.end_bound() {
            Bound::Included(_) => panic!("utf-aware slicing can't work this way"),
            Bound::Excluded(&b) => b,
            Bound::Unbounded => self.len(),
        };
        assert!(start <= end);
        let len = end - start;
        let start = self.range.start() + start;
        let end = start + len;
        assert!(
            start <= end,
            "invalid slice, range: {:?}, slice: {:?}",
            self.range,
            (range.start_bound(), range.end_bound()),
        );
        let range = TextRange::from_to(start, end);
        assert!(
            range.is_subrange(&self.range),
            "invalid slice, range: {:?}, slice: {:?}",
            self.range,
            range,
        );
        SyntaxText { node: self.node.clone(), range }
    }
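    // Offsets passed to `slice` are relative to this text, not to the file,
    // and the result is another lazy `SyntaxText` over the same node. A
    // hypothetical sketch trimming one quote from each end of a string
    // literal's text (`text` is illustrative):
    //
    //     let one = TextUnit::from(1);
    //     let unquoted = text.slice(one..text.len() - one);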
    pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> {
        let offset = offset.into();
        let mut start: TextUnit = 0.into();
        let res = self.try_for_each_chunk(|chunk| {
            let end = start + TextUnit::of_str(chunk);
            if start <= offset && offset < end {
                let off: usize = u32::from(offset - start) as usize;
                return Err(chunk[off..].chars().next().unwrap());
            }
            start = end;
            Ok(())
        });
        found(res)
    }
}

fn found<T>(res: Result<(), T>) -> Option<T> {
    match res {
        Ok(()) => None,
        Err(it) => Some(it),
    }
}

impl fmt::Debug for SyntaxText {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.to_string(), f)
    }
}

impl fmt::Display for SyntaxText {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
    }
}

impl From<SyntaxText> for String {
    fn from(text: SyntaxText) -> String {
        text.to_string()
    }
}

impl PartialEq<str> for SyntaxText {
    fn eq(&self, mut rhs: &str) -> bool {
        self.try_for_each_chunk(|chunk| {
            if !rhs.starts_with(chunk) {
                return Err(());
            }
            rhs = &rhs[chunk.len()..];
            Ok(())
        })
        .is_ok()
            // every chunk matched a prefix of `rhs`; equality additionally
            // requires that all of `rhs` was consumed
            && rhs.is_empty()
    }
}

impl PartialEq<&'_ str> for SyntaxText {
    fn eq(&self, rhs: &&str) -> bool {
        self == *rhs
    }
}
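// These `PartialEq` impls compare against a `&str` chunk by chunk, without
// materialising the whole text, so a check like the hypothetical
// `node.text() == "self"` stays allocation-free.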
@ -5,16 +5,16 @@ mod field_expr;

use crate::{
    algo::visit::{visitor_ctx, VisitorCtx},
    ast, AstNode, SourceFile, SyntaxError,
    ast, SyntaxError,
    SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING},
    SyntaxNode, TextUnit, T,
};

pub(crate) use unescape::EscapeError;

pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
    let mut errors = Vec::new();
    for node in file.syntax().descendants() {
    for node in root.descendants() {
        let _ = visitor_ctx(&mut errors)
            .visit::<ast::Literal, _>(validate_literal)
            .visit::<ast::Block, _>(block::validate_block_node)
@ -140,11 +140,6 @@
        "key": "ctrl+shift+m",
        "when": "editorTextFocus && editorLangId == rust"
    },
    {
        "command": "rust-analyzer.extendSelection",
        "key": "shift+alt+right",
        "when": "editorTextFocus && editorLangId == rust"
    },
    {
        "command": "rust-analyzer.joinLines",
        "key": "ctrl+shift+j",