fix typos in mbe tests
parent c8bcfe6a05
commit 61e1474ab3
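Throughout the diff below, bare parse calls such as `db.parse(file_id)` and `SourceFile::parse(text)` gain a `.tree`, `.ok()`, or `.errors` suffix, which suggests the parse entry points now return a `Parse` value bundling the best-effort syntax tree with its syntax errors, instead of a bare `TreeArc<SourceFile>`. A minimal, self-contained sketch of that pattern; the names and shapes are inferred from the call sites below, not ra_syntax's actual definition:

    // Parsing never fails: callers always get a tree plus the errors,
    // and decide per call site whether errors are acceptable.
    #[derive(Debug)]
    pub struct Parse<T> {
        pub tree: T,
        pub errors: Vec<String>, // stand-in for a list of SyntaxError
    }

    impl<T> Parse<T> {
        // Mirrors the `.ok().unwrap()` calls in the tests below:
        // succeed only when the source parsed cleanly.
        pub fn ok(self) -> Result<T, Vec<String>> {
            if self.errors.is_empty() {
                Ok(self.tree)
            } else {
                Err(self.errors)
            }
        }
    }

    fn main() {
        let parse = Parse { tree: "fn main() {}", errors: Vec::new() };
        assert!(parse.ok().is_ok());
    }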
@@ -71,7 +71,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
     where
         F: FnOnce(AssistCtx<DB>) -> T,
     {
-        let source_file = &db.parse(frange.file_id);
+        let source_file = &db.parse(frange.file_id).tree;
         let assist =
             if should_compute_edit { Assist::Resolved(vec![]) } else { Assist::Unresolved(vec![]) };

@@ -283,7 +283,7 @@ impl AstBuilder<ast::NameRef> {
 }

 fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
-    let file = SourceFile::parse(text);
+    let file = SourceFile::parse(text).tree;
     let res = file.syntax().descendants().find_map(N::cast).unwrap().to_owned();
     res
 }
@@ -292,7 +292,7 @@ mod tokens {
     use once_cell::sync::Lazy;
     use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T};

-    static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
+    static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;").tree);

    pub(crate) fn comma() -> SyntaxToken<'static> {
        SOURCE_FILE
@@ -326,7 +326,7 @@ mod tokens {

     impl WsBuilder {
         pub(crate) fn new(text: &str) -> WsBuilder {
-            WsBuilder(SourceFile::parse(text))
+            WsBuilder(SourceFile::parse(text).ok().unwrap())
         }
         pub(crate) fn ws(&self) -> SyntaxToken<'_> {
             self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
@@ -34,7 +34,7 @@ fn main() -> Result<()> {
             if !matches.is_present("no-dump") {
                 println!("{}", file.syntax().debug_dump());
             }
-            ::std::mem::forget(file);
+            std::mem::forget(file);
         }
         ("symbols", _) => {
             let file = file()?;
@@ -60,11 +60,11 @@ fn main() -> Result<()> {

 fn file() -> Result<TreeArc<SourceFile>> {
     let text = read_stdin()?;
-    Ok(SourceFile::parse(&text))
+    Ok(SourceFile::parse(&text).tree)
 }

 fn read_stdin() -> Result<String> {
     let mut buff = String::new();
-    ::std::io::stdin().read_to_string(&mut buff)?;
+    std::io::stdin().read_to_string(&mut buff)?;
     Ok(buff)
 }
@@ -10,7 +10,7 @@ use crate::{FilePosition, CallInfo, FunctionSignature, db::RootDatabase};

 /// Computes parameter information for the given call expression.
 pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     let syntax = file.syntax();

     // Find the calling expression and it's NameRef
@@ -138,7 +138,7 @@ impl LibraryData {
         files: Vec<(FileId, RelativePathBuf, Arc<String>)>,
     ) -> LibraryData {
         let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| {
-            let file = SourceFile::parse(text);
+            let file = SourceFile::parse(text).tree;
             (*file_id, file)
         }));
         let mut root_change = RootChange::default();
@@ -51,8 +51,8 @@ pub use crate::completion::completion_item::{CompletionItem, CompletionItemKind,
 /// identifier prefix/fuzzy match should be done higher in the stack, together
 /// with ordering of completions (currently this is done by the client).
 pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> {
-    let original_file = db.parse(position.file_id);
-    let ctx = CompletionContext::new(db, &original_file, position)?;
+    let original_parse = db.parse(position.file_id);
+    let ctx = CompletionContext::new(db, &original_parse, position)?;

     let mut acc = Completions::default();

@@ -1,6 +1,6 @@
 use ra_text_edit::AtomTextEdit;
 use ra_syntax::{
-    AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken,
+    AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken, Parse,
     ast,
     algo::{find_token_at_offset, find_covering_element, find_node_at_offset},
     SyntaxKind::*,
@@ -43,11 +43,12 @@ pub(crate) struct CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a db::RootDatabase,
-        original_file: &'a SourceFile,
+        original_parse: &'a Parse,
         position: FilePosition,
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
-        let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
+        let token =
+            find_token_at_offset(original_parse.tree.syntax(), position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
@@ -69,7 +70,7 @@ impl<'a> CompletionContext<'a> {
             dot_receiver: None,
             is_call: false,
         };
-        ctx.fill(original_file, position.offset);
+        ctx.fill(&original_parse, position.offset);
         Some(ctx)
     }

@@ -82,13 +83,13 @@ impl<'a> CompletionContext<'a> {
         }
     }

-    fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) {
+    fn fill(&mut self, original_parse: &'a Parse, offset: TextUnit) {
         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file = {
             let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
-            original_file.reparse(&edit)
+            original_parse.reparse(&edit).tree
         };

         // First, let's try to complete a reference to some declaration.
@@ -99,7 +100,7 @@ impl<'a> CompletionContext<'a> {
                 self.is_param = true;
                 return;
             }
-            self.classify_name_ref(original_file, name_ref);
+            self.classify_name_ref(&original_parse.tree, name_ref);
         }

         // Otherwise, see if this is a declaration. We can use heuristics to
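The `fill` change above keeps the fake-ident trick: before classifying the completion context, the source is patched at the cursor with a placeholder identifier so the file still parses, while the original tree is used for the actual completions. A toy string-splicing illustration of the idea (the real code goes through `AtomTextEdit` and incremental `reparse`, not raw strings):

    fn with_fake_ident(text: &str, offset: usize) -> String {
        // Splice the placeholder in at the cursor so `foo.<cursor>` becomes
        // a syntactically valid field access, `foo.intellijRulezz`.
        let mut patched = String::with_capacity(text.len() + "intellijRulezz".len());
        patched.push_str(&text[..offset]);
        patched.push_str("intellijRulezz");
        patched.push_str(&text[offset..]);
        patched
    }

    fn main() {
        let src = "fn main() { foo. }";
        let offset = src.find('.').unwrap() + 1; // cursor right after the dot
        assert_eq!(with_fake_ident(src, offset), "fn main() { foo.intellijRulezz }");
    }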
@@ -4,7 +4,7 @@ use itertools::Itertools;
 use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}};
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    T, Location, SourceFile, TextRange, SyntaxNode,
+    T, Location, TextRange, SyntaxNode,
     ast::{self, AstNode, NamedFieldList, NamedField},
 };
 use ra_assists::ast_editor::{AstEditor, AstBuilder};
@@ -21,10 +21,17 @@ pub enum Severity {

 pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
     let _p = profile("diagnostics");
-    let source_file = db.parse(file_id);
+    let parse = db.parse(file_id);
     let mut res = Vec::new();

-    syntax_errors(&mut res, &source_file);
+    res.extend(parse.errors.iter().map(|err| Diagnostic {
+        range: location_to_range(err.location()),
+        message: format!("Syntax Error: {}", err),
+        severity: Severity::Error,
+        fix: None,
+    }));
+
+    let source_file = parse.tree;

     for node in source_file.syntax().descendants() {
         check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
@@ -51,8 +58,9 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
             })
         })
         .on::<hir::diagnostics::MissingFields, _>(|d| {
+            //TODO: commment
             let file_id = d.file().original_file(db);
-            let source_file = db.parse(file_id);
+            let source_file = db.parse(file_id).tree;
             let syntax_node = d.syntax_node_ptr();
             let node = NamedFieldList::cast(syntax_node.to_node(source_file.syntax())).unwrap();
             let mut ast_editor = AstEditor::new(node);
@@ -77,21 +85,11 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
     drop(sink);
     res.into_inner()
 }

-fn syntax_errors(acc: &mut Vec<Diagnostic>, source_file: &SourceFile) {
-    fn location_to_range(location: Location) -> TextRange {
-        match location {
-            Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
-            Location::Range(range) => range,
-        }
-    }
+fn location_to_range(location: Location) -> TextRange {
+    match location {
+        Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
+        Location::Range(range) => range,
+    }
+}

-    acc.extend(source_file.errors().into_iter().map(|err| Diagnostic {
-        range: location_to_range(err.location()),
-        message: format!("Syntax Error: {}", err),
-        severity: Severity::Error,
-        fix: None,
-    }));
 }

 fn check_unnecessary_braces_in_use_statement(
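With `syntax_errors` gone, the error-to-diagnostic mapping is inlined at the call site above and `location_to_range` survives as a free function. A standalone, runnable rendition of its logic, with plain `(start, end)` tuples standing in for `TextRange` (assuming `TextRange::offset_len(offset, 1.into())` denotes a one-character span at `offset`):

    #[derive(Debug, PartialEq)]
    enum Location {
        Offset(u32),
        Range((u32, u32)),
    }

    // A bare error offset becomes a one-character-wide range, so every
    // diagnostic ends up with a usable span.
    fn location_to_range(location: Location) -> (u32, u32) {
        match location {
            Location::Offset(offset) => (offset, offset + 1),
            Location::Range(range) => range,
        }
    }

    fn main() {
        assert_eq!(location_to_range(Location::Offset(7)), (7, 8));
        assert_eq!(location_to_range(Location::Range((3, 9))), (3, 9));
    }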
@@ -177,6 +175,7 @@ fn check_struct_shorthand_initialization(
 mod tests {
     use test_utils::assert_eq_text;
     use insta::assert_debug_snapshot_matches;
+    use ra_syntax::SourceFile;

     use crate::mock_analysis::single_file;

@@ -185,7 +184,7 @@ mod tests {
     type DiagnosticChecker = fn(&mut Vec<Diagnostic>, FileId, &SyntaxNode) -> Option<()>;

     fn check_not_applicable(code: &str, func: DiagnosticChecker) {
-        let file = SourceFile::parse(code);
+        let file = SourceFile::parse(code).tree;
         let mut diagnostics = Vec::new();
         for node in file.syntax().descendants() {
             func(&mut diagnostics, FileId(0), node);
@@ -194,7 +193,7 @@ mod tests {
     }

     fn check_apply(before: &str, after: &str, func: DiagnosticChecker) {
-        let file = SourceFile::parse(before);
+        let file = SourceFile::parse(before).tree;
         let mut diagnostics = Vec::new();
         for node in file.syntax().descendants() {
             func(&mut diagnostics, FileId(0), node);
@@ -79,7 +79,7 @@ impl NavigationTarget {
         file_id: FileId,
         pat: AstPtr<ast::Pat>,
     ) -> NavigationTarget {
-        let file = db.parse(file_id);
+        let file = db.parse(file_id).tree;
         let (name, full_range) = match pat.to_node(file.syntax()).kind() {
             ast::PatKind::BindPat(pat) => return NavigationTarget::from_bind_pat(file_id, &pat),
             _ => ("_".into(), pat.syntax_node_ptr().range()),
@@ -290,7 +290,7 @@ impl NavigationTarget {
     }

     pub(crate) fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> {
-        let source_file = db.parse(self.file_id());
+        let source_file = db.parse(self.file_id()).tree;
         let source_file = source_file.syntax();
         let node = source_file
             .descendants()
@@ -183,7 +183,9 @@ fn obsolete() {}
 #[deprecated(note = "for awhile")]
 fn very_obsolete() {}
 "#,
-    );
+    )
+    .ok()
+    .unwrap();
     let structure = file_structure(&file);
     assert_debug_snapshot_matches!("file_structure", structure);
 }
@@ -11,7 +11,7 @@ use crate::{FileRange, db::RootDatabase};

 // FIXME: restore macro support
 pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
-    let source_file = db.parse(frange.file_id);
+    let source_file = db.parse(frange.file_id).tree;
     try_extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range)
 }

@@ -212,7 +212,7 @@ mod tests {

     fn do_check(before: &str, afters: &[&str]) {
         let (cursor, before) = extract_offset(before);
-        let file = SourceFile::parse(&before);
+        let file = SourceFile::parse(&before).tree;
         let mut range = TextRange::offset_len(cursor, 0.into());
         for &after in afters {
             range = try_extend_selection(file.syntax(), range).unwrap();
@@ -191,7 +191,7 @@ mod tests {

     fn do_check(text: &str, fold_kinds: &[FoldKind]) {
         let (ranges, text) = extract_ranges(text, "fold");
-        let file = SourceFile::parse(&text);
+        let file = SourceFile::parse(&text).tree;
         let folds = folding_ranges(&file);

         assert_eq!(
@@ -19,7 +19,7 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     let syntax = file.syntax();
     if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
         let navs = reference_definition(db, position.file_id, name_ref).to_vec();
@@ -10,7 +10,7 @@ pub(crate) fn goto_type_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;

     let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| {
         token
@@ -68,7 +68,7 @@ impl HoverResult {
 }

 pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     let mut res = HoverResult::new();

     let mut range = None;
@@ -120,7 +120,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
 }

 pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
-    let file = db.parse(frange.file_id);
+    let file = db.parse(frange.file_id).tree;
     let syntax = file.syntax();
     let leaf_node = find_covering_element(syntax, frange.range);
     // if we picked identifier, expand to pattern/expression
@@ -11,7 +11,7 @@ pub(crate) fn goto_implementation(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     let syntax = file.syntax();

     let module = source_binder::module_from_position(db, position)?;
@@ -506,7 +506,7 @@ fn foo() {

     fn check_join_lines_sel(before: &str, after: &str) {
         let (sel, before) = extract_range(before);
-        let file = SourceFile::parse(&before);
+        let file = SourceFile::parse(&before).tree;
         let result = join_lines(&file, sel);
         let actual = result.apply(&before);
         assert_eq_text!(after, &actual);
@@ -314,7 +314,7 @@ impl Analysis {

     /// Gets the syntax tree of the file.
     pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> {
-        self.db.parse(file_id).clone()
+        self.db.parse(file_id).tree
     }

     /// Gets the file's `LineIndex`: data structure to convert between absolute
@@ -331,7 +331,7 @@ impl Analysis {
     /// Returns position of the matching brace (all types of braces are
     /// supported).
     pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> {
-        let file = self.db.parse(position.file_id);
+        let file = self.db.parse(position.file_id).tree;
         matching_brace::matching_brace(&file, position.offset)
     }

@@ -344,7 +344,7 @@ impl Analysis {
     /// Returns an edit to remove all newlines in the range, cleaning up minor
     /// stuff like trailing commas.
     pub fn join_lines(&self, frange: FileRange) -> SourceChange {
-        let file = self.db.parse(frange.file_id);
+        let file = self.db.parse(frange.file_id).tree;
         let file_edit = SourceFileEdit {
             file_id: frange.file_id,
             edit: join_lines::join_lines(&file, frange.range),
@@ -362,7 +362,7 @@ impl Analysis {
     /// this works when adding `let =`.
     // FIXME: use a snippet completion instead of this hack here.
     pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> {
-        let file = self.db.parse(position.file_id);
+        let file = self.db.parse(position.file_id).tree;
         let edit = typing::on_eq_typed(&file, position.offset)?;
         Some(SourceChange::source_file_edit(
             "add semicolon",
@@ -378,13 +378,13 @@ impl Analysis {
     /// Returns a tree representation of symbols in the file. Useful to draw a
     /// file outline.
     pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
-        let file = self.db.parse(file_id);
+        let file = self.db.parse(file_id).tree;
         file_structure(&file)
     }

     /// Returns the set of folding ranges.
     pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
-        let file = self.db.parse(file_id);
+        let file = self.db.parse(file_id).tree;
         folding_ranges::folding_ranges(&file)
     }

@@ -31,7 +31,7 @@ mod tests {
     fn test_matching_brace() {
         fn do_check(before: &str, after: &str) {
             let (pos, before) = extract_offset(before);
-            let file = SourceFile::parse(&before);
+            let file = SourceFile::parse(&before).tree;
             let new_pos = match matching_brace(&file, pos) {
                 None => pos,
                 Some(pos) => pos,
@@ -60,7 +60,7 @@ pub(crate) fn find_all_refs(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<ReferenceSearchResult> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     let (binding, analyzer) = find_binding(db, &file, position)?;
     let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);

@@ -99,7 +99,7 @@ pub(crate) fn rename(
     position: FilePosition,
     new_name: &str,
 ) -> Option<SourceChange> {
-    let source_file = db.parse(position.file_id);
+    let source_file = db.parse(position.file_id).tree;
     let syntax = source_file.syntax();

     if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) {
@@ -22,7 +22,7 @@ pub enum RunnableKind {
 }

 pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
-    let source_file = db.parse(file_id);
+    let source_file = db.parse(file_id).tree;
     source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
 }

@@ -4,7 +4,7 @@ use std::{
     sync::Arc,
 };

-use ra_syntax::{AstNode, TreeArc, SourceFile};
+use ra_syntax::{AstNode, Parse};
 use ra_db::{
     ParseQuery, FileTextQuery, SourceRootId,
     salsa::{Database, debug::{DebugQueryTable, TableEntry}},
@@ -72,17 +72,17 @@ impl fmt::Display for SyntaxTreeStats {
     }
 }

-impl FromIterator<TableEntry<FileId, TreeArc<SourceFile>>> for SyntaxTreeStats {
+impl FromIterator<TableEntry<FileId, Parse>> for SyntaxTreeStats {
     fn from_iter<T>(iter: T) -> SyntaxTreeStats
     where
-        T: IntoIterator<Item = TableEntry<FileId, TreeArc<SourceFile>>>,
+        T: IntoIterator<Item = TableEntry<FileId, Parse>>,
     {
         let mut res = SyntaxTreeStats::default();
         for entry in iter {
             res.total += 1;
             if let Some(value) = entry.value {
                 res.retained += 1;
-                res.retained_size += value.syntax().memory_size_of_subtree();
+                res.retained_size += value.tree.syntax().memory_size_of_subtree();
             }
         }
         res
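The stats collector keeps its shape; only the cached value type changes from a bare tree to `Parse`, so the retained tree's size is now reached through `.tree`. A compact runnable analogue of the same `FromIterator` folding pattern, with `Option<usize>` standing in for salsa's `TableEntry` values (illustrative names, not the real API):

    #[derive(Debug, Default)]
    struct SyntaxTreeStats {
        total: usize,
        retained: usize,
        retained_size: usize,
    }

    impl FromIterator<Option<usize>> for SyntaxTreeStats {
        fn from_iter<T: IntoIterator<Item = Option<usize>>>(iter: T) -> SyntaxTreeStats {
            let mut res = SyntaxTreeStats::default();
            for entry in iter {
                res.total += 1;
                // Only entries still held in the cache contribute to the
                // retained counters, as in the diff above.
                if let Some(size) = entry {
                    res.retained += 1;
                    res.retained_size += size;
                }
            }
            res
        }
    }

    fn main() {
        let stats: SyntaxTreeStats = vec![Some(100), None, Some(40)].into_iter().collect();
        assert_eq!((stats.total, stats.retained, stats.retained_size), (3, 2, 140));
    }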
@@ -63,7 +63,7 @@ pub(crate) trait SymbolsDatabase: hir::db::HirDatabase {

 fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
     db.check_canceled();
-    let source_file = db.parse(file_id);
+    let source_file = db.parse(file_id).tree;

     let symbols = source_file_to_file_symbols(&source_file, file_id);

@@ -30,7 +30,7 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {

 pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
     let _p = profile("highlight");
-    let source_file = db.parse(file_id);
+    let source_file = db.parse(file_id).tree;

     fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 {
         fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
@@ -162,7 +162,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
 }

 pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
-    let source_file = db.parse(file_id);
+    let source_file = db.parse(file_id).tree;

     fn rainbowify(seed: u64) -> String {
         use rand::prelude::*;
@@ -14,7 +14,7 @@ pub(crate) fn syntax_tree(
     text_range: Option<TextRange>,
 ) -> String {
     if let Some(text_range) = text_range {
-        let file = db.parse(file_id);
+        let file = db.parse(file_id).tree;
         let node = match algo::find_covering_element(file.syntax(), text_range) {
             SyntaxElement::Node(node) => node,
             SyntaxElement::Token(token) => {
@@ -27,7 +27,7 @@ pub(crate) fn syntax_tree(

         node.debug_dump()
     } else {
-        db.parse(file_id).syntax().debug_dump()
+        db.parse(file_id).tree.syntax().debug_dump()
     }
 }

@@ -84,8 +84,8 @@ fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<Str

     // If the "file" parsed without errors,
     // return its syntax
-    if parsed.errors().is_empty() {
-        return Some(parsed.syntax().debug_dump());
+    if parsed.errors.is_empty() {
+        return Some(parsed.tree.syntax().debug_dump());
     }

     None
@@ -9,7 +9,7 @@ pub fn check_action<F: Fn(&SourceFile, TextUnit) -> Option<TextEdit>>(
     f: F,
 ) {
     let (before_cursor_pos, before) = extract_offset(before);
-    let file = SourceFile::parse(&before);
+    let file = SourceFile::parse(&before).ok().unwrap();
     let result = f(&file, before_cursor_pos).expect("code action is not applicable");
     let actual = result.apply(&before);
     let actual_cursor_pos =
@@ -10,7 +10,7 @@ use ra_db::{FilePosition, SourceDatabase};
 use crate::{db::RootDatabase, SourceChange, SourceFileEdit};

 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     let comment = find_token_at_offset(file.syntax(), position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
@@ -85,7 +85,7 @@ pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
 }

 pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     assert_eq!(file.syntax().text().char_at(position.offset), Some('.'));

     let whitespace = find_token_at_offset(file.syntax(), position.offset)
@@ -138,7 +138,7 @@ mod tests {
         let mut edit = TextEditBuilder::default();
         edit.insert(offset, "=".to_string());
         let before = edit.finish().apply(&before);
-        let file = SourceFile::parse(&before);
+        let file = SourceFile::parse(&before).tree;
         if let Some(result) = on_eq_typed(&file, offset) {
             let actual = result.apply(&before);
             assert_eq_text!(after, &actual);
@@ -384,7 +384,7 @@ mod tests {
         }
 "#,
         );
-        let expansion = expand(&rules, "literals!(foo)");
+        let expansion = expand(&rules, "literals!(foo);");
         let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]);
         let mut tt_src = SubtreeTokenSource::new(&buffer);
         let mut tokens = vec![];
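This and the remaining mbe test fixes all add a trailing semicolon to the macro-call fixture. The likely reason (my reading, not stated in the commit message): these fixtures are parsed in item position, and a macro invoked with `(` `)` or `[` `]` delimiters at item or statement position only parses with a terminating `;`. A runnable reminder, with an illustrative macro body rather than the test's actual rules:

    macro_rules! structs {
        ($($i:ident),*) => {
            $(struct $i { field: u32 })*
        };
    }

    // Parenthesized macro calls at item position need the trailing `;`;
    // `structs!(Foo, Bar)` without it is a syntax error here.
    structs!(Foo, Bar);

    fn main() {
        let _ = (Foo { field: 1 }, Bar { field: 2 });
    }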
@@ -423,7 +423,7 @@ mod tests {
         }
 "#,
         );
-        let expansion = expand(&rules, "stmts!()");
+        let expansion = expand(&rules, "stmts!();");
         assert!(token_tree_to_expr(&expansion).is_err());
     }
 }
@@ -95,7 +95,7 @@ pub(crate) fn expand_to_expr(
 pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
     // wrap the given text to a macro call
     let wrapped = format!("wrap_macro!( {} )", text);
-    let wrapped = ast::SourceFile::parse(&wrapped).ok().unwrap();
+    let wrapped = ast::SourceFile::parse(&wrapped).tree;
     let wrapped = wrapped.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let mut wrapped = ast_to_token_tree(wrapped).unwrap().0;
     wrapped.delimiter = tt::Delimiter::None;
@@ -378,7 +378,7 @@ fn test_match_group_with_multichar_sep() {
     assert_expansion(
         MacroKind::Items,
         &rules,
-        "foo! (fn baz {true true} )",
+        "foo! (fn baz {true true} );",
         "fn baz () -> bool {true &&true}",
     );
 }
@@ -392,7 +392,7 @@ fn test_match_group_zero_match() {
         }"#,
     );

-    assert_expansion(MacroKind::Items, &rules, "foo! ()", "");
+    assert_expansion(MacroKind::Items, &rules, "foo! ();", "");
 }

 #[test]
@@ -404,7 +404,7 @@ fn test_match_group_in_group() {
         }"#,
     );

-    assert_expansion(MacroKind::Items, &rules, "foo! ( (a b) )", "(a b)");
+    assert_expansion(MacroKind::Items, &rules, "foo! ( (a b) );", "(a b)");
 }

 #[test]
@@ -418,7 +418,7 @@ fn test_expand_to_item_list() {
         }
 ",
     );
-    let expansion = expand(&rules, "structs!(Foo, Bar)");
+    let expansion = expand(&rules, "structs!(Foo, Bar);");
     let tree = token_tree_to_macro_items(&expansion);
     assert_eq!(
         tree.unwrap().syntax().debug_dump().trim(),
@@ -490,7 +490,7 @@ fn test_expand_literals_to_token_tree() {
         }
 "#,
     );
-    let expansion = expand(&rules, "literals!(foo)");
+    let expansion = expand(&rules, "literals!(foo);");
     let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees;

     // [let] [a] [=] ['c'] [;]
@@ -586,7 +586,7 @@ fn test_match_literal() {
         }
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, "foo! ['(']", "fn foo () {}");
+    assert_expansion(MacroKind::Items, &rules, "foo! ['('];", "fn foo () {}");
 }

 // The following tests are port from intellij-rust directly
@@ -725,7 +725,7 @@ fn test_last_expr() {
     assert_expansion(
         MacroKind::Items,
         &rules,
-        "vec!(1,2,3)",
+        "vec!(1,2,3);",
         "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}",
     );
 }
@@ -902,7 +902,7 @@ fn test_meta_doc_comments() {
             MultiLines Doc
             */
         }"#,
-        "# [doc = \" Single Line Doc 1\"] # [doc = \" \\\\n MultiLines Doc\\\\n \"] fn bar () {}",
+        "# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}",
     );
 }

@@ -950,7 +950,7 @@ fn test_literal() {
         }
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, r#"foo!(u8 0)"#, r#"const VALUE : u8 = 0 ;"#);
+    assert_expansion(MacroKind::Items, &rules, r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#);
 }

 #[test]
@@ -1017,12 +1017,12 @@ fn test_vec() {
     assert_expansion(
         MacroKind::Items,
         &rules,
-        r#"vec![1u32,2]"#,
+        r#"vec![1u32,2];"#,
         r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#,
     );

     assert_eq!(
-        expand_to_expr(&rules, r#"vec![1u32,2]"#).syntax().debug_dump().trim(),
+        expand_to_expr(&rules, r#"vec![1u32,2];"#).syntax().debug_dump().trim(),
         r#"BLOCK_EXPR@[0; 45)
   BLOCK@[0; 45)
     L_CURLY@[0; 1) "{"
@@ -1161,7 +1161,7 @@ macro_rules! generate_pattern_iterators {
 "#,
     );

-    assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str )"#,
+    assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
         "fn foo () {}");
 }

@@ -1208,7 +1208,6 @@ $body: block; )+
         )+
         }
     }
-    }
 "#,
     );
