use std::{
    sync::{
        Arc,
        atomic::{AtomicBool, Ordering::SeqCst},
    },
    fmt,
    time::Instant,
    collections::{HashMap, HashSet, VecDeque},
    panic,
};

use rayon::prelude::*;
use once_cell::sync::OnceCell;
use libeditor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit};
use libsyntax2::{
    TextUnit, TextRange, SmolStr, File, AstNode,
    SyntaxKind::*,
    ast::{self, NameOwner},
};

use {
    FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
    module_map::{ModuleMap, ChangeKind, Problem},
    symbol_index::FileSymbols,
    JobToken, CrateGraph, CrateId,
};

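/// `AnalysisHostImpl` owns the canonical, mutable state of the world (file
/// texts, module map, crate graph) behind an `Arc`, so that read-only
/// snapshots can share it cheaply.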
#[derive(Debug)]
pub(crate) struct AnalysisHostImpl {
    data: Arc<WorldData>,
}

impl AnalysisHostImpl {
    pub fn new() -> AnalysisHostImpl {
        AnalysisHostImpl {
            data: Arc::new(WorldData::default()),
        }
    }

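    /// Creates a read-only snapshot (`AnalysisImpl`) sharing the current
    /// `WorldData`; later mutations go through `Arc::make_mut`, so existing
    /// snapshots are unaffected.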
    pub fn analysis(
        &self,
        file_resolver: Arc<dyn FileResolver>,
    ) -> AnalysisImpl {
        AnalysisImpl {
            needs_reindex: AtomicBool::new(false),
            file_resolver,
            data: self.data.clone(),
        }
    }

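    /// Applies a batch of file changes: `Some(text)` inserts or updates a file,
    /// `None` deletes it. The module map is told whether each change is an
    /// insert, update, or delete.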
    pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
        let data = self.data_mut();
        for (file_id, text) in changes {
            let change_kind = if data.file_map.remove(&file_id).is_some() {
                if text.is_some() {
                    ChangeKind::Update
                } else {
                    ChangeKind::Delete
                }
            } else {
                ChangeKind::Insert
            };
            data.module_map.update_file(file_id, change_kind);
            data.file_map.remove(&file_id);
            if let Some(text) = text {
                let file_data = FileData::new(text);
                data.file_map.insert(file_id, Arc::new(file_data));
            } else {
                data.file_map.remove(&file_id);
            }
        }
    }

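    /// Replaces the crate graph. Panics if two crates claim the same root file,
    /// since a file can be the root of at most one crate.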
    pub fn set_crate_graph(&mut self, graph: CrateGraph) {
        let mut visited = HashSet::new();
        for &file_id in graph.crate_roots.values() {
            if !visited.insert(file_id) {
                panic!("duplicate crate root: {:?}", file_id);
            }
        }
        self.data_mut().crate_graph = graph;
    }

    fn data_mut(&mut self) -> &mut WorldData {
        Arc::make_mut(&mut self.data)
    }
}

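/// A read-only snapshot of the world, handed out to analysis consumers.
/// Cloning it is cheap: only the `Arc` and the reindex flag are copied.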
pub(crate) struct AnalysisImpl {
    needs_reindex: AtomicBool,
    file_resolver: Arc<dyn FileResolver>,
    data: Arc<WorldData>,
}

impl fmt::Debug for AnalysisImpl {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (&*self.data).fmt(f)
    }
}

impl Clone for AnalysisImpl {
    fn clone(&self) -> AnalysisImpl {
        AnalysisImpl {
            needs_reindex: AtomicBool::new(self.needs_reindex.load(SeqCst)),
            file_resolver: Arc::clone(&self.file_resolver),
            data: Arc::clone(&self.data),
        }
    }
}

impl AnalysisImpl {
    pub fn file_syntax(&self, file_id: FileId) -> File {
        self.file_data(file_id).syntax().clone()
    }

    pub fn file_line_index(&self, id: FileId) -> LineIndex {
        let data = self.file_data(id);
        data
            .lines
            .get_or_init(|| LineIndex::new(&data.text))
            .clone()
    }

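    /// Runs a symbol query across every file, triggering a pending reindex
    /// first. The iteration stops early once the job token is canceled.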
    pub fn world_symbols(&self, mut query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        self.reindex();
        self.data.file_map.iter()
            .take_while(move |_| !token.is_canceled())
            .flat_map(move |(id, data)| {
                let symbols = data.symbols();
                query.process(symbols).into_iter().map(move |s| (*id, s))
            })
            .collect()
    }

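    /// Returns the modules that declare the given file as a child, reported as
    /// `MODULE` symbols pointing at the declaring nodes.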
    pub fn parent_module(&self, id: FileId) -> Vec<(FileId, FileSymbol)> {
        let module_map = &self.data.module_map;
        let id = module_map.file2module(id);
        module_map
            .parent_modules(
                id,
                &*self.file_resolver,
                &|file_id| self.file_syntax(file_id),
            )
            .into_iter()
            .map(|(id, name, node)| {
                let id = module_map.module2file(id);
                let sym = FileSymbol {
                    name,
                    node_range: node.range(),
                    kind: MODULE,
                };
                (id, sym)
            })
            .collect()
    }

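    /// Finds the crates containing this file by walking up the module tree
    /// breadth-first until a registered crate root is reached; the `visited`
    /// set guards against cycles.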
    pub fn crate_root(&self, id: FileId) -> Vec<CrateId> {
        let module_map = &self.data.module_map;
        let crate_graph = &self.data.crate_graph;
        let mut res = Vec::new();
        let mut work = VecDeque::new();
        work.push_back(id);
        let mut visited = HashSet::new();
        while let Some(id) = work.pop_front() {
            if let Some(crate_id) = crate_graph.crate_id_for_crate_root(id) {
                res.push(crate_id);
                continue;
            }
            let mid = module_map.file2module(id);
            let parents = module_map
                .parent_module_ids(
                    mid,
                    &*self.file_resolver,
                    &|file_id| self.file_syntax(file_id),
                )
                .into_iter()
                .map(|id| module_map.module2file(id))
                .filter(|&id| visited.insert(id));
            work.extend(parents);
        }
        res
    }

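    /// Best-effort goto-definition: a `NameRef` under the cursor is resolved
    /// through the symbol index, while the name of a `mod foo;` declaration
    /// resolves to the file(s) backing that module. Anything else yields an
    /// empty result.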
    pub fn approximately_resolve_symbol(
        &self,
        id: FileId,
        offset: TextUnit,
        token: &JobToken,
    ) -> Vec<(FileId, FileSymbol)> {
        let file = self.file_syntax(id);
        let syntax = file.syntax();
        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
            return self.index_resolve(name_ref, token);
        }
        if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
            if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
                if module.has_semi() {
                    let file_ids = self.resolve_module(id, module);

                    let res = file_ids.into_iter().map(|id| {
                        let name = module.name()
                            .map(|n| n.text())
                            .unwrap_or_else(|| SmolStr::new(""));
                        let symbol = FileSymbol {
                            name,
                            node_range: TextRange::offset_len(0.into(), 0.into()),
                            kind: MODULE,
                        };
                        (id, symbol)
                    }).collect();

                    return res;
                }
            }
        }
        vec![]
    }

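    /// Collects syntax errors from the parser plus module-resolution problems
    /// from the module map; the latter carry quick fixes (create the missing
    /// file, or move the file so the `mod` declaration becomes valid).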
    pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
        let syntax = self.file_syntax(file_id);
        let mut res = libeditor::diagnostics(&syntax)
            .into_iter()
            .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
            .collect::<Vec<_>>();

        self.data.module_map.problems(
            file_id,
            &*self.file_resolver,
            &|file_id| self.file_syntax(file_id),
            |name_node, problem| {
                let diag = match problem {
                    Problem::UnresolvedModule { candidate } => {
                        let create_file = FileSystemEdit::CreateFile {
                            anchor: file_id,
                            path: candidate.clone(),
                        };
                        let fix = SourceChange {
                            label: "create module".to_string(),
                            source_file_edits: Vec::new(),
                            file_system_edits: vec![create_file],
                            cursor_position: None,
                        };
                        Diagnostic {
                            range: name_node.syntax().range(),
                            message: "unresolved module".to_string(),
                            fix: Some(fix),
                        }
                    }
                    Problem::NotDirOwner { move_to, candidate } => {
                        let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() };
                        let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) };
                        let fix = SourceChange {
                            label: "move file and create module".to_string(),
                            source_file_edits: Vec::new(),
                            file_system_edits: vec![move_file, create_file],
                            cursor_position: None,
                        };
                        Diagnostic {
                            range: name_node.syntax().range(),
                            message: "can't declare module at this location".to_string(),
                            fix: Some(fix),
                        }
                    }
                };
                res.push(diag)
            }
        );
        res
    }

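    /// Computes the code actions applicable at the given offset. Each candidate
    /// assist yields an optional edit; only the applicable ones are converted
    /// into `SourceChange`s.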
    pub fn assists(&self, file_id: FileId, offset: TextUnit) -> Vec<SourceChange> {
        let file = self.file_syntax(file_id);
        let actions = vec![
            ("flip comma", libeditor::flip_comma(&file, offset).map(|f| f())),
            ("add `#[derive]`", libeditor::add_derive(&file, offset).map(|f| f())),
            ("add impl", libeditor::add_impl(&file, offset).map(|f| f())),
        ];
        actions.into_iter()
            .filter_map(|(name, local_edit)| {
                Some(SourceChange::from_local_edit(
                    file_id, name, local_edit?,
                ))
            })
            .collect()
    }

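    /// Resolves a name reference through the symbol index, using an exact-match
    /// query capped at four results.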
    fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        let name = name_ref.text();
        let mut query = Query::new(name.to_string());
        query.exact();
        query.limit(4);
        self.world_symbols(query, token)
    }

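    /// Maps a `mod <name>;` declaration in `id` to the file(s) that could back
    /// the named child module.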
    fn resolve_module(&self, id: FileId, module: ast::Module) -> Vec<FileId> {
        let name = match module.name() {
            Some(name) => name.text(),
            None => return Vec::new(),
        };
        let module_map = &self.data.module_map;
        let id = module_map.file2module(id);
        module_map
            .child_module_by_name(
                id, name.as_str(),
                &*self.file_resolver,
                &|file_id| self.file_syntax(file_id),
            )
            .into_iter()
            .map(|id| module_map.module2file(id))
            .collect()
    }

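    /// If a reindex has been requested, forces symbol computation for every
    /// file in parallel via rayon, so that later queries hit warm caches.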
    fn reindex(&self) {
        if self.needs_reindex.compare_and_swap(true, false, SeqCst) {
            let now = Instant::now();
            let data = &*self.data;
            data.file_map
                .par_iter()
                .for_each(|(_, data)| drop(data.symbols()));
            info!("parallel indexing took {:?}", now.elapsed());
        }
    }

    fn file_data(&self, file_id: FileId) -> Arc<FileData> {
        match self.data.file_map.get(&file_id) {
            Some(data) => data.clone(),
            None => panic!("unknown file: {:?}", file_id),
        }
    }
}

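/// The complete state of the world: the crate graph, per-file data, and the
/// module map. `Clone` plus `Arc::make_mut` gives it copy-on-write semantics.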
#[derive(Clone, Default, Debug)]
struct WorldData {
    crate_graph: CrateGraph,
    file_map: HashMap<FileId, Arc<FileData>>,
    module_map: ModuleMap,
}

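/// Per-file state: the text is the source of truth, while the syntax tree,
/// symbols, and line index are computed lazily and cached in `OnceCell`s.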
#[derive(Debug)]
struct FileData {
    text: String,
    symbols: OnceCell<FileSymbols>,
    syntax: OnceCell<File>,
    lines: OnceCell<LineIndex>,
}

impl FileData {
    fn new(text: String) -> FileData {
        FileData {
            text,
            symbols: OnceCell::new(),
            syntax: OnceCell::new(),
            lines: OnceCell::new(),
        }
    }

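    /// Parses the file on first access and caches the tree. If the parser
    /// panics, the offending text is logged before the panic is re-raised.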
    fn syntax(&self) -> &File {
        let text = &self.text;
        let syntax = &self.syntax;
        match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
            Ok(file) => file,
            Err(err) => {
                error!("Parser panicked on:\n------\n{}\n------\n", &self.text);
                panic::resume_unwind(err)
            }
        }
    }

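    /// Returns the cached tree if one exists, otherwise parses a throwaway
    /// copy without populating the cache (presumably to avoid retaining every
    /// tree during bulk indexing).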
    fn syntax_transient(&self) -> File {
        self.syntax.get().map(|s| s.clone())
            .unwrap_or_else(|| File::parse(&self.text))
    }

    fn symbols(&self) -> &FileSymbols {
        let syntax = self.syntax_transient();
        self.symbols
            .get_or_init(|| FileSymbols::new(&syntax))
    }
}

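// Conversion from a single-file `LocalEdit` (as produced by the `libeditor`
// assists) into the multi-file `SourceChange` shape exposed by this crate.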
impl SourceChange {
    pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange {
        let file_edit = SourceFileEdit {
            file_id,
            edits: edit.edit.into_atoms(),
        };
        SourceChange {
            label: label.to_string(),
            source_file_edits: vec![file_edit],
            file_system_edits: vec![],
            cursor_position: edit.cursor_position
                .map(|offset| Position { offset, file_id })
        }
    }
}

impl CrateGraph {
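    /// Reverse lookup: the crate whose root file is exactly `file_id`, if any.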
    fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
        let (&crate_id, _) = self.crate_roots
            .iter()
            .find(|(_crate_id, &root_id)| root_id == file_id)?;
        Some(crate_id)
    }
}