Merge #292

292: Vfs r=matklad a=matklad

closes #243

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
Commit 9a16cf2bf6

Cargo.lock (generated): 465 lines changed; diff suppressed because it is too large.
@@ -2,4 +2,5 @@
members = [ "crates/*" ]

[profile.release]
incremental = true
debug = true
@@ -30,11 +30,11 @@ impl Default for RootDatabase {
runtime: salsa::Runtime::default(),
id_maps: Default::default(),
};
db.query_mut(ra_db::SourceRootQuery)
.set(ra_db::WORKSPACE, Default::default());
db.query_mut(ra_db::CrateGraphQuery)
.set((), Default::default());
db.query_mut(ra_db::LibrariesQuery)
db.query_mut(ra_db::LocalRootsQuery)
.set((), Default::default());
db.query_mut(ra_db::LibraryRootsQuery)
.set((), Default::default());
db
}
@@ -61,9 +61,11 @@ salsa::database_storage! {
pub(crate) struct RootDatabaseStorage for RootDatabase {
impl ra_db::FilesDatabase {
fn file_text() for ra_db::FileTextQuery;
fn file_relative_path() for ra_db::FileRelativePathQuery;
fn file_source_root() for ra_db::FileSourceRootQuery;
fn source_root() for ra_db::SourceRootQuery;
fn libraries() for ra_db::LibrariesQuery;
fn local_roots() for ra_db::LocalRootsQuery;
fn library_roots() for ra_db::LibraryRootsQuery;
fn crate_graph() for ra_db::CrateGraphQuery;
}
impl ra_db::SyntaxDatabase {
@@ -10,9 +10,8 @@ use ra_syntax::{
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
use ra_db::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE, SyntaxDatabase};
use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use salsa::{Database, ParallelDatabase};
use hir::{
self,
@@ -24,8 +23,8 @@ use hir::{
use crate::{
completion::{completions, CompletionItem},
db,
symbol_index::{SymbolIndex, SymbolsDatabase},
AnalysisChange, Cancelable, CrateId, Diagnostic, FileId,
symbol_index::{SymbolIndex, SymbolsDatabase, LibrarySymbolsQuery},
AnalysisChange, RootChange, Cancelable, CrateId, Diagnostic, FileId,
FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit,
ReferenceResolution,
};
@@ -44,70 +43,41 @@ impl AnalysisHostImpl {
pub fn apply_change(&mut self, change: AnalysisChange) {
log::info!("apply_change {:?}", change);
// self.gc_syntax_trees();

for (file_id, text) in change.files_changed {
self.db
.query_mut(ra_db::FileTextQuery)
.set(file_id, Arc::new(text))
}
if !(change.files_added.is_empty() && change.files_removed.is_empty()) {
let file_resolver = change
.file_resolver
.expect("change resolver when changing set of files");
let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE));
for (file_id, text) in change.files_added {
self.db
.query_mut(ra_db::FileTextQuery)
.set(file_id, Arc::new(text));
self.db
.query_mut(ra_db::FileSourceRootQuery)
.set(file_id, ra_db::WORKSPACE);
source_root.files.insert(file_id);
}
for file_id in change.files_removed {
self.db
.query_mut(ra_db::FileTextQuery)
.set(file_id, Arc::new(String::new()));
source_root.files.remove(&file_id);
}
source_root.file_resolver = file_resolver;
self.db
.query_mut(ra_db::SourceRootQuery)
.set(WORKSPACE, Arc::new(source_root))
}
if !change.libraries_added.is_empty() {
let mut libraries = Vec::clone(&self.db.libraries());
for library in change.libraries_added {
let source_root_id = SourceRootId(1 + libraries.len() as u32);
libraries.push(source_root_id);
let mut files = FxHashSet::default();
for (file_id, text) in library.files {
files.insert(file_id);
log::debug!(
"library file: {:?} {:?}",
file_id,
library.file_resolver.debug_path(file_id)
);
self.db
.query_mut(ra_db::FileSourceRootQuery)
.set_constant(file_id, source_root_id);
self.db
.query_mut(ra_db::FileTextQuery)
.set_constant(file_id, Arc::new(text));
}
let source_root = SourceRoot {
files,
file_resolver: library.file_resolver,
};
if !change.new_roots.is_empty() {
let mut local_roots = Vec::clone(&self.db.local_roots());
for (root_id, is_local) in change.new_roots {
self.db
.query_mut(ra_db::SourceRootQuery)
.set(source_root_id, Arc::new(source_root));
self.db
.query_mut(crate::symbol_index::LibrarySymbolsQuery)
.set(source_root_id, Arc::new(library.symbol_index));
.set(root_id, Default::default());
if is_local {
local_roots.push(root_id);
}
}
self.db
.query_mut(ra_db::LibrariesQuery)
.query_mut(ra_db::LocalRootsQuery)
.set((), Arc::new(local_roots));
}

for (root_id, root_change) in change.roots_changed {
self.apply_root_change(root_id, root_change);
}
for (file_id, text) in change.files_changed {
self.db.query_mut(ra_db::FileTextQuery).set(file_id, text)
}
if !change.libraries_added.is_empty() {
let mut libraries = Vec::clone(&self.db.library_roots());
for library in change.libraries_added {
libraries.push(library.root_id);
self.db
.query_mut(ra_db::SourceRootQuery)
.set(library.root_id, Default::default());
self.db
.query_mut(LibrarySymbolsQuery)
.set_constant(library.root_id, Arc::new(library.symbol_index));
self.apply_root_change(library.root_id, library.root_change);
}
self.db
.query_mut(ra_db::LibraryRootsQuery)
.set((), Arc::new(libraries));
}
if let Some(crate_graph) = change.crate_graph {
@@ -117,6 +87,34 @@ impl AnalysisHostImpl {
}
}

fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) {
let mut source_root = SourceRoot::clone(&self.db.source_root(root_id));
for add_file in root_change.added {
self.db
.query_mut(ra_db::FileTextQuery)
.set(add_file.file_id, add_file.text);
self.db
.query_mut(ra_db::FileRelativePathQuery)
.set(add_file.file_id, add_file.path.clone());
self.db
.query_mut(ra_db::FileSourceRootQuery)
.set(add_file.file_id, root_id);
source_root.files.insert(add_file.path, add_file.file_id);
}
for remove_file in root_change.removed {
self.db
.query_mut(ra_db::FileTextQuery)
.set(remove_file.file_id, Default::default());
self.db
.query_mut(ra_db::FileRelativePathQuery)
.set(remove_file.file_id, Default::default());
source_root.files.remove(&remove_file.path);
}
self.db
.query_mut(ra_db::SourceRootQuery)
.set(root_id, Arc::new(source_root));
}

#[allow(unused)]
/// Ideally, we should call this function from time to time to collect heavy
/// syntax trees. However, if we actually do that, everything is recomputed
@@ -156,21 +154,26 @@ impl AnalysisImpl {
self.db.file_lines(file_id)
}
pub fn world_symbols(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> {
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap(salsa::Snapshot<db::RootDatabase>);
impl Clone for Snap {
fn clone(&self) -> Snap {
Snap(self.0.snapshot())
}
}

let buf: Vec<Arc<SymbolIndex>> = if query.libs {
let snap = Snap(self.db.snapshot());
self.db
.libraries()
.iter()
.map(|&lib_id| self.db.library_symbols(lib_id))
.library_roots()
.par_iter()
.map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id))
.collect()
} else {
let files = &self.db.source_root(WORKSPACE).files;

/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap(salsa::Snapshot<db::RootDatabase>);
impl Clone for Snap {
fn clone(&self) -> Snap {
Snap(self.0.snapshot())
}
let mut files = Vec::new();
for &root in self.db.local_roots().iter() {
let sr = self.db.source_root(root);
files.extend(sr.files.values().map(|&it| it))
}

let snap = Snap(self.db.snapshot());
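The `Snap` newtype in the hunk above is the pattern that makes the parallel symbol search work: `salsa::Snapshot` is not `Clone`, but rayon's `map_with` needs a cloneable seed, so cloning is defined as "take another snapshot". A minimal sketch of the same idea, reusing the `db::RootDatabase`, `library_roots` and `library_symbols` names from this diff; treat it as an illustration, not the exact code:

```rust
use std::sync::Arc;

use rayon::prelude::*;
use salsa::ParallelDatabase;

/// Wrapper so that `Clone` can be implemented in terms of `snapshot()`.
struct Snap(salsa::Snapshot<db::RootDatabase>);

impl Clone for Snap {
    fn clone(&self) -> Snap {
        // Each rayon worker ends up with its own read-only snapshot.
        Snap(self.0.snapshot())
    }
}

fn all_library_symbols(db: &db::RootDatabase) -> Vec<Arc<SymbolIndex>> {
    let snap = Snap(db.snapshot());
    db.library_roots()
        .par_iter()
        // `map_with` clones the seed once per worker thread, not once per item.
        .map_with(snap, |snap, &lib_id| snap.0.library_symbols(lib_id))
        .collect()
}
```

The cost of each clone is one salsa snapshot per thread, which keeps the main write handle free while the pool reads.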
@@ -18,9 +18,9 @@ pub mod mock_analysis;

use std::{fmt, sync::Arc};

use rustc_hash::FxHashMap;
use ra_syntax::{SourceFileNode, TextRange, TextUnit};
use ra_text_edit::AtomTextEdit;
use ra_db::FileResolverImp;
use rayon::prelude::*;
use relative_path::RelativePathBuf;

@@ -39,28 +39,54 @@ pub use hir::FnSignatureInfo;

pub use ra_db::{
Canceled, Cancelable, FilePosition,
CrateGraph, CrateId, FileId, FileResolver
CrateGraph, CrateId, SourceRootId, FileId
};

#[derive(Default)]
pub struct AnalysisChange {
files_added: Vec<(FileId, String)>,
files_changed: Vec<(FileId, String)>,
files_removed: Vec<(FileId)>,
new_roots: Vec<(SourceRootId, bool)>,
roots_changed: FxHashMap<SourceRootId, RootChange>,
files_changed: Vec<(FileId, Arc<String>)>,
libraries_added: Vec<LibraryData>,
crate_graph: Option<CrateGraph>,
file_resolver: Option<FileResolverImp>,
}

#[derive(Default)]
struct RootChange {
added: Vec<AddFile>,
removed: Vec<RemoveFile>,
}

#[derive(Debug)]
struct AddFile {
file_id: FileId,
path: RelativePathBuf,
text: Arc<String>,
}

#[derive(Debug)]
struct RemoveFile {
file_id: FileId,
path: RelativePathBuf,
}

impl fmt::Debug for AnalysisChange {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("AnalysisChange")
.field("files_added", &self.files_added.len())
.field("new_roots", &self.new_roots)
.field("roots_changed", &self.roots_changed)
.field("files_changed", &self.files_changed.len())
.field("files_removed", &self.files_removed.len())
.field("libraries_added", &self.libraries_added.len())
.field("crate_graph", &self.crate_graph)
.field("file_resolver", &self.file_resolver)
.finish()
}
}

impl fmt::Debug for RootChange {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("AnalysisChange")
.field("added", &self.added.len())
.field("removed", &self.removed.len())
.finish()
}
}
@@ -69,14 +95,37 @@ impl AnalysisChange {
pub fn new() -> AnalysisChange {
AnalysisChange::default()
}
pub fn add_file(&mut self, file_id: FileId, text: String) {
self.files_added.push((file_id, text))
pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) {
self.new_roots.push((root_id, is_local));
}
pub fn change_file(&mut self, file_id: FileId, new_text: String) {
pub fn add_file(
&mut self,
root_id: SourceRootId,
file_id: FileId,
path: RelativePathBuf,
text: Arc<String>,
) {
let file = AddFile {
file_id,
path,
text,
};
self.roots_changed
.entry(root_id)
.or_default()
.added
.push(file);
}
pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) {
self.files_changed.push((file_id, new_text))
}
pub fn remove_file(&mut self, file_id: FileId) {
self.files_removed.push(file_id)
pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) {
let file = RemoveFile { file_id, path };
self.roots_changed
.entry(root_id)
.or_default()
.removed
.push(file);
}
pub fn add_library(&mut self, data: LibraryData) {
self.libraries_added.push(data)
@@ -84,9 +133,6 @@ impl AnalysisChange {
pub fn set_crate_graph(&mut self, graph: CrateGraph) {
self.crate_graph = Some(graph);
}
pub fn set_file_resolver(&mut self, file_resolver: Arc<FileResolver>) {
self.file_resolver = Some(FileResolverImp::new(file_resolver));
}
}
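Taken together, these methods replace the old `add_file`/`remove_file`/`set_file_resolver` flow with per-root changes. A hedged sketch of how a client of the new API could populate a fresh host, using only methods visible in this diff; the root id, file id, path and text below are made up for illustration:

```rust
use std::sync::Arc;

use ra_analysis::{AnalysisChange, AnalysisHost, CrateGraph, FileId, SourceRootId};
use relative_path::RelativePathBuf;

fn populate(host: &mut AnalysisHost) {
    let root = SourceRootId(0); // hypothetical local source root
    let file = FileId(1);       // hypothetical file id

    let mut change = AnalysisChange::new();
    change.add_root(root, /* is_local */ true);
    change.add_file(
        root,
        file,
        RelativePathBuf::from_path("src/lib.rs").unwrap(),
        Arc::new("pub fn answer() -> i32 { 42 }".to_string()),
    );
    change.set_crate_graph(CrateGraph::default());

    // All inputs are swapped in one batch; salsa recomputes derived queries lazily.
    host.apply_change(change);
}
```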
/// `AnalysisHost` stores the current state of the world.
@@ -313,20 +359,32 @@ impl Analysis {

#[derive(Debug)]
pub struct LibraryData {
files: Vec<(FileId, String)>,
file_resolver: FileResolverImp,
root_id: SourceRootId,
root_change: RootChange,
symbol_index: SymbolIndex,
}

impl LibraryData {
pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData {
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, text)| {
pub fn prepare(
root_id: SourceRootId,
files: Vec<(FileId, RelativePathBuf, Arc<String>)>,
) -> LibraryData {
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| {
let file = SourceFileNode::parse(text);
(*file_id, file)
}));
let mut root_change = RootChange::default();
root_change.added = files
.into_iter()
.map(|(file_id, path, text)| AddFile {
file_id,
path,
text,
})
.collect();
LibraryData {
files,
file_resolver: FileResolverImp::new(file_resolver),
root_id,
root_change,
symbol_index,
}
}
@@ -4,7 +4,7 @@ use relative_path::{RelativePathBuf};
use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER};
use ra_db::mock::FileMap;

use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FilePosition};
use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FilePosition, SourceRootId};

/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis
/// from a set of in-memory files.
@@ -78,14 +78,16 @@ impl MockAnalysis {
pub fn analysis_host(self) -> AnalysisHost {
let mut host = AnalysisHost::default();
let mut file_map = FileMap::default();
let source_root = SourceRootId(0);
let mut change = AnalysisChange::new();
change.add_root(source_root, true);
for (path, contents) in self.files.into_iter() {
assert!(path.starts_with('/'));
let path = RelativePathBuf::from_path(&path[1..]).unwrap();
let file_id = file_map.add(path);
change.add_file(file_id, contents);
let file_id = file_map.add(path.clone());
change.add_file(source_root, file_id, path, Arc::new(contents));
}
change.set_file_resolver(Arc::new(file_map));
// change.set_file_resolver(Arc::new(file_map));
host.apply_change(change);
host
}
@@ -1,76 +0,0 @@
use std::{
sync::Arc,
hash::{Hash, Hasher},
fmt,
};

use relative_path::RelativePath;

use crate::input::FileId;

pub trait FileResolver: fmt::Debug + Send + Sync + 'static {
fn file_stem(&self, file_id: FileId) -> String;
fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>;
fn debug_path(&self, _file_id: FileId) -> Option<std::path::PathBuf> {
None
}
}

#[derive(Clone, Debug)]
pub struct FileResolverImp {
inner: Arc<FileResolver>,
}

impl PartialEq for FileResolverImp {
fn eq(&self, other: &FileResolverImp) -> bool {
self.inner() == other.inner()
}
}

impl Eq for FileResolverImp {}

impl Hash for FileResolverImp {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.inner().hash(hasher);
}
}

impl FileResolverImp {
pub fn new(inner: Arc<FileResolver>) -> FileResolverImp {
FileResolverImp { inner }
}
pub fn file_stem(&self, file_id: FileId) -> String {
self.inner.file_stem(file_id)
}
pub fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
self.inner.resolve(file_id, path)
}
pub fn debug_path(&self, file_id: FileId) -> Option<std::path::PathBuf> {
self.inner.debug_path(file_id)
}
fn inner(&self) -> *const FileResolver {
&*self.inner
}
}

impl Default for FileResolverImp {
fn default() -> FileResolverImp {
#[derive(Debug)]
struct DummyResolver;
impl FileResolver for DummyResolver {
fn file_stem(&self, _file_: FileId) -> String {
panic!("file resolver not set")
}
fn resolve(
&self,
_file_id: FileId,
_path: &::relative_path::RelativePath,
) -> Option<FileId> {
panic!("file resolver not set")
}
}
FileResolverImp {
inner: Arc::new(DummyResolver),
}
}
}
@@ -1,10 +1,12 @@
use std::sync::Arc;

use rustc_hash::{FxHashSet, FxHashMap};
use rustc_hash::{FxHashMap};
use relative_path::RelativePathBuf;
use ra_syntax::SmolStr;
use salsa;

use crate::file_resolver::FileResolverImp;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32);
@@ -85,6 +87,11 @@ salsa::query_group! {
type FileTextQuery;
storage input;
}
/// Path to a file, relative to the root of its source root.
fn file_relative_path(file_id: FileId) -> RelativePathBuf {
type FileRelativePathQuery;
storage input;
}
fn file_source_root(file_id: FileId) -> SourceRootId {
type FileSourceRootQuery;
storage input;
@@ -93,8 +100,12 @@ salsa::query_group! {
type SourceRootQuery;
storage input;
}
fn libraries() -> Arc<Vec<SourceRootId>> {
type LibrariesQuery;
fn local_roots() -> Arc<Vec<SourceRootId>> {
type LocalRootsQuery;
storage input;
}
fn library_roots() -> Arc<Vec<SourceRootId>> {
type LibraryRootsQuery;
storage input;
}
fn crate_graph() -> Arc<CrateGraph> {
@@ -104,13 +115,7 @@ salsa::query_group! {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);

#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct SourceRoot {
pub file_resolver: FileResolverImp,
pub files: FxHashSet<FileId>,
pub files: FxHashMap<RelativePathBuf, FileId>,
}

pub const WORKSPACE: SourceRootId = SourceRootId(0);
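The important shift in this hunk is `SourceRoot::files` becoming a map from root-relative path to `FileId`: path resolution no longer goes through a `FileResolver` object, it is a plain lookup against data stored in the salsa inputs. A small hedged sketch of that lookup; `resolve_in_root` is a made-up helper name, the trait and query names are the ones declared above:

```rust
use ra_db::{FileId, FilesDatabase, SourceRootId};
use relative_path::RelativePath;

/// Find a file in a given source root by its root-relative path.
fn resolve_in_root(
    db: &impl FilesDatabase,
    root: SourceRootId,
    path: &RelativePath,
) -> Option<FileId> {
    let source_root = db.source_root(root); // Arc<SourceRoot>, an input query
    source_root.files.get(path).copied()
}
```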
@@ -1,6 +1,5 @@
//! ra_db defines basic database traits. Concrete DB is defined by ra_analysis.
mod syntax_ptr;
mod file_resolver;
mod input;
mod loc2id;
pub mod mock;
@@ -24,10 +23,10 @@ impl std::error::Error for Canceled {}

pub use crate::{
syntax_ptr::LocalSyntaxPtr,
file_resolver::{FileResolver, FileResolverImp},
input::{
FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, WORKSPACE,
FileTextQuery, FileSourceRootQuery, SourceRootQuery, LibrariesQuery, CrateGraphQuery,
FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph,
FileTextQuery, FileSourceRootQuery, SourceRootQuery, LocalRootsQuery, LibraryRootsQuery, CrateGraphQuery,
FileRelativePathQuery
},
loc2id::{LocationIntener, NumericId},
};
@@ -1,9 +1,7 @@
use std::sync::Arc;

use rustc_hash::FxHashSet;
use relative_path::{RelativePath, RelativePathBuf};

use crate::{FileId, FileResolver, SourceRoot, FileResolverImp};
use crate::{FileId};

#[derive(Default, Debug, Clone)]
pub struct FileMap(Vec<(FileId, RelativePathBuf)>);
@@ -15,15 +13,6 @@ impl FileMap {
file_id
}

pub fn into_source_root(self) -> SourceRoot {
let files = self.files();
let file_resolver = FileResolverImp::new(Arc::new(self));
SourceRoot {
file_resolver,
files,
}
}

pub fn files(&self) -> FxHashSet<FileId> {
self.iter().map(|(id, _)| id).collect()
}
@@ -38,19 +27,4 @@ impl FileMap {
.iter()
.map(|(id, path)| (*id, path.as_relative_path()))
}

fn path(&self, id: FileId) -> &RelativePath {
self.iter().find(|&(it, _)| it == id).unwrap().1
}
}

impl FileResolver for FileMap {
fn file_stem(&self, id: FileId) -> String {
self.path(id).file_stem().unwrap().to_string()
}
fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> {
let path = self.path(id).join(rel).normalize();
let id = self.iter().find(|&(_, p)| path == p)?.0;
Some(id)
}
}
@@ -5,6 +5,7 @@ version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

[dependencies]
arrayvec = "0.4.9"
log = "0.4.5"
relative-path = "0.4.0"
salsa = "0.8.0"
@@ -2,12 +2,14 @@ use std::sync::Arc;

use parking_lot::Mutex;
use salsa::{self, Database};
use ra_db::{LocationIntener, BaseDatabase, FilePosition, mock::FileMap, FileId, WORKSPACE, CrateGraph};
use ra_db::{LocationIntener, BaseDatabase, FilePosition, FileId, CrateGraph, SourceRoot, SourceRootId};
use relative_path::RelativePathBuf;
use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset};

use crate::{db, DefId, DefLoc};

const WORKSPACE: SourceRootId = SourceRootId(0);

#[derive(Debug)]
pub(crate) struct MockDatabase {
events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>,
@@ -16,10 +18,10 @@ pub(crate) struct MockDatabase {
}

impl MockDatabase {
pub(crate) fn with_files(fixture: &str) -> (MockDatabase, FileMap) {
let (db, file_map, position) = MockDatabase::from_fixture(fixture);
pub(crate) fn with_files(fixture: &str) -> (MockDatabase, SourceRoot) {
let (db, source_root, position) = MockDatabase::from_fixture(fixture);
assert!(position.is_none());
(db, file_map)
(db, source_root)
}

pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) {
@@ -33,48 +35,50 @@ impl MockDatabase {
.set((), Arc::new(crate_graph));
}

fn from_fixture(fixture: &str) -> (MockDatabase, FileMap, Option<FilePosition>) {
fn from_fixture(fixture: &str) -> (MockDatabase, SourceRoot, Option<FilePosition>) {
let mut db = MockDatabase::default();

let mut position = None;
let mut file_map = FileMap::default();
let mut source_root = SourceRoot::default();
for entry in parse_fixture(fixture) {
if entry.text.contains(CURSOR_MARKER) {
assert!(
position.is_none(),
"only one marker (<|>) per fixture is allowed"
);
position = Some(db.add_file_with_position(&mut file_map, &entry.meta, &entry.text));
position =
Some(db.add_file_with_position(&mut source_root, &entry.meta, &entry.text));
} else {
db.add_file(&mut file_map, &entry.meta, &entry.text);
db.add_file(&mut source_root, &entry.meta, &entry.text);
}
}
let source_root = file_map.clone().into_source_root();
db.query_mut(ra_db::SourceRootQuery)
.set(WORKSPACE, Arc::new(source_root));
(db, file_map, position)
.set(WORKSPACE, Arc::new(source_root.clone()));
(db, source_root, position)
}

fn add_file(&mut self, file_map: &mut FileMap, path: &str, text: &str) -> FileId {
fn add_file(&mut self, source_root: &mut SourceRoot, path: &str, text: &str) -> FileId {
assert!(path.starts_with('/'));
let path = RelativePathBuf::from_path(&path[1..]).unwrap();

let file_id = file_map.add(path);
let file_id = FileId(source_root.files.len() as u32);
let text = Arc::new(text.to_string());
self.query_mut(ra_db::FileTextQuery).set(file_id, text);
self.query_mut(ra_db::FileRelativePathQuery)
.set(file_id, path.clone());
self.query_mut(ra_db::FileSourceRootQuery)
.set(file_id, WORKSPACE);
source_root.files.insert(path, file_id);
file_id
}

fn add_file_with_position(
&mut self,
file_map: &mut FileMap,
source_root: &mut SourceRoot,
path: &str,
text: &str,
) -> FilePosition {
let (offset, text) = extract_offset(text);
let file_id = self.add_file(file_map, path, &text);
let file_id = self.add_file(source_root, path, &text);
FilePosition { file_id, offset }
}
}
@@ -104,11 +108,11 @@ impl Default for MockDatabase {
runtime: salsa::Runtime::default(),
id_maps: Default::default(),
};
db.query_mut(ra_db::SourceRootQuery)
.set(ra_db::WORKSPACE, Default::default());
db.query_mut(ra_db::CrateGraphQuery)
.set((), Default::default());
db.query_mut(ra_db::LibrariesQuery)
db.query_mut(ra_db::LocalRootsQuery)
.set((), Default::default());
db.query_mut(ra_db::LibraryRootsQuery)
.set((), Default::default());
db
}
@@ -158,9 +162,11 @@ salsa::database_storage! {
pub(crate) struct MockDatabaseStorage for MockDatabase {
impl ra_db::FilesDatabase {
fn file_text() for ra_db::FileTextQuery;
fn file_relative_path() for ra_db::FileRelativePathQuery;
fn file_source_root() for ra_db::FileSourceRootQuery;
fn source_root() for ra_db::SourceRootQuery;
fn libraries() for ra_db::LibrariesQuery;
fn local_roots() for ra_db::LocalRootsQuery;
fn library_roots() for ra_db::LibraryRootsQuery;
fn crate_graph() for ra_db::CrateGraphQuery;
}
impl ra_db::SyntaxDatabase {
@@ -4,9 +4,10 @@ use ra_syntax::{
ast::{self, NameOwner},
SmolStr,
};
use relative_path::RelativePathBuf;
use relative_path::{RelativePathBuf, RelativePath};
use rustc_hash::{FxHashMap, FxHashSet};
use ra_db::{SourceRoot, SourceRootId, FileResolverImp, Cancelable, FileId,};
use arrayvec::ArrayVec;
use ra_db::{SourceRoot, SourceRootId, Cancelable, FileId};

use crate::{
HirDatabase,
@@ -65,7 +66,7 @@ fn create_module_tree<'a>(
let mut visited = FxHashSet::default();

let source_root = db.source_root(source_root);
for &file_id in source_root.files.iter() {
for &file_id in source_root.files.values() {
let source = ModuleSource::new_file(file_id);
if visited.contains(&source) {
continue; // TODO: use explicit crate_roots here
@@ -110,8 +111,7 @@ fn build_subtree(

let (points_to, problem) = match sub {
Submodule::Declaration(name) => {
let (points_to, problem) =
resolve_submodule(source, &name, &source_root.file_resolver);
let (points_to, problem) = resolve_submodule(db, source, &name);
let points_to = points_to
.into_iter()
.map(|file_id| match roots.remove(&file_id) {
@@ -153,34 +153,42 @@ fn build_subtree(
}

fn resolve_submodule(
db: &impl HirDatabase,
source: ModuleSource,
name: &SmolStr,
file_resolver: &FileResolverImp,
) -> (Vec<FileId>, Option<Problem>) {
// TODO: handle submodules of inline modules properly
// FIXME: handle submodules of inline modules properly
let file_id = source.file_id();
let mod_name = file_resolver.file_stem(file_id);
let source_root_id = db.file_source_root(file_id);
let path = db.file_relative_path(file_id);
let root = RelativePathBuf::default();
let dir_path = path.parent().unwrap_or(&root);
let mod_name = path.file_stem().unwrap_or("unknown");
let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main";

let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
let file_dir_mod = RelativePathBuf::from(format!("../{}/{}.rs", mod_name, name));
let tmp1;
let tmp2;
let candidates = if is_dir_owner {
tmp1 = [&file_mod, &dir_mod];
tmp1.iter()
let file_mod = dir_path.join(format!("{}.rs", name));
let dir_mod = dir_path.join(format!("{}/mod.rs", name));
let file_dir_mod = dir_path.join(format!("{}/{}.rs", mod_name, name));
let mut candidates = ArrayVec::<[_; 2]>::new();
if is_dir_owner {
candidates.push(file_mod.clone());
candidates.push(dir_mod);
} else {
tmp2 = [&file_dir_mod];
tmp2.iter()
candidates.push(file_dir_mod.clone());
};

let sr = db.source_root(source_root_id);
let points_to = candidates
.filter_map(|path| file_resolver.resolve(file_id, path))
.into_iter()
.filter_map(|path| sr.files.get(&path))
.map(|&it| it)
.collect::<Vec<_>>();
let problem = if points_to.is_empty() {
Some(Problem::UnresolvedModule {
candidate: if is_dir_owner { file_mod } else { file_dir_mod },
candidate: RelativePath::new("../").join(&if is_dir_owner {
file_mod
} else {
file_dir_mod
}),
})
} else {
None
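The rewritten resolver above derives everything from the declaring file's root-relative path, so the candidate computation is pure path arithmetic on `RelativePath`. A worked, hedged example of that arithmetic as a standalone helper (made-up function name, same `relative-path` crate as the diff):

```rust
use relative_path::{RelativePath, RelativePathBuf};

/// For `mod foo;` declared in `decl_file`, list the files that could hold it.
fn submodule_candidates(decl_file: &RelativePath, name: &str) -> Vec<RelativePathBuf> {
    let root = RelativePathBuf::default();
    let dir_path = decl_file.parent().unwrap_or(&root);
    let mod_name = decl_file.file_stem().unwrap_or("unknown");
    // lib.rs, main.rs and mod.rs own their directory; other files get a subdirectory.
    let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main";

    if is_dir_owner {
        // src/lib.rs + `mod foo;`  ->  src/foo.rs or src/foo/mod.rs
        vec![
            dir_path.join(format!("{}.rs", name)),
            dir_path.join(format!("{}/mod.rs", name)),
        ]
    } else {
        // src/bar.rs + `mod foo;`  ->  src/bar/foo.rs
        vec![dir_path.join(format!("{}/{}.rs", mod_name, name))]
    }
}
```

Each candidate is then looked up in `SourceRoot::files`, and a miss is reported as `Problem::UnresolvedModule`, as the hunk above shows.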
@@ -32,11 +32,12 @@ use crate::{
SourceItemId, SourceFileItemId, SourceFileItems,
Path, PathKind,
HirDatabase, Crate,
module::{ModuleId, ModuleTree},
module::{Module, ModuleId, ModuleTree},
};

/// Item map is the result of the name resolution. Item map contains, for each
/// module, the set of visible items.
// FIXME: currenty we compute item map per source-root. We should do it per crate instead.
#[derive(Default, Debug, PartialEq, Eq)]
pub struct ItemMap {
pub per_module: FxHashMap<ModuleId, ModuleScope>,
@@ -252,7 +253,8 @@ where
let krate = Crate::new(crate_id);
for dep in krate.dependencies(self.db) {
if let Some(module) = dep.krate.root_module(self.db)? {
self.add_module_item(&mut module_items, dep.name, module.module_id);
let def_id = module.def_id(self.db);
self.add_module_item(&mut module_items, dep.name, def_id);
}
}
};
@@ -294,21 +296,21 @@ where

// Populate modules
for (name, module_id) in module_id.children(&self.module_tree) {
self.add_module_item(&mut module_items, name, module_id);
let def_loc = DefLoc {
kind: DefKind::Module,
source_root_id: self.source_root,
module_id,
source_item_id: module_id.source(&self.module_tree).0,
};
let def_id = def_loc.id(self.db);
self.add_module_item(&mut module_items, name, def_id);
}

self.result.per_module.insert(module_id, module_items);
Ok(())
}

fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, module_id: ModuleId) {
let def_loc = DefLoc {
kind: DefKind::Module,
source_root_id: self.source_root,
module_id,
source_item_id: module_id.source(&self.module_tree).0,
};
let def_id = def_loc.id(self.db);
fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, def_id: DefId) {
let resolution = Resolution {
def_id: Some(def_id),
import: None,
@@ -329,7 +331,7 @@ where
ImportKind::Named(ptr) => ptr,
};

let mut curr = match import.path.kind {
let mut curr: ModuleId = match import.path.kind {
PathKind::Plain | PathKind::Self_ => module_id,
PathKind::Super => {
match module_id.parent(&self.module_tree) {
@@ -356,9 +358,30 @@ where
curr = match def_id.loc(self.db) {
DefLoc {
kind: DefKind::Module,
module_id,
module_id: target_module_id,
source_root_id,
..
} => module_id,
} => {
if source_root_id == self.source_root {
target_module_id
} else {
let module = Module::new(self.db, source_root_id, target_module_id)?;
let path = Path {
segments: import.path.segments[i + 1..].iter().cloned().collect(),
kind: PathKind::Crate,
};
if let Some(def_id) = module.resolve_path(self.db, path)? {
self.update(module_id, |items| {
let res = Resolution {
def_id: Some(def_id),
import: Some(ptr),
};
items.items.insert(name.clone(), res);
})
}
return Ok(());
}
}
_ => return Ok(()),
}
} else {
@@ -3,6 +3,7 @@ use std::sync::Arc;
use salsa::Database;
use ra_db::{FilesDatabase, CrateGraph};
use ra_syntax::SmolStr;
use relative_path::RelativePath;

use crate::{
self as hir,
@@ -44,7 +45,7 @@ fn item_map_smoke_test() {

#[test]
fn item_map_across_crates() {
let (mut db, files) = MockDatabase::with_files(
let (mut db, sr) = MockDatabase::with_files(
"
//- /main.rs
use test_crate::Baz;
@@ -53,8 +54,8 @@ fn item_map_across_crates() {
pub struct Baz;
",
);
let main_id = files.file_id("/main.rs");
let lib_id = files.file_id("/lib.rs");
let main_id = sr.files[RelativePath::new("/main.rs")];
let lib_id = sr.files[RelativePath::new("/lib.rs")];

let mut crate_graph = CrateGraph::default();
let main_crate = crate_graph.add_crate_root(main_id);
@@ -19,12 +19,13 @@ flexi_logger = "0.10.0"
log = "0.4.3"
url_serde = "0.2.0"
languageserver-types = "0.53.0"
walkdir = "2.2.0"
walkdir = "2.2.7"
im = "12.0.0"
cargo_metadata = "0.6.0"
text_unit = { version = "0.1.2", features = ["serde"] }
smol_str = { version = "0.1.5", features = ["serde"] }
rustc-hash = "1.0"
parking_lot = "0.7.0"

thread_worker = { path = "../thread_worker" }
ra_syntax = { path = "../ra_syntax" }
@@ -32,6 +33,7 @@ ra_editor = { path = "../ra_editor" }
ra_text_edit = { path = "../ra_text_edit" }
ra_analysis = { path = "../ra_analysis" }
gen_lsp_server = { path = "../gen_lsp_server" }
ra_vfs = { path = "../ra_vfs" }

[dev-dependencies]
tempdir = "0.3.7"
@@ -1,11 +1,9 @@
mod caps;
mod conv;
mod main_loop;
mod path_map;
mod project_model;
pub mod req;
mod server_world;
mod vfs;

pub type Result<T> = ::std::result::Result<T, ::failure::Error>;
pub use crate::{caps::server_capabilities, main_loop::main_loop, main_loop::LspError};
@@ -1,7 +1,10 @@
mod handlers;
mod subscriptions;

use std::path::PathBuf;
use std::{
path::PathBuf,
sync::Arc,
};

use crossbeam_channel::{unbounded, select, Receiver, Sender};
use gen_lsp_server::{
@@ -9,8 +12,8 @@ use gen_lsp_server::{
};
use languageserver_types::NumberOrString;
use ra_analysis::{Canceled, FileId, LibraryData};
use ra_vfs::{VfsTask};
use rayon;
use thread_worker::Worker;
use threadpool::ThreadPool;
use rustc_hash::FxHashSet;
use serde::{de::DeserializeOwned, Serialize};
@@ -19,10 +22,9 @@ use failure_derive::Fail;

use crate::{
main_loop::subscriptions::Subscriptions,
project_model::{workspace_loader, CargoWorkspace},
project_model::{workspace_loader},
req,
server_world::{ServerWorld, ServerWorldState},
vfs::{self, FileEvent},
Result,
};

@@ -50,32 +52,42 @@ enum Task {

pub fn main_loop(
internal_mode: bool,
root: PathBuf,
ws_root: PathBuf,
publish_decorations: bool,
msg_receiver: &Receiver<RawMessage>,
msg_sender: &Sender<RawMessage>,
) -> Result<()> {
let pool = ThreadPool::new(8);
let (task_sender, task_receiver) = unbounded::<Task>();
let (fs_worker, fs_watcher) = vfs::roots_loader();
let (ws_worker, ws_watcher) = workspace_loader();

ws_worker.send(ws_root.clone());
// FIXME: support dynamic workspace loading.
let workspaces = match ws_worker.recv().unwrap() {
Ok(ws) => vec![ws],
Err(e) => {
log::warn!("loading workspace failed: {}", e);
Vec::new()
}
};
ws_worker.shutdown();
ws_watcher
.shutdown()
.map_err(|_| format_err!("ws watcher died"))?;
let mut state = ServerWorldState::new(ws_root.clone(), workspaces);

log::info!("server initialized, serving requests");
let mut state = ServerWorldState::default();

let mut pending_requests = FxHashSet::default();
let mut subs = Subscriptions::new();
let main_res = main_loop_inner(
internal_mode,
publish_decorations,
root,
&pool,
msg_sender,
msg_receiver,
task_sender,
task_receiver.clone(),
fs_worker,
ws_worker,
&mut state,
&mut pending_requests,
&mut subs,
@@ -88,12 +100,11 @@ pub fn main_loop(
drop(pool);
log::info!("...threadpool has finished");

let fs_res = fs_watcher.stop();
let ws_res = ws_watcher.stop();
let vfs = Arc::try_unwrap(state.vfs).expect("all snapshots should be dead");
let vfs_res = vfs.into_inner().shutdown();

main_res?;
fs_res.map_err(|_| format_err!("fs watcher died"))?;
ws_res.map_err(|_| format_err!("ws watcher died"))?;
vfs_res.map_err(|_| format_err!("fs watcher died"))?;

Ok(())
}
@@ -101,28 +112,22 @@ pub fn main_loop(
fn main_loop_inner(
internal_mode: bool,
publish_decorations: bool,
ws_root: PathBuf,
pool: &ThreadPool,
msg_sender: &Sender<RawMessage>,
msg_receiver: &Receiver<RawMessage>,
task_sender: Sender<Task>,
task_receiver: Receiver<Task>,
fs_worker: Worker<PathBuf, (PathBuf, Vec<FileEvent>)>,
ws_worker: Worker<PathBuf, Result<CargoWorkspace>>,
state: &mut ServerWorldState,
pending_requests: &mut FxHashSet<u64>,
subs: &mut Subscriptions,
) -> Result<()> {
let (libdata_sender, libdata_receiver) = unbounded();
ws_worker.send(ws_root.clone());
fs_worker.send(ws_root.clone());
loop {
#[derive(Debug)]
enum Event {
Msg(RawMessage),
Task(Task),
Fs(PathBuf, Vec<FileEvent>),
Ws(Result<CargoWorkspace>),
Vfs(VfsTask),
Lib(LibraryData),
}
log::trace!("selecting");
@@ -132,77 +137,20 @@ fn main_loop_inner(
None => bail!("client exited without shutdown"),
},
recv(task_receiver, task) => Event::Task(task.unwrap()),
recv(fs_worker.out, events) => match events {
None => bail!("roots watcher died"),
Some((pb, events)) => Event::Fs(pb, events),
}
recv(ws_worker.out, ws) => match ws {
None => bail!("workspace watcher died"),
Some(ws) => Event::Ws(ws),
recv(state.vfs.read().task_receiver(), task) => match task {
None => bail!("vfs died"),
Some(task) => Event::Vfs(task),
}
recv(libdata_receiver, data) => Event::Lib(data.unwrap())
};
log::info!("{:?}", event);
let mut state_changed = false;
match event {
Event::Task(task) => on_task(task, msg_sender, pending_requests),
Event::Fs(root, events) => {
log::info!("fs change, {}, {} events", root.display(), events.len());
if root == ws_root {
state.apply_fs_changes(events);
} else {
let (files, resolver) = state.events_to_files(events);
let sender = libdata_sender.clone();
pool.execute(move || {
let start = ::std::time::Instant::now();
log::info!("indexing {} ... ", root.display());
let data = LibraryData::prepare(files, resolver);
log::info!("indexed {:?} {}", start.elapsed(), root.display());
sender.send(data);
});
}
Event::Vfs(task) => {
state.vfs.write().handle_task(task);
state_changed = true;
}
Event::Ws(ws) => match ws {
Ok(ws) => {
let workspaces = vec![ws];
feedback(internal_mode, "workspace loaded", msg_sender);
for ws in workspaces.iter() {
// Add each library as constant input. If library is
// within the workspace, don't treat it as a library.
//
// HACK: If source roots are nested, pick the outer one.

let mut roots = ws
.packages()
.filter(|pkg| !pkg.is_member(ws))
.filter_map(|pkg| {
let root = pkg.root(ws).to_path_buf();
if root.starts_with(&ws_root) {
None
} else {
Some(root)
}
})
.collect::<Vec<_>>();
roots.sort_by_key(|it| it.as_os_str().len());
let unique = roots
.iter()
.enumerate()
.filter(|&(idx, long)| {
!roots[..idx].iter().any(|short| long.starts_with(short))
})
.map(|(_idx, root)| root);

for root in unique {
log::debug!("sending root, {}", root.display());
fs_worker.send(root.to_owned());
}
}
state.set_workspaces(workspaces);
state_changed = true;
}
Err(e) => log::warn!("loading workspace failed: {}", e),
},
Event::Lib(lib) => {
feedback(internal_mode, "library loaded", msg_sender);
state.add_lib(lib);
@@ -234,6 +182,21 @@ fn main_loop_inner(
},
};

for lib in state.process_changes() {
let (root, files) = lib;
let sender = libdata_sender.clone();
pool.execute(move || {
let start = ::std::time::Instant::now();
log::info!("indexing {:?} ... ", root);
let data = LibraryData::prepare(root, files);
log::info!("indexed {:?} {:?}", start.elapsed(), root);
sender.send(data);
});
}
if state.roots_to_scan == 0 {
feedback(internal_mode, "workspace loaded", msg_sender);
}

if state_changed {
update_file_notifications_on_threadpool(
pool,
@@ -336,8 +299,13 @@ fn on_notification(
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.add_mem_file(path, params.text_document.text);
subs.add_sub(file_id);
if let Some(file_id) = state
.vfs
.write()
.add_file_overlay(&path, params.text_document.text)
{
subs.add_sub(FileId(file_id.0));
}
return Ok(());
}
Err(not) => not,
@@ -353,7 +321,7 @@ fn on_notification(
.pop()
.ok_or_else(|| format_err!("empty changes"))?
.text;
state.change_mem_file(path.as_path(), text)?;
state.vfs.write().change_file_overlay(path.as_path(), text);
return Ok(());
}
Err(not) => not,
@@ -364,8 +332,9 @@ fn on_notification(
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.remove_mem_file(path.as_path())?;
subs.remove_sub(file_id);
if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) {
subs.remove_sub(FileId(file_id.0));
}
let params = req::PublishDiagnosticsParams {
uri,
diagnostics: Vec::new(),
@@ -326,9 +326,9 @@ pub fn handle_runnables(
None => return Ok(None),
};
let file_id = world.analysis().crate_root(crate_id)?;
let path = world.path_map.get_path(file_id);
let path = world.vfs.read().file2path(ra_vfs::VfsFile(file_id.0));
let res = world.workspaces.iter().find_map(|ws| {
let tgt = ws.target_by_root(path)?;
let tgt = ws.target_by_root(&path)?;
let res = CargoTargetSpec {
package: tgt.package(ws).name(ws).to_string(),
target: tgt.name(ws).to_string(),
@@ -1,126 +0,0 @@
use std::{
fmt,
path::{Component, Path, PathBuf},
};

use im;
use ra_analysis::{FileId, FileResolver};
use relative_path::RelativePath;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Root {
Workspace,
Lib,
}

#[derive(Default, Clone)]
pub struct PathMap {
next_id: u32,
path2id: im::HashMap<PathBuf, FileId>,
id2path: im::HashMap<FileId, PathBuf>,
id2root: im::HashMap<FileId, Root>,
}

impl fmt::Debug for PathMap {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("PathMap { ... }")
}
}

impl PathMap {
pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> (bool, FileId) {
let mut inserted = false;
let file_id = self
.path2id
.get(path.as_path())
.map(|&id| id)
.unwrap_or_else(|| {
inserted = true;
let id = self.new_file_id();
self.insert(path, id, root);
id
});
(inserted, file_id)
}
pub fn get_id(&self, path: &Path) -> Option<FileId> {
self.path2id.get(path).cloned()
}
pub fn get_path(&self, file_id: FileId) -> &Path {
self.id2path.get(&file_id).unwrap().as_path()
}
pub fn get_root(&self, file_id: FileId) -> Root {
self.id2root[&file_id]
}
fn insert(&mut self, path: PathBuf, file_id: FileId, root: Root) {
self.path2id.insert(path.clone(), file_id);
self.id2path.insert(file_id, path.clone());
self.id2root.insert(file_id, root);
}

fn new_file_id(&mut self) -> FileId {
let id = FileId(self.next_id);
self.next_id += 1;
id
}
}

impl FileResolver for PathMap {
fn file_stem(&self, file_id: FileId) -> String {
self.get_path(file_id)
.file_stem()
.unwrap()
.to_str()
.unwrap()
.to_string()
}

fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
let path = path.to_path(&self.get_path(file_id));
let path = normalize(&path);
self.get_id(&path)
}

fn debug_path(&self, file_id: FileId) -> Option<PathBuf> {
Some(self.get_path(file_id).to_owned())
}
}

fn normalize(path: &Path) -> PathBuf {
let mut components = path.components().peekable();
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
components.next();
PathBuf::from(c.as_os_str())
} else {
PathBuf::new()
};

for component in components {
match component {
Component::Prefix(..) => unreachable!(),
Component::RootDir => {
ret.push(component.as_os_str());
}
Component::CurDir => {}
Component::ParentDir => {
ret.pop();
}
Component::Normal(c) => {
ret.push(c);
}
}
}
ret
}

#[cfg(test)]
mod test {
use super::*;

#[test]
fn test_resolve() {
let mut m = PathMap::default();
let (_, id1) = m.get_or_insert(PathBuf::from("/foo"), Root::Workspace);
let (_, id2) = m.get_or_insert(PathBuf::from("/foo/bar.rs"), Root::Workspace);
assert_eq!(m.resolve(id1, &RelativePath::new("bar.rs")), Some(id2),)
}
}
@@ -69,6 +69,7 @@ impl Package {
pub fn targets<'a>(self, ws: &'a CargoWorkspace) -> impl Iterator<Item = Target> + 'a {
ws.pkg(self).targets.iter().cloned()
}
#[allow(unused)]
pub fn is_member(self, ws: &CargoWorkspace) -> bool {
ws.pkg(self).is_member
}
@ -1,154 +1,66 @@
|
||||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
path::{PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use languageserver_types::Url;
|
||||
use ra_analysis::{
|
||||
Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FileResolver, LibraryData,
|
||||
Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData,
|
||||
SourceRootId
|
||||
};
|
||||
use ra_vfs::{Vfs, VfsChange, VfsFile};
|
||||
use rustc_hash::FxHashMap;
|
||||
use failure::{bail, format_err};
|
||||
use relative_path::RelativePathBuf;
|
||||
use parking_lot::RwLock;
|
||||
use failure::{format_err};
|
||||
|
||||
use crate::{
|
||||
path_map::{PathMap, Root},
|
||||
project_model::{CargoWorkspace, TargetKind},
|
||||
vfs::{FileEvent, FileEventKind},
|
||||
Result,
|
||||
};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
#[derive(Debug)]
|
||||
pub struct ServerWorldState {
|
||||
pub roots_to_scan: usize,
|
||||
pub root: PathBuf,
|
||||
pub workspaces: Arc<Vec<CargoWorkspace>>,
|
||||
pub analysis_host: AnalysisHost,
|
||||
pub path_map: PathMap,
|
||||
pub mem_map: FxHashMap<FileId, Option<String>>,
|
||||
pub vfs: Arc<RwLock<Vfs>>,
|
||||
}
|
||||
|
||||
pub struct ServerWorld {
|
||||
pub workspaces: Arc<Vec<CargoWorkspace>>,
|
||||
pub analysis: Analysis,
|
||||
pub path_map: PathMap,
|
||||
pub vfs: Arc<RwLock<Vfs>>,
|
||||
}
|
||||
|
||||
impl ServerWorldState {
|
||||
pub fn apply_fs_changes(&mut self, events: Vec<FileEvent>) {
|
||||
pub fn new(root: PathBuf, workspaces: Vec<CargoWorkspace>) -> ServerWorldState {
|
||||
let mut change = AnalysisChange::new();
|
||||
let mut inserted = false;
|
||||
{
|
||||
let pm = &mut self.path_map;
|
||||
let mm = &mut self.mem_map;
|
||||
events
|
||||
.into_iter()
|
||||
.map(|event| {
|
||||
let text = match event.kind {
|
||||
FileEventKind::Add(text) => text,
|
||||
};
|
||||
(event.path, text)
|
||||
})
|
||||
.map(|(path, text)| {
|
||||
let (ins, file_id) = pm.get_or_insert(path, Root::Workspace);
|
||||
inserted |= ins;
|
||||
(file_id, text)
|
||||
})
|
||||
.filter_map(|(file_id, text)| {
|
||||
if mm.contains_key(&file_id) {
|
||||
mm.insert(file_id, Some(text));
|
||||
None
|
||||
} else {
|
||||
Some((file_id, text))
|
||||
}
|
||||
})
|
||||
.for_each(|(file_id, text)| change.add_file(file_id, text));
|
||||
}
|
||||
if inserted {
|
||||
change.set_file_resolver(Arc::new(self.path_map.clone()))
|
||||
}
|
||||
self.analysis_host.apply_change(change);
|
||||
}
|
||||
pub fn events_to_files(
|
||||
&mut self,
|
||||
events: Vec<FileEvent>,
|
||||
) -> (Vec<(FileId, String)>, Arc<FileResolver>) {
|
||||
let files = {
|
||||
let pm = &mut self.path_map;
|
||||
events
|
||||
.into_iter()
|
||||
.map(|event| {
|
||||
let FileEventKind::Add(text) = event.kind;
|
||||
(event.path, text)
|
||||
})
|
||||
.map(|(path, text)| (pm.get_or_insert(path, Root::Lib).1, text))
|
||||
.collect()
|
||||
};
|
||||
let resolver = Arc::new(self.path_map.clone());
|
||||
(files, resolver)
|
||||
}
|
||||
pub fn add_lib(&mut self, data: LibraryData) {
|
||||
let mut change = AnalysisChange::new();
|
||||
change.add_library(data);
|
||||
self.analysis_host.apply_change(change);
|
||||
}
|
||||
|
||||
pub fn add_mem_file(&mut self, path: PathBuf, text: String) -> FileId {
|
||||
let (inserted, file_id) = self.path_map.get_or_insert(path, Root::Workspace);
|
||||
if self.path_map.get_root(file_id) != Root::Lib {
|
||||
let mut change = AnalysisChange::new();
|
||||
if inserted {
|
||||
change.add_file(file_id, text);
|
||||
change.set_file_resolver(Arc::new(self.path_map.clone()));
|
||||
} else {
|
||||
change.change_file(file_id, text);
|
||||
let mut roots = Vec::new();
roots.push(root.clone());
for ws in workspaces.iter() {
for pkg in ws.packages() {
roots.push(pkg.root(&ws).to_path_buf());
}
self.analysis_host.apply_change(change);
}
self.mem_map.insert(file_id, None);
file_id
}
let roots_to_scan = roots.len();
let (mut vfs, roots) = Vfs::new(roots);
for r in roots {
let is_local = vfs.root2path(r).starts_with(&root);
change.add_root(SourceRootId(r.0), is_local);
}

pub fn change_mem_file(&mut self, path: &Path, text: String) -> Result<()> {
let file_id = self
.path_map
.get_id(path)
.ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?;
if self.path_map.get_root(file_id) != Root::Lib {
let mut change = AnalysisChange::new();
change.change_file(file_id, text);
self.analysis_host.apply_change(change);
}
Ok(())
}

pub fn remove_mem_file(&mut self, path: &Path) -> Result<FileId> {
let file_id = self
.path_map
.get_id(path)
.ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?;
match self.mem_map.remove(&file_id) {
Some(_) => (),
None => bail!("unmatched close notification"),
};
// Do this via file watcher ideally.
let text = fs::read_to_string(path).ok();
if self.path_map.get_root(file_id) != Root::Lib {
let mut change = AnalysisChange::new();
if let Some(text) = text {
change.change_file(file_id, text);
}
self.analysis_host.apply_change(change);
}
Ok(file_id)
}
pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) {
let mut crate_graph = CrateGraph::default();
let mut pkg_to_lib_crate = FxHashMap::default();
let mut pkg_crates = FxHashMap::default();
for ws in ws.iter() {
for ws in workspaces.iter() {
for pkg in ws.packages() {
for tgt in pkg.targets(ws) {
let root = tgt.root(ws);
if let Some(file_id) = self.path_map.get_id(root) {
if let Some(file_id) = vfs.load(root) {
let file_id = FileId(file_id.0);
let crate_id = crate_graph.add_crate_root(file_id);
if tgt.kind(ws) == TargetKind::Lib {
pkg_to_lib_crate.insert(pkg, crate_id);
@ -170,16 +82,80 @@ impl ServerWorldState {
}
}
}
self.workspaces = Arc::new(ws);
let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph);

let mut analysis_host = AnalysisHost::default();
analysis_host.apply_change(change);
ServerWorldState {
roots_to_scan,
root,
workspaces: Arc::new(workspaces),
analysis_host,
vfs: Arc::new(RwLock::new(vfs)),
}
}

/// Returns a vec of libraries
/// FIXME: better API here
pub fn process_changes(
&mut self,
) -> Vec<(SourceRootId, Vec<(FileId, RelativePathBuf, Arc<String>)>)> {
let changes = self.vfs.write().commit_changes();
if changes.is_empty() {
return Vec::new();
}
let mut libs = Vec::new();
let mut change = AnalysisChange::new();
for c in changes {
log::info!("vfs change {:?}", c);
match c {
VfsChange::AddRoot { root, files } => {
let root_path = self.vfs.read().root2path(root);
if root_path.starts_with(&self.root) {
self.roots_to_scan -= 1;
for (file, path, text) in files {
change.add_file(SourceRootId(root.0), FileId(file.0), path, text);
}
} else {
let files = files
.into_iter()
.map(|(vfsfile, path, text)| (FileId(vfsfile.0), path, text))
.collect();
libs.push((SourceRootId(root.0), files));
}
}
VfsChange::AddFile {
root,
file,
path,
text,
} => {
change.add_file(SourceRootId(root.0), FileId(file.0), path, text);
}
VfsChange::RemoveFile { root, file, path } => {
change.remove_file(SourceRootId(root.0), FileId(file.0), path)
}
VfsChange::ChangeFile { file, text } => {
change.change_file(FileId(file.0), text);
}
}
}
self.analysis_host.apply_change(change);
libs
}

pub fn add_lib(&mut self, data: LibraryData) {
self.roots_to_scan -= 1;
let mut change = AnalysisChange::new();
change.add_library(data);
self.analysis_host.apply_change(change);
}

pub fn snapshot(&self) -> ServerWorld {
ServerWorld {
workspaces: Arc::clone(&self.workspaces),
analysis: self.analysis_host.analysis(),
path_map: self.path_map.clone(),
vfs: Arc::clone(&self.vfs),
}
}
}
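To see how the two halves above fit together: local-root changes are applied to the analysis host directly inside process_changes, while library roots are handed back to the caller so they can be indexed off the main thread and fed in later via add_lib. A minimal sketch of that calling pattern, assuming a LibraryData::prepare constructor that is not part of this diff:

// Sketch only: LibraryData::prepare and the surrounding function are assumptions,
// not code from this change.
fn apply_vfs_changes(state: &mut ServerWorldState) {
    // Local-root changes were already applied inside `process_changes`;
    // everything returned here belongs to library roots.
    for (root_id, files) in state.process_changes() {
        let data = LibraryData::prepare(root_id, files); // hypothetical constructor
        state.add_lib(data);
    }
}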
@ -193,15 +169,18 @@ impl ServerWorld {
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
self.path_map
.get_id(&path)
.ok_or_else(|| format_err!("unknown file: {}", path.display()))
let file = self
.vfs
.read()
.path2file(&path)
.ok_or_else(|| format_err!("unknown file: {}", path.display()))?;
Ok(FileId(file.0))
}

pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> {
let path = self.path_map.get_path(id);
let url = Url::from_file_path(path)
.map_err(|()| format_err!("can't convert path to url: {}", path.display()))?;
let path = self.vfs.read().file2path(VfsFile(id.0));
let url = Url::from_file_path(&path)
.map_err(|_| format_err!("can't convert path to url: {}", path.display()))?;
Ok(url)
}
}
@ -1,67 +0,0 @@
use std::{
fs,
path::{Path, PathBuf},
};

use walkdir::WalkDir;
use thread_worker::{WorkerHandle, Worker};

#[derive(Debug)]
pub struct FileEvent {
pub path: PathBuf,
pub kind: FileEventKind,
}

#[derive(Debug)]
pub enum FileEventKind {
Add(String),
}

pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, WorkerHandle) {
thread_worker::spawn::<PathBuf, (PathBuf, Vec<FileEvent>), _>(
"roots loader",
128,
|input_receiver, output_sender| {
input_receiver
.map(|path| {
log::debug!("loading {} ...", path.as_path().display());
let events = load_root(path.as_path());
log::debug!("... loaded {}", path.as_path().display());
(path, events)
})
.for_each(|it| output_sender.send(it))
},
)
}

fn load_root(path: &Path) -> Vec<FileEvent> {
let mut res = Vec::new();
for entry in WalkDir::new(path) {
let entry = match entry {
Ok(entry) => entry,
Err(e) => {
log::warn!("watcher error: {}", e);
continue;
}
};
if !entry.file_type().is_file() {
continue;
}
let path = entry.path();
if path.extension().and_then(|os| os.to_str()) != Some("rs") {
continue;
}
let text = match fs::read_to_string(path) {
Ok(text) => text,
Err(e) => {
log::warn!("watcher error: {}", e);
continue;
}
};
res.push(FileEvent {
path: path.to_owned(),
kind: FileEventKind::Add(text),
})
}
res
}
@ -1,9 +1,7 @@
mod support;

use serde_json::json;

use ra_lsp_server::req::{Runnables, RunnablesParams, CodeActionRequest, CodeActionParams};

use languageserver_types::{Position, Range, CodeActionContext};

use crate::support::project;
@ -20,6 +18,7 @@ fn foo() {
}
",
);
server.wait_for_feedback("workspace loaded");
server.request::<Runnables>(
RunnablesParams {
text_document: server.doc_id("lib.rs"),

@ -174,11 +174,11 @@ impl Server {
impl Drop for Server {
fn drop(&mut self) {
self.send_request::<Shutdown>(666, ());
let receiver = self.worker.take().unwrap().stop();
let receiver = self.worker.take().unwrap().shutdown();
while let Some(msg) = recv_timeout(&receiver) {
drop(msg);
}
self.watcher.take().unwrap().stop().unwrap();
self.watcher.take().unwrap().shutdown().unwrap();
}
}
crates/ra_vfs/Cargo.toml (new file, 17 lines)
@ -0,0 +1,17 @@
[package]
edition = "2018"
name = "ra_vfs"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

[dependencies]
walkdir = "2.2.7"
relative-path = "0.4.0"
rustc-hash = "1.0"
crossbeam-channel = "0.2.4"
log = "0.4.6"

thread_worker = { path = "../thread_worker" }

[dev-dependencies]
tempfile = "3"
crates/ra_vfs/src/arena.rs (new file, 53 lines)
@ -0,0 +1,53 @@
use std::{
marker::PhantomData,
ops::{Index, IndexMut},
};

#[derive(Clone, Debug)]
pub(crate) struct Arena<ID: ArenaId, T> {
data: Vec<T>,
_ty: PhantomData<ID>,
}

pub(crate) trait ArenaId {
fn from_u32(id: u32) -> Self;
fn to_u32(self) -> u32;
}

impl<ID: ArenaId, T> Arena<ID, T> {
pub fn alloc(&mut self, value: T) -> ID {
let id = self.data.len() as u32;
self.data.push(value);
ID::from_u32(id)
}
pub fn iter<'a>(&'a self) -> impl Iterator<Item = (ID, &'a T)> {
self.data
.iter()
.enumerate()
.map(|(idx, value)| (ID::from_u32(idx as u32), value))
}
}

impl<ID: ArenaId, T> Default for Arena<ID, T> {
fn default() -> Arena<ID, T> {
Arena {
data: Vec::new(),
_ty: PhantomData,
}
}
}

impl<ID: ArenaId, T> Index<ID> for Arena<ID, T> {
type Output = T;
fn index(&self, idx: ID) -> &T {
let idx = idx.to_u32() as usize;
&self.data[idx]
}
}

impl<ID: ArenaId, T> IndexMut<ID> for Arena<ID, T> {
fn index_mut(&mut self, idx: ID) -> &mut T {
let idx = idx.to_u32() as usize;
&mut self.data[idx]
}
}
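The arena keeps IDs as plain u32 newtypes and lookups as Vec indexing, which is all the VFS below needs. A small crate-internal usage sketch; the NameId type and the demo function are illustrative and not part of this file:

// Illustrative only: NameId is made up to show the ArenaId contract.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
struct NameId(u32);

impl ArenaId for NameId {
    fn from_u32(id: u32) -> NameId {
        NameId(id)
    }
    fn to_u32(self) -> u32 {
        self.0
    }
}

fn arena_demo() {
    let mut arena: Arena<NameId, String> = Arena::default();
    let hello = arena.alloc("hello".to_string()); // first ID is NameId(0)
    arena.alloc("world".to_string());
    assert_eq!(arena[hello], "hello"); // Index<ID> returns &T
    assert_eq!(arena.iter().count(), 2); // iter yields (ID, &T) pairs
}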
crates/ra_vfs/src/io.rs (new file, 76 lines)
@ -0,0 +1,76 @@
use std::{
fmt,
fs,
path::{Path, PathBuf},
};

use walkdir::{DirEntry, WalkDir};
use thread_worker::{WorkerHandle};
use relative_path::RelativePathBuf;

use crate::VfsRoot;

pub(crate) struct Task {
pub(crate) root: VfsRoot,
pub(crate) path: PathBuf,
pub(crate) filter: Box<Fn(&DirEntry) -> bool + Send>,
}

pub struct TaskResult {
pub(crate) root: VfsRoot,
pub(crate) files: Vec<(RelativePathBuf, String)>,
}

impl fmt::Debug for TaskResult {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("TaskResult { ... }")
}
}

pub(crate) type Worker = thread_worker::Worker<Task, TaskResult>;

pub(crate) fn start() -> (Worker, WorkerHandle) {
thread_worker::spawn("vfs", 128, |input_receiver, output_sender| {
input_receiver
.map(handle_task)
.for_each(|it| output_sender.send(it))
})
}

fn handle_task(task: Task) -> TaskResult {
let Task { root, path, filter } = task;
log::debug!("loading {} ...", path.as_path().display());
let files = load_root(path.as_path(), &*filter);
log::debug!("... loaded {}", path.as_path().display());
TaskResult { root, files }
}

fn load_root(root: &Path, filter: &dyn Fn(&DirEntry) -> bool) -> Vec<(RelativePathBuf, String)> {
let mut res = Vec::new();
for entry in WalkDir::new(root).into_iter().filter_entry(filter) {
let entry = match entry {
Ok(entry) => entry,
Err(e) => {
log::warn!("watcher error: {}", e);
continue;
}
};
if !entry.file_type().is_file() {
continue;
}
let path = entry.path();
if path.extension().and_then(|os| os.to_str()) != Some("rs") {
continue;
}
let text = match fs::read_to_string(path) {
Ok(text) => text,
Err(e) => {
log::warn!("watcher error: {}", e);
continue;
}
};
let path = RelativePathBuf::from_path(path.strip_prefix(root).unwrap()).unwrap();
res.push((path.to_owned(), text))
}
res
}
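Taken together, the intended flow of this module is: enqueue one Task per root, let the background thread walk it, and collect one TaskResult per root from the output channel. A rough crate-internal sketch of that call pattern (the function and the filter closure below are illustrative, not part of this file):

// Crate-internal sketch; `scan_one_root` is not part of this change.
fn scan_one_root(root: VfsRoot, path: std::path::PathBuf) {
    let (worker, handle) = start();
    worker.send(Task {
        root,
        path,
        // Descend into every directory, keep only `.rs` files.
        filter: Box::new(|entry| {
            !entry.file_type().is_file()
                || entry.path().extension() == Some(std::ffi::OsStr::new("rs"))
        }),
    });
    if let Some(result) = worker.recv() {
        log::debug!("root {:?}: {} files loaded", result.root, result.files.len());
    }
    // Close the input channel first, then join the thread.
    let _pending = worker.shutdown();
    handle.shutdown().unwrap();
}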
crates/ra_vfs/src/lib.rs (new file, 350 lines)
@ -0,0 +1,350 @@
//! VFS stands for Virtual File System.
//!
//! When doing analysis, we don't want to do any IO; we want to keep all source
//! code in memory. However, the actual source code is stored on disk, so you
//! need to get it into memory in the first place somehow. VFS is the
//! component which does this.
//!
//! It is also responsible for watching the disk for changes, and for merging
//! editor state (modified, unsaved files) with disk state.
//!
//! VFS is based on a concept of roots: a set of directories on the file system
//! which are watched for changes. Typically, there will be a root for each
//! Cargo package.
mod arena;
mod io;

use std::{
fmt,
mem,
thread,
cmp::Reverse,
path::{Path, PathBuf},
ffi::OsStr,
sync::Arc,
fs,
};

use rustc_hash::{FxHashMap, FxHashSet};
use relative_path::RelativePathBuf;
use crossbeam_channel::Receiver;
use walkdir::DirEntry;
use thread_worker::{WorkerHandle};

use crate::{
arena::{ArenaId, Arena},
};

pub use crate::io::TaskResult as VfsTask;

/// `RootFilter` is a predicate that checks if a file can belong to a root. If
/// several filters match a file (nested dirs), the most nested one wins.
struct RootFilter {
root: PathBuf,
file_filter: fn(&Path) -> bool,
}

impl RootFilter {
fn new(root: PathBuf) -> RootFilter {
RootFilter {
root,
file_filter: has_rs_extension,
}
}
/// Check if this root can contain `path`. NB: even if this returns
/// true, the `path` might actually be contained in some nested root.
fn can_contain(&self, path: &Path) -> Option<RelativePathBuf> {
if !(self.file_filter)(path) {
return None;
}
if !(path.starts_with(&self.root)) {
return None;
}
let path = path.strip_prefix(&self.root).unwrap();
let path = RelativePathBuf::from_path(path).unwrap();
Some(path)
}
}

fn has_rs_extension(p: &Path) -> bool {
p.extension() == Some(OsStr::new("rs"))
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub struct VfsRoot(pub u32);

impl ArenaId for VfsRoot {
fn from_u32(idx: u32) -> VfsRoot {
VfsRoot(idx)
}
fn to_u32(self) -> u32 {
self.0
}
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub struct VfsFile(pub u32);

impl ArenaId for VfsFile {
fn from_u32(idx: u32) -> VfsFile {
VfsFile(idx)
}
fn to_u32(self) -> u32 {
self.0
}
}

struct VfsFileData {
root: VfsRoot,
path: RelativePathBuf,
text: Arc<String>,
}

pub struct Vfs {
roots: Arena<VfsRoot, RootFilter>,
files: Arena<VfsFile, VfsFileData>,
root2files: FxHashMap<VfsRoot, FxHashSet<VfsFile>>,
pending_changes: Vec<VfsChange>,
worker: io::Worker,
worker_handle: WorkerHandle,
}

impl fmt::Debug for Vfs {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("Vfs { ... }")
}
}

impl Vfs {
pub fn new(mut roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) {
let (worker, worker_handle) = io::start();

let mut res = Vfs {
roots: Arena::default(),
files: Arena::default(),
root2files: FxHashMap::default(),
worker,
worker_handle,
pending_changes: Vec::new(),
};

// A hack to make nesting work.
roots.sort_by_key(|it| Reverse(it.as_os_str().len()));
for (i, path) in roots.iter().enumerate() {
let root = res.roots.alloc(RootFilter::new(path.clone()));
res.root2files.insert(root, Default::default());
let nested = roots[..i]
.iter()
.filter(|it| it.starts_with(path))
.map(|it| it.clone())
.collect::<Vec<_>>();
let filter = move |entry: &DirEntry| {
if entry.file_type().is_file() {
has_rs_extension(entry.path())
} else {
nested.iter().all(|it| it != entry.path())
}
};
let task = io::Task {
root,
path: path.clone(),
filter: Box::new(filter),
};
res.worker.inp.send(task);
}
let roots = res.roots.iter().map(|(id, _)| id).collect();
(res, roots)
}

pub fn root2path(&self, root: VfsRoot) -> PathBuf {
self.roots[root].root.clone()
}

pub fn path2file(&self, path: &Path) -> Option<VfsFile> {
if let Some((_root, _path, Some(file))) = self.find_root(path) {
return Some(file);
}
None
}

pub fn file2path(&self, file: VfsFile) -> PathBuf {
let rel_path = &self.files[file].path;
let root_path = &self.roots[self.files[file].root].root;
rel_path.to_path(root_path)
}

pub fn file_for_path(&self, path: &Path) -> Option<VfsFile> {
if let Some((_root, _path, Some(file))) = self.find_root(path) {
return Some(file);
}
None
}

pub fn load(&mut self, path: &Path) -> Option<VfsFile> {
if let Some((root, rel_path, file)) = self.find_root(path) {
return if let Some(file) = file {
Some(file)
} else {
let text = fs::read_to_string(path).unwrap_or_default();
let text = Arc::new(text);
let file = self.add_file(root, rel_path.clone(), Arc::clone(&text));
let change = VfsChange::AddFile {
file,
text,
root,
path: rel_path,
};
self.pending_changes.push(change);
Some(file)
};
}
None
}

pub fn task_receiver(&self) -> &Receiver<io::TaskResult> {
&self.worker.out
}

pub fn handle_task(&mut self, task: io::TaskResult) {
let mut files = Vec::new();
// While we were scanning the root in the background, a file might have
// been opened in the editor, so we need to account for that.
let exising = self.root2files[&task.root]
.iter()
.map(|&file| (self.files[file].path.clone(), file))
.collect::<FxHashMap<_, _>>();
for (path, text) in task.files {
if let Some(&file) = exising.get(&path) {
let text = Arc::clone(&self.files[file].text);
files.push((file, path, text));
continue;
}
let text = Arc::new(text);
let file = self.add_file(task.root, path.clone(), Arc::clone(&text));
files.push((file, path, text));
}

let change = VfsChange::AddRoot {
root: task.root,
files,
};
self.pending_changes.push(change);
}

pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> {
let mut res = None;
if let Some((root, path, file)) = self.find_root(path) {
let text = Arc::new(text);
let change = if let Some(file) = file {
res = Some(file);
self.change_file(file, Arc::clone(&text));
VfsChange::ChangeFile { file, text }
} else {
let file = self.add_file(root, path.clone(), Arc::clone(&text));
res = Some(file);
VfsChange::AddFile {
file,
text,
root,
path,
}
};
self.pending_changes.push(change);
}
res
}

pub fn change_file_overlay(&mut self, path: &Path, new_text: String) {
if let Some((_root, _path, file)) = self.find_root(path) {
let file = file.expect("can't change a file which wasn't added");
let text = Arc::new(new_text);
self.change_file(file, Arc::clone(&text));
let change = VfsChange::ChangeFile { file, text };
self.pending_changes.push(change);
}
}

pub fn remove_file_overlay(&mut self, path: &Path) -> Option<VfsFile> {
let mut res = None;
if let Some((root, path, file)) = self.find_root(path) {
let file = file.expect("can't remove a file which wasn't added");
res = Some(file);
let full_path = path.to_path(&self.roots[root].root);
let change = if let Ok(text) = fs::read_to_string(&full_path) {
let text = Arc::new(text);
self.change_file(file, Arc::clone(&text));
VfsChange::ChangeFile { file, text }
} else {
self.remove_file(file);
VfsChange::RemoveFile { root, file, path }
};
self.pending_changes.push(change);
}
res
}

pub fn commit_changes(&mut self) -> Vec<VfsChange> {
mem::replace(&mut self.pending_changes, Vec::new())
}

/// Shut down the VFS and terminate the background watching thread.
pub fn shutdown(self) -> thread::Result<()> {
let _ = self.worker.shutdown();
self.worker_handle.shutdown()
}

fn add_file(&mut self, root: VfsRoot, path: RelativePathBuf, text: Arc<String>) -> VfsFile {
let data = VfsFileData { root, path, text };
let file = self.files.alloc(data);
self.root2files.get_mut(&root).unwrap().insert(file);
file
}

fn change_file(&mut self, file: VfsFile, new_text: Arc<String>) {
self.files[file].text = new_text;
}

fn remove_file(&mut self, file: VfsFile) {
//FIXME: use arena with removal
self.files[file].text = Default::default();
self.files[file].path = Default::default();
let root = self.files[file].root;
let removed = self.root2files.get_mut(&root).unwrap().remove(&file);
assert!(removed);
}

fn find_root(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf, Option<VfsFile>)> {
let (root, path) = self
.roots
.iter()
.find_map(|(root, data)| data.can_contain(path).map(|it| (root, it)))?;
let file = self.root2files[&root]
.iter()
.map(|&it| it)
.find(|&file| self.files[file].path == path);
Some((root, path, file))
}
}

#[derive(Debug, Clone)]
pub enum VfsChange {
AddRoot {
root: VfsRoot,
files: Vec<(VfsFile, RelativePathBuf, Arc<String>)>,
},
AddFile {
root: VfsRoot,
file: VfsFile,
path: RelativePathBuf,
text: Arc<String>,
},
RemoveFile {
root: VfsRoot,
file: VfsFile,
path: RelativePathBuf,
},
ChangeFile {
file: VfsFile,
text: Arc<String>,
},
}
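From the outside, the whole API reduces to a short loop: build the Vfs, feed every scan task from task_receiver back through handle_task, and drain commit_changes to get VfsChange values (the test file below and ServerWorldState::process_changes above both do exactly this). A condensed consumer sketch; the load_roots function itself is illustrative:

use std::path::PathBuf;
use ra_vfs::{Vfs, VfsChange};

// Illustrative consumer loop; `load_roots` is not part of this change.
fn load_roots(roots: Vec<PathBuf>) {
    let (mut vfs, root_ids) = Vfs::new(roots);
    // One background scan task arrives per root.
    for _ in 0..root_ids.len() {
        let task = vfs.task_receiver().recv().unwrap();
        vfs.handle_task(task);
    }
    for change in vfs.commit_changes() {
        match change {
            VfsChange::AddRoot { root, files } => {
                eprintln!("root {:?} loaded with {} files", root, files.len())
            }
            VfsChange::AddFile { path, .. } => eprintln!("added {}", path.display()),
            VfsChange::RemoveFile { path, .. } => eprintln!("removed {}", path.display()),
            VfsChange::ChangeFile { file, .. } => eprintln!("changed {:?}", file),
        }
    }
    vfs.shutdown().unwrap();
}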
crates/ra_vfs/tests/vfs.rs (new file, 101 lines)
@ -0,0 +1,101 @@
use std::{
fs,
collections::HashSet,
};

use tempfile::tempdir;

use ra_vfs::{Vfs, VfsChange};

#[test]
fn test_vfs_works() -> std::io::Result<()> {
let files = [
("a/foo.rs", "hello"),
("a/bar.rs", "world"),
("a/b/baz.rs", "nested hello"),
];

let dir = tempdir()?;
for (path, text) in files.iter() {
let file_path = dir.path().join(path);
fs::create_dir_all(file_path.parent().unwrap())?;
fs::write(file_path, text)?
}

let a_root = dir.path().join("a");
let b_root = dir.path().join("a/b");

let (mut vfs, _) = Vfs::new(vec![a_root, b_root]);
for _ in 0..2 {
let task = vfs.task_receiver().recv().unwrap();
vfs.handle_task(task);
}
{
let files = vfs
.commit_changes()
.into_iter()
.flat_map(|change| {
let files = match change {
VfsChange::AddRoot { files, .. } => files,
_ => panic!("unexpected change"),
};
files.into_iter().map(|(_id, path, text)| {
let text: String = (&*text).clone();
(format!("{}", path.display()), text)
})
})
.collect::<HashSet<_>>();

let expected_files = [
("foo.rs", "hello"),
("bar.rs", "world"),
("baz.rs", "nested hello"),
]
.iter()
.map(|(path, text)| (path.to_string(), text.to_string()))
.collect::<HashSet<_>>();

assert_eq!(files, expected_files);
}

vfs.add_file_overlay(&dir.path().join("a/b/baz.rs"), "quux".to_string());
let change = vfs.commit_changes().pop().unwrap();
match change {
VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "quux"),
_ => panic!("unexpected change"),
}

vfs.change_file_overlay(&dir.path().join("a/b/baz.rs"), "m".to_string());
let change = vfs.commit_changes().pop().unwrap();
match change {
VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "m"),
_ => panic!("unexpected change"),
}

vfs.remove_file_overlay(&dir.path().join("a/b/baz.rs"));
let change = vfs.commit_changes().pop().unwrap();
match change {
VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "nested hello"),
_ => panic!("unexpected change"),
}

vfs.add_file_overlay(&dir.path().join("a/b/spam.rs"), "spam".to_string());
let change = vfs.commit_changes().pop().unwrap();
match change {
VfsChange::AddFile { text, path, .. } => {
assert_eq!(&*text, "spam");
assert_eq!(path, "spam.rs");
}
_ => panic!("unexpected change"),
}

vfs.remove_file_overlay(&dir.path().join("a/b/spam.rs"));
let change = vfs.commit_changes().pop().unwrap();
match change {
VfsChange::RemoveFile { .. } => (),
_ => panic!("unexpected change"),
}

vfs.shutdown().unwrap();
Ok(())
}
@ -30,13 +30,16 @@ where
impl<I, O> Worker<I, O> {
/// Stops the worker. Returns the message receiver to fetch results which
/// have become ready before the worker is stopped.
pub fn stop(self) -> Receiver<O> {
pub fn shutdown(self) -> Receiver<O> {
self.out
}

pub fn send(&self, item: I) {
self.inp.send(item)
}
pub fn recv(&self) -> Option<O> {
self.out.recv()
}
}

impl WorkerHandle {
@ -45,11 +48,11 @@ impl WorkerHandle {
WorkerHandle {
name,
thread,
bomb: DropBomb::new(format!("WorkerHandle {} was not stopped", name)),
bomb: DropBomb::new(format!("WorkerHandle {} was not shutdown", name)),
}
}

pub fn stop(mut self) -> thread::Result<()> {
pub fn shutdown(mut self) -> thread::Result<()> {
log::info!("waiting for {} to finish ...", self.name);
let name = self.name;
self.bomb.defuse();
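The stop -> shutdown rename touches both halves of the pair: the Worker owns the channels and the WorkerHandle owns the thread, and they are shut down in that order so the DropBomb inside the handle is defused. A small end-to-end sketch of the renamed API, using the spawn signature seen in the deleted watch module earlier in this diff (the demo function and the doubler worker are illustrative):

// Sketch of the thread_worker API after the stop -> shutdown rename.
fn demo() -> std::thread::Result<()> {
    let (worker, handle) = thread_worker::spawn::<u32, u32, _>(
        "doubler",
        4,
        |input_receiver, output_sender| {
            input_receiver
                .map(|n| n * 2)
                .for_each(|n| output_sender.send(n))
        },
    );
    worker.send(21);
    assert_eq!(worker.recv(), Some(42));
    // Closing the input channel first lets the worker thread finish...
    let _pending = worker.shutdown();
    // ...and joining it defuses the DropBomb inside the handle.
    handle.shutdown()
}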