//! base_db defines basic database traits. The concrete DB is defined by ide.
mod cancellation;
mod input;
mod change;
pub mod fixture;

use std::{panic, sync::Arc};

use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};

pub use crate::{
    cancellation::Canceled,
    change::Change,
    input::{
        CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, Dependency, Edition, Env,
        ProcMacro, ProcMacroId, ProcMacroKind, SourceRoot, SourceRootId,
    },
};
pub use salsa;
pub use vfs::{file_set::FileSet, FileId, VfsPath};

#[macro_export]
macro_rules! impl_intern_key {
    ($name:ident) => {
        impl $crate::salsa::InternKey for $name {
            fn from_intern_id(v: $crate::salsa::InternId) -> Self {
                $name(v)
            }
            fn as_intern_id(&self) -> $crate::salsa::InternId {
                self.0
            }
        }
    };
}
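
// Illustrative sketch (not part of this crate's API surface): downstream crates
// define interning keys as newtypes over `salsa::InternId` and let the macro
// above fill in the `InternKey` impl. The `ExampleDefId` name is hypothetical.
//
//     #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
//     pub struct ExampleDefId(salsa::InternId);
//     impl_intern_key!(ExampleDefId);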

pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}
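
// Illustrative sketch: a concrete database that implements several query traits
// can hand out trait-object views of itself via `Upcast`. `MyDatabase` is a
// hypothetical type assumed to implement `SourceDatabase`.
//
//     impl Upcast<dyn SourceDatabase> for MyDatabase {
//         fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
//             &*self
//         }
//     }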

pub trait CheckCanceled {
    /// Aborts the current query if there are pending changes.
    ///
    /// rust-analyzer needs to be able to answer semantic questions about the
    /// code while the code is being modified. A common problem is that a
    /// long-running query is being calculated when a new change arrives.
    ///
    /// We can't just apply the change immediately: this will cause the pending
    /// query to see inconsistent state (it will observe an absence of
    /// repeatable reads). So what we do is we **cancel** all pending queries
    /// before applying the change.
    ///
    /// We implement cancellation by panicking with a special value and catching
    /// it on the API boundary. Salsa explicitly supports this use-case.
    fn check_canceled(&self);

    fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
    where
        Self: Sized + panic::RefUnwindSafe,
        F: FnOnce(&Self) -> T + panic::UnwindSafe,
    {
        panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() {
            Ok(canceled) => *canceled,
            Err(payload) => panic::resume_unwind(payload),
        })
    }
}
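
// Illustrative sketch of the API-boundary side: the host wraps query execution
// in `catch_canceled` and treats cancellation as "a change arrived, re-run the
// query later" rather than as a real panic. `db` and `file_id` are hypothetical
// values; `db` is assumed to be a concrete `salsa::Database` + `SourceDatabase`.
//
//     match db.catch_canceled(|db| db.parse(file_id)) {
//         Ok(parse) => { /* use the syntax tree */ }
//         Err(_canceled) => { /* canceled; retry after applying the pending change */ }
//     }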

impl<T: salsa::Database> CheckCanceled for T {
    fn check_canceled(&self) {
        if self.salsa_runtime().is_current_revision_canceled() {
            Canceled::throw()
        }
    }
}

#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
    pub file_id: FileId,
    pub offset: TextSize,
}

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct FileRange {
    pub file_id: FileId,
    pub range: TextRange,
}

pub const DEFAULT_LRU_CAP: usize = 128;

pub trait FileLoader {
    /// Text of the file.
    fn file_text(&self, file_id: FileId) -> Arc<String>;
    /// Note that we intentionally accept a `&str` and not a `&Path` here. This
    /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such,
    /// so the input is guaranteed to be a utf-8 string. One might be tempted to
    /// introduce some kind of "utf-8 path with / separators", but that's a bad
    /// idea. Behold `#[path = "C://no/way"]`.
    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
}
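
// Illustrative: `resolve_path` is anchored at the file containing the `mod`
// item, so `#[path = "foo/bar.rs"] mod m;` written in `src/lib.rs` becomes,
// roughly, `resolve_path(lib_rs_file_id, "foo/bar.rs")` and is expected to
// return the `FileId` of `src/foo/bar.rs`. The identifiers here are hypothetical.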

/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
    // Parses the file into the syntax tree.
    #[salsa::invoke(parse_query)]
    fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;

    /// The crate graph.
    #[salsa::input]
    fn crate_graph(&self) -> Arc<CrateGraph>;
}

fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
    let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
    let text = db.file_text(file_id);
    SourceFile::parse(&*text)
}

/// We don't want to give HIR knowledge of source roots, hence we extract these
/// methods into a separate DB.
#[salsa::query_group(SourceDatabaseExtStorage)]
pub trait SourceDatabaseExt: SourceDatabase {
    #[salsa::input]
    fn file_text(&self, file_id: FileId) -> Arc<String>;
    /// Source root of the file.
    #[salsa::input]
    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
    /// Contents of the source root.
    #[salsa::input]
    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;

    fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
}
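
// Illustrative sketch: `#[salsa::input]` queries get salsa-generated setters,
// so a host pushes new inputs roughly like this (the `db`, `file_id`, and
// `crate_graph` values are assumed; see the `fixture` and `change` modules for
// how this crate actually drives the setters):
//
//     db.set_file_text(file_id, Arc::new(new_text));
//     db.set_crate_graph(Arc::new(crate_graph));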

fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
    let graph = db.crate_graph();
    let res = graph
        .iter()
        .filter(|&krate| {
            let root_file = graph[krate].root_file_id;
            db.file_source_root(root_file) == id
        })
        .collect::<FxHashSet<_>>();
    Arc::new(res)
}

/// Silly workaround for cyclic deps between the traits
pub struct FileLoaderDelegate<T>(pub T);

impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        SourceDatabaseExt::file_text(self.0, file_id)
    }
    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
        // FIXME: this *somehow* should be platform agnostic...
        let source_root = self.0.file_source_root(anchor);
        let source_root = self.0.source_root(source_root);
        source_root.file_set.resolve_path(anchor, path)
    }

    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        let source_root = self.0.file_source_root(file_id);
        self.0.source_root_crates(source_root)
    }
}
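
// Illustrative sketch: a concrete database forwards its own `FileLoader` impl
// through the delegate, which is what breaks the `FileLoader` /
// `SourceDatabaseExt` cycle. `MyDatabase` is a hypothetical type assumed to
// implement `SourceDatabaseExt`.
//
//     impl FileLoader for MyDatabase {
//         fn file_text(&self, file_id: FileId) -> Arc<String> {
//             FileLoaderDelegate(self).file_text(file_id)
//         }
//         fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
//             FileLoaderDelegate(self).resolve_path(anchor, path)
//         }
//         fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
//             FileLoaderDelegate(self).relevant_crates(file_id)
//         }
//     }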