//! ra_db defines basic database traits. The concrete DB is defined by ra_ide_api.
mod cancellation;
mod input;
use std::{panic, sync::Arc};
use ra_syntax::{TextUnit, TextRange, SourceFile, Parse};
use relative_path::RelativePathBuf;
use ra_prof::profile;
pub use ::salsa;
pub use crate::{
cancellation::Canceled,
input::{
FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, Dependency, Edition,
},
};
pub trait CheckCanceled {
    /// Aborts the current query if there are pending changes.
///
/// rust-analyzer needs to be able to answer semantic questions about the
/// code while the code is being modified. A common problem is that a
/// long-running query is being calculated when a new change arrives.
///
/// We can't just apply the change immediately: this will cause the pending
    /// query to see an inconsistent state (it would observe the absence of
    /// repeatable reads). So we **cancel** all pending queries
/// before applying the change.
///
/// We implement cancellation by panicking with a special value and catching
    /// it on the API boundary. Salsa explicitly supports this use case; an
    /// illustrative usage sketch follows this trait.
fn check_canceled(&self);
fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
where
Self: Sized,
F: FnOnce(&Self) -> T + panic::UnwindSafe,
{
        // `&Self` is not required to be unwind-safe, so assert it in order to
        // call `catch_unwind`; non-cancellation panics are re-raised below.
        let this = panic::AssertUnwindSafe(self);
        // A `Canceled` panic becomes `Err(Canceled)`; anything else is
        // propagated via `resume_unwind`.
        panic::catch_unwind(|| f(*this)).map_err(|err| match err.downcast::<Canceled>() {
Ok(canceled) => *canceled,
Err(payload) => panic::resume_unwind(payload),
})
}
}
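
// An illustrative sketch, not part of the original ra_db API: one way a caller
// sitting on the API boundary might use `catch_canceled`. `file_text_len` is a
// hypothetical helper; the assumption is that the wrapped computation (or the
// queries it calls) periodically calls `check_canceled`, so a pending change
// makes it unwind with `Canceled`, which `catch_canceled` turns into an `Err`.
#[allow(dead_code)]
fn file_text_len(db: &impl SourceDatabase, file_id: FileId) -> Result<usize, Canceled> {
    db.catch_canceled(|db| {
        // Bail out early if a change is already pending.
        db.check_canceled();
        db.file_text(file_id).len()
    })
}
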
impl<T: salsa::Database> CheckCanceled for T {
    fn check_canceled(&self) {
        // Salsa reports the current revision as canceled once a new change is
        // waiting to be applied; unwinding here lets that change proceed.
        if self.salsa_runtime().is_current_revision_canceled() {
Canceled::throw()
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
pub offset: TextUnit,
}
#[derive(Clone, Copy, Debug)]
pub struct FileRange {
pub file_id: FileId,
pub range: TextRange,
}
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
/// (An illustrative sketch of a concrete database follows this trait.)
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: CheckCanceled + std::fmt::Debug {
/// Text of the file.
#[salsa::input]
fn file_text(&self, file_id: FileId) -> Arc<String>;
    /// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
fn parse(&self, file_id: FileId) -> Parse;
/// Path to a file, relative to the root of its source root.
#[salsa::input]
fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf;
/// Source root of the file.
#[salsa::input]
fn file_source_root(&self, file_id: FileId) -> SourceRootId;
/// Contents of the source root.
#[salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
    /// Crates whose root file lives in the given source root.
    fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>;
/// The crate graph.
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
}
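
// Hedged sketch, not part of this crate: roughly how a downstream crate (for
// example ra_ide_api, or a test fixture) might assemble a concrete database
// from `SourceDatabaseStorage`. The exact attribute and `salsa::Runtime` /
// `salsa_runtime` shape below assumes the salsa API of this period, so it is
// kept as a comment rather than real code:
//
//     #[salsa::database(SourceDatabaseStorage)]
//     #[derive(Debug, Default)]
//     struct ExampleDatabase {
//         runtime: salsa::Runtime<ExampleDatabase>,
//     }
//
//     impl salsa::Database for ExampleDatabase {
//         fn salsa_runtime(&self) -> &salsa::Runtime<ExampleDatabase> {
//             &self.runtime
//         }
//     }
//
// Inputs are then set explicitly (salsa generates a `set_*` setter for each
// `#[salsa::input]` query) and everything else is computed on demand, for
// example:
//
//     db.set_file_text(file_id, Arc::new(text));
//     db.set_file_source_root(file_id, source_root_id);
//     let parse = db.parse(file_id);
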
fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> {
let root = db.source_root(id);
let graph = db.crate_graph();
let res =
root.files.values().filter_map(|&it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>();
Arc::new(res)
}
fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse {
let _p = profile("parse_query");
let text = db.file_text(file_id);
SourceFile::parse(&*text)
}