//! Database used for testing `hir_def`.
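//!
//! `TestDB` implements the salsa database traits used by `hir_def` and records
//! salsa events, so tests can locate modules by file or cursor position and
//! assert which queries were executed (see `log` and `log_executed`).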

use std::{
    fmt, panic,
    sync::{Arc, Mutex},
};

use base_db::{
    salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition,
    SourceDatabase, Upcast,
};
use hir_expand::{db::AstDatabase, InFile};
use rustc_hash::FxHashSet;
use syntax::{algo, ast, AstNode};

use crate::{
    db::DefDatabase,
    nameres::{DefMap, ModuleSource},
    src::HasSource,
    LocalModuleId, Lookup, ModuleDefId, ModuleId,
};

#[salsa::database(
    base_db::SourceDatabaseExtStorage,
    base_db::SourceDatabaseStorage,
    hir_expand::db::AstDatabaseStorage,
    crate::db::InternDatabaseStorage,
    crate::db::DefDatabaseStorage
)]
pub(crate) struct TestDB {
    storage: salsa::Storage<TestDB>,
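    // `Some` only while `log` is recording; `salsa_event` pushes into it.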
    events: Mutex<Option<Vec<salsa::Event>>>,
}

impl Default for TestDB {
    fn default() -> Self {
        let mut this = Self { storage: Default::default(), events: Default::default() };
        this.set_enable_proc_attr_macros(true);
        this
    }
}

impl Upcast<dyn AstDatabase> for TestDB {
    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
        &*self
    }
}

impl Upcast<dyn DefDatabase> for TestDB {
    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
        &*self
    }
}

impl salsa::Database for TestDB {
    fn salsa_event(&self, event: salsa::Event) {
        let mut events = self.events.lock().unwrap();
        if let Some(events) = &mut *events {
            events.push(event);
        }
    }
}

impl fmt::Debug for TestDB {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("TestDB").finish()
    }
}

impl panic::RefUnwindSafe for TestDB {}

impl FileLoader for TestDB {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}

impl TestDB {
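    /// Returns the module whose `ModuleOrigin` is the given file, searching the
    /// def maps of all crates that contain the file. Panics if no such module
    /// exists.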
    pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
        for &krate in self.relevant_crates(file_id).iter() {
            let crate_def_map = self.crate_def_map(krate);
            for (local_id, data) in crate_def_map.modules() {
                if data.origin.file_id() == Some(file_id) {
                    return crate_def_map.module_id(local_id);
                }
            }
        }
        panic!("Can't find module for file")
    }

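    /// Returns the innermost module containing `position`, descending into
    /// block-expression `DefMap`s where present.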
    pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
        let file_module = self.module_for_file(position.file_id);
        let mut def_map = file_module.def_map(self);
        let module = self.mod_at_position(&def_map, position);

        def_map = match self.block_at_position(&def_map, position) {
            Some(it) => it,
            None => return def_map.module_id(module),
        };
        loop {
            let new_map = self.block_at_position(&def_map, position);
            match new_map {
                Some(new_block) if !Arc::ptr_eq(&new_block, &def_map) => {
                    def_map = new_block;
                }
                _ => {
                    // FIXME: handle `mod` inside block expression
                    return def_map.module_id(def_map.root());
                }
            }
        }
    }

    /// Finds the smallest/innermost module in `def_map` containing `position`.
    fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
        let mut size = None;
        let mut res = def_map.root();
        for (module, data) in def_map.modules() {
            let src = data.definition_source(self);
            if src.file_id != position.file_id.into() {
                continue;
            }

            let range = match src.value {
                ModuleSource::SourceFile(it) => it.syntax().text_range(),
                ModuleSource::Module(it) => it.syntax().text_range(),
                ModuleSource::BlockExpr(it) => it.syntax().text_range(),
            };

            if !range.contains(position.offset) {
                continue;
            }

            let new_size = match size {
                None => range.len(),
                Some(size) => {
                    if range.len() < size {
                        range.len()
                    } else {
                        size
                    }
                }
            };

            if size != Some(new_size) {
                cov_mark::hit!(submodule_in_testdb);
                size = Some(new_size);
                res = module;
            }
        }

        res
    }

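    /// Returns the `DefMap` of the innermost block expression containing
    /// `position`, or `None` if the position is not inside a block that has its
    /// own `DefMap`.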
    fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
        // Find the smallest (innermost) function in `def_map` containing the cursor.
        let mut size = None;
        let mut fn_def = None;
        for (_, module) in def_map.modules() {
            let file_id = module.definition_source(self).file_id;
            if file_id != position.file_id.into() {
                continue;
            }
            for decl in module.scope.declarations() {
                if let ModuleDefId::FunctionId(it) = decl {
                    let range = it.lookup(self).source(self).value.syntax().text_range();

                    if !range.contains(position.offset) {
                        continue;
                    }

                    let new_size = match size {
                        None => range.len(),
                        Some(size) => {
                            if range.len() < size {
                                range.len()
                            } else {
                                size
                            }
                        }
                    };
                    if size != Some(new_size) {
                        size = Some(new_size);
                        fn_def = Some(it);
                    }
                }
            }
        }

        // Find the innermost block expression that has a `DefMap`.
        let def_with_body = fn_def?.into();
        let (_, source_map) = self.body_with_source_map(def_with_body);
        let scopes = self.expr_scopes(def_with_body);
        let root = self.parse(position.file_id);

        let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
            .filter_map(|node| {
                let block = ast::BlockExpr::cast(node)?;
                let expr = ast::Expr::from(block);
                let expr_id = source_map.node_expr(InFile::new(position.file_id.into(), &expr))?;
                let scope = scopes.scope_for(expr_id).unwrap();
                Some(scope)
            });

        for scope in scope_iter {
            let containing_blocks =
                scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));

            for block in containing_blocks {
                if let Some(def_map) = self.block_def_map(block) {
                    return Some(def_map);
                }
            }
        }

        None
    }

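    /// Runs `f` while recording salsa events and returns the events that were
    /// emitted.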
    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
        *self.events.lock().unwrap() = Some(Vec::new());
        f();
        self.events.lock().unwrap().take().unwrap()
    }

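    /// Runs `f` and returns the debug representation of every query that was
    /// actually executed (rather than served from the salsa cache) during the
    /// call.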
    pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
        let events = self.log(f);
        events
            .into_iter()
            .filter_map(|e| match e.kind {
                // This is pretty horrible, but `Debug` is the only way to inspect
                // QueryDescriptor at the moment.
                salsa::EventKind::WillExecute { database_key } => {
                    Some(format!("{:?}", database_key.debug(self)))
                }
                _ => None,
            })
            .collect()
    }
}