//! This module provides `StaticIndex` which is used for powering
//! read-only code browsers and emitting LSIF

use std::collections::HashMap;

use hir::{db::HirDatabase, Crate, Module, Semantics};
use ide_db::{
    base_db::{FileId, FileRange, SourceDatabaseExt},
    defs::Definition,
    RootDatabase,
};
use rustc_hash::FxHashSet;
use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};

use crate::{
    display::TryToNav, hover::hover_for_definition, Analysis, Fold, HoverConfig, HoverDocFormat,
    HoverResult,
};

/// A static representation of fully analyzed source code.
///
/// The intended use-case is powering read-only code browsers and emitting LSIF
pub struct StaticIndex<'a> {
    /// One entry per indexed file: its folding ranges plus the tokens found in it.
    pub files: Vec<StaticIndexedFile>,
    /// Per-definition data (hover, definition site, references), shared between
    /// all files and addressed through `TokenId`s.
    pub tokens: TokenStore,
    // Used to compute folding ranges while indexing a file.
    analysis: &'a Analysis,
    db: &'a RootDatabase,
    // Interns definitions: a definition referenced from many tokens (possibly
    // across files) gets exactly one `TokenStaticData` entry.
    def_map: HashMap<Definition, TokenId>,
}

pub struct ReferenceData {
|
|
|
|
pub range: FileRange,
|
|
|
|
pub is_definition: bool,
|
|
|
|
}
|
|
|
|
|
2021-09-10 10:30:53 -05:00
|
|
|
/// Data collected for a single definition, shared by every token that
/// resolves to it.
pub struct TokenStaticData {
    /// Hover contents for the definition, if any could be computed.
    pub hover: Option<HoverResult>,
    /// Location of the definition site, when it can be navigated to.
    pub definition: Option<FileRange>,
    /// Every recorded occurrence, including the definition itself.
    pub references: Vec<ReferenceData>,
}

/// Identifier of an interned token: an index into [`TokenStore`].
// `Debug` is derived in addition to the existing traits so ids render
// usefully in logs and test failures; `usize` supports all of these.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(usize);

/// Append-only arena of [`TokenStaticData`], addressed by [`TokenId`].
#[derive(Default)]
pub struct TokenStore(Vec<TokenStaticData>);

impl TokenStore {
|
|
|
|
pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
|
|
|
|
let id = TokenId(self.0.len());
|
|
|
|
self.0.push(data);
|
|
|
|
id
|
|
|
|
}
|
|
|
|
|
2021-09-23 07:58:21 -05:00
|
|
|
pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
|
|
|
|
self.0.get_mut(id.0)
|
|
|
|
}
|
|
|
|
|
2021-09-18 12:44:47 -05:00
|
|
|
pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
|
|
|
|
self.0.get(id.0)
|
|
|
|
}
|
2021-09-23 07:58:21 -05:00
|
|
|
|
|
|
|
pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
|
|
|
|
self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
|
2021-09-18 12:44:47 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-08 06:35:28 -05:00
|
|
|
/// The per-file part of the index.
pub struct StaticIndexedFile {
    pub file_id: FileId,
    /// Folding ranges of the file.
    pub folds: Vec<Fold>,
    /// Ranges of indexed tokens in this file, with the id of the definition
    /// data each one resolves to.
    pub tokens: Vec<(TextRange, TokenId)>,
}

fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
|
|
|
|
let mut worklist: Vec<_> =
|
|
|
|
Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
|
|
|
|
let mut modules = Vec::new();
|
|
|
|
|
|
|
|
while let Some(module) = worklist.pop() {
|
|
|
|
modules.push(module);
|
|
|
|
worklist.extend(module.children(db));
|
|
|
|
}
|
|
|
|
|
|
|
|
modules
|
|
|
|
}
|
|
|
|
|
2021-09-18 12:44:47 -05:00
|
|
|
impl StaticIndex<'_> {
    /// Indexes a single file: collects its folding ranges, then resolves every
    /// identifier-like token to a definition, recording hover data, the
    /// definition site, and this occurrence as a reference.
    fn add_file(&mut self, file_id: FileId) {
        let folds = self.analysis.folding_ranges(file_id).unwrap();
        // hovers
        let sema = hir::Semantics::new(self.db);
        let tokens_or_nodes = sema.parse(file_id).syntax().clone();
        // Keep only the leaf tokens of the syntax tree.
        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
            syntax::NodeOrToken::Node(_) => None,
            syntax::NodeOrToken::Token(x) => Some(x),
        });
        let hover_config =
            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
        // Only identifier-like tokens (plus self/super/crate and numbers, which
        // can appear in tuple-field access) may resolve to a definition.
        let tokens = tokens.filter(|token| match token.kind() {
            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
            _ => false,
        });
        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
        for token in tokens {
            let range = token.text_range();
            let node = token.parent().unwrap();
            // Tokens without exactly one resolved definition are skipped.
            let def = match get_definition(&sema, token.clone()) {
                Some(x) => x,
                None => continue,
            };
            // Intern the definition: reuse its id if it was seen before,
            // possibly while indexing another file.
            let id = if let Some(x) = self.def_map.get(&def) {
                *x
            } else {
                let x = self.tokens.insert(TokenStaticData {
                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
                    definition: def
                        .try_to_nav(self.db)
                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
                    references: vec![],
                });
                self.def_map.insert(def, x);
                x
            };
            let token = self.tokens.get_mut(id).unwrap();
            // Record this occurrence as a reference of the definition; it is
            // the definition itself when it coincides with the definition's
            // focus (or full) range in this file.
            token.references.push(ReferenceData {
                range: FileRange { range, file_id },
                is_definition: match def.try_to_nav(self.db) {
                    Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
                    None => false,
                },
            });
            result.tokens.push((range, id));
        }
        self.files.push(result);
    }

    /// Builds the index for every file of every workspace (non-library) module.
    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> StaticIndex<'a> {
        // Skip modules living in library source roots: only workspace code
        // is indexed.
        let work = all_modules(db).into_iter().filter(|module| {
            let file_id = module.definition_source(db).file_id.original_file(db);
            let source_root = db.file_source_root(file_id);
            let source_root = db.source_root(source_root);
            !source_root.is_library
        });
        let mut this = StaticIndex {
            files: vec![],
            tokens: Default::default(),
            analysis,
            db,
            def_map: Default::default(),
        };
        let mut visited_files = FxHashSet::default();
        for module in work {
            let file_id = module.definition_source(db).file_id.original_file(db);
            // Several modules can share one file; index each file only once.
            if visited_files.contains(&file_id) {
                continue;
            }
            this.add_file(file_id);
            // mark the file
            visited_files.insert(file_id);
        }
        this
    }
}

fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
|
|
|
|
for token in sema.descend_into_macros_many(token) {
|
|
|
|
let def = Definition::from_token(&sema, &token);
|
|
|
|
if let [x] = def.as_slice() {
|
|
|
|
return Some(*x);
|
|
|
|
} else {
|
|
|
|
continue;
|
|
|
|
};
|
|
|
|
}
|
|
|
|
None
|
|
|
|
}
|
2021-09-26 04:17:57 -05:00
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use crate::{fixture, StaticIndex};
    use ide_db::base_db::FileRange;
    use std::collections::HashSet;

    /// Asserts that the indexed token ranges are exactly the annotated ranges
    /// of the fixture — no extras, none missing.
    fn check_all_ranges(ra_fixture: &str) {
        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
        let s = StaticIndex::compute(&*analysis.db, &analysis);
        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
        for f in s.files {
            for (range, _) in f.tokens {
                let x = FileRange { file_id: f.file_id, range };
                if !range_set.contains(&x) {
                    panic!("additional range {:?}", x);
                }
                range_set.remove(&x);
            }
        }
        if !range_set.is_empty() {
            panic!("unfound ranges {:?}", range_set);
        }
    }

    /// Asserts that the recorded definition sites are exactly the annotated
    /// ranges of the fixture — no extras, none missing.
    fn check_definitions(ra_fixture: &str) {
        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
        let s = StaticIndex::compute(&*analysis.db, &analysis);
        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
        for (_, t) in s.tokens.iter() {
            if let Some(x) = t.definition {
                if !range_set.contains(&x) {
                    panic!("additional definition {:?}", x);
                }
                range_set.remove(&x);
            }
        }
        if !range_set.is_empty() {
            panic!("unfound definitions {:?}", range_set);
        }
    }

    #[test]
    fn struct_and_enum() {
        check_all_ranges(
            r#"
struct Foo;
     //^^^
enum E { X(Foo) }
   //^   ^ ^^^
"#,
        );
        check_definitions(
            r#"
struct Foo;
     //^^^
enum E { X(Foo) }
   //^   ^
"#,
        );
    }

    #[test]
    fn derives() {
        check_all_ranges(
            r#"
#[rustc_builtin_macro]
pub macro Copy {}
        //^^^^
#[rustc_builtin_macro]
pub macro derive {}
        //^^^^^^
#[derive(Copy)]
//^^^^^^ ^^^^
struct Hello(i32);
     //^^^^^ ^^^
"#,
        );
    }
}