//! This module provides `StaticIndex` which is used for powering
//! read-only code browsers and emitting LSIF.

use std::collections::HashMap;

use hir::Semantics;
use hir::{db::HirDatabase, Crate, Module};
use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
use ide_db::defs::Definition;
use ide_db::RootDatabase;
use rustc_hash::FxHashSet;
use syntax::{AstNode, SyntaxKind::*, T};
use syntax::{SyntaxToken, TextRange};

use crate::display::TryToNav;
use crate::hover::hover_for_definition;
use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};

/// A static representation of fully analyzed source code.
///
/// The intended use-case is powering read-only code browsers and emitting LSIF.
pub struct StaticIndex<'a> {
    pub files: Vec<StaticIndexedFile>,
    pub tokens: TokenStore,
    analysis: &'a Analysis,
    db: &'a RootDatabase,
    def_map: HashMap<Definition, TokenId>,
}

pub struct ReferenceData {
    pub range: FileRange,
    pub is_definition: bool,
}

pub struct TokenStaticData {
    pub hover: Option<HoverResult>,
    pub definition: Option<FileRange>,
    pub references: Vec<ReferenceData>,
}

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(usize);

#[derive(Default)]
pub struct TokenStore(Vec<TokenStaticData>);

impl TokenStore {
    pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
        let id = TokenId(self.0.len());
        self.0.push(data);
        id
    }

    pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
        self.0.get_mut(id.0)
    }

    pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
        self.0.get(id.0)
    }

    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
    }
}

pub struct StaticIndexedFile {
    pub file_id: FileId,
    pub folds: Vec<Fold>,
    pub tokens: Vec<(TextRange, TokenId)>,
}

fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
    let mut worklist: Vec<_> =
        Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
    let mut modules = Vec::new();

    while let Some(module) = worklist.pop() {
        modules.push(module);
        worklist.extend(module.children(db));
    }

    modules
}

impl StaticIndex<'_> {
    fn add_file(&mut self, file_id: FileId) -> Cancellable<()> {
        let folds = self.analysis.folding_ranges(file_id)?;
        // Compute hover, definition and reference data for each interesting token.
        let sema = hir::Semantics::new(self.db);
        let tokens_or_nodes = sema.parse(file_id).syntax().clone();
        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
            syntax::NodeOrToken::Node(_) => None,
            syntax::NodeOrToken::Token(x) => Some(x),
        });
        let hover_config =
            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
        // Only keep tokens that can resolve to a definition: identifiers,
        // lifetimes, tuple-field indices (`INT_NUMBER`), and `self`/`super`/`crate`.
        let tokens = tokens.filter(|token| match token.kind() {
            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
            _ => false,
        });
        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
        for token in tokens {
            let range = token.text_range();
            let node = token.parent().unwrap();
            let def = if let Some(x) = get_definition(&sema, token.clone()) {
                x
            } else {
                continue;
            };
            // Reuse the `TokenId` if this definition has been seen before;
            // otherwise compute its static data once and cache it.
            let id = if let Some(x) = self.def_map.get(&def) {
                *x
            } else {
                let x = self.tokens.insert(TokenStaticData {
                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
                    definition: def.try_to_nav(self.db).map(|x| FileRange {
                        file_id: x.file_id,
                        range: x.focus_or_full_range(),
                    }),
                    references: vec![],
                });
                self.def_map.insert(def, x);
                x
            };
            let token = self.tokens.get_mut(id).unwrap();
            token.references.push(ReferenceData {
                range: FileRange { range, file_id },
                is_definition: if let Some(x) = def.try_to_nav(self.db) {
                    x.file_id == file_id && x.focus_or_full_range() == range
                } else {
                    false
                },
            });
            result.tokens.push((range, id));
        }
        self.files.push(result);
        Ok(())
    }
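
    /// Computes a `StaticIndex` for all local (non-library) files in `db`.
    ///
    /// A minimal usage sketch; how `db` and `analysis` are obtained (e.g.
    /// from an `AnalysisHost` loaded elsewhere) is assumed here:
    ///
    /// ```ignore
    /// let index = StaticIndex::compute(&db, &analysis)?;
    /// for file in &index.files {
    ///     println!("{:?}: {} indexed tokens", file.file_id, file.tokens.len());
    /// }
    /// ```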
    pub fn compute<'a>(
        db: &'a RootDatabase,
        analysis: &'a Analysis,
    ) -> Cancellable<StaticIndex<'a>> {
        // Skip modules whose source root is a library; only workspace code is indexed.
        let work = all_modules(db).into_iter().filter(|module| {
            let file_id = module.definition_source(db).file_id.original_file(db);
            let source_root = db.file_source_root(file_id);
            let source_root = db.source_root(source_root);
            !source_root.is_library
        });
        let mut this = StaticIndex {
            files: vec![],
            tokens: Default::default(),
            analysis,
            db,
            def_map: Default::default(),
        };
        let mut visited_files = FxHashSet::default();
        for module in work {
            let file_id = module.definition_source(db).file_id.original_file(db);
            if visited_files.contains(&file_id) {
                continue;
            }
            this.add_file(file_id)?;
            // Several modules can live in one file (e.g. inline modules), so
            // mark the file as visited to index it only once.
            visited_files.insert(file_id);
        }
        Ok(this)
    }
}

fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
    for token in sema.descend_into_macros_many(token) {
        let def = Definition::from_token(&sema, &token);
        if let [x] = def.as_slice() {
            return Some(*x);
        } else {
            continue;
        };
    }
    None
}
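
// A minimal consumption sketch (kept as comments, since it is not part of
// this module's API): an LSIF emitter would walk `files`, then drain the
// shared `TokenStore`. `emit_occurrence` and `emit_token` are hypothetical
// sinks supplied by the caller, not functions of this crate.
//
//     let index = StaticIndex::compute(&db, &analysis)?;
//     for file in &index.files {
//         for &(range, id) in &file.tokens {
//             emit_occurrence(file.file_id, range, id);
//         }
//     }
//     for (id, data) in index.tokens.iter() {
//         emit_token(id, &data);
//     }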