diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 5b2384a054c..d50680ce14e 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -87,7 +87,7 @@ macro_rules! eprintln {
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData},
+    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index ab7a829bcab..bd71177990b 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -1,31 +1,62 @@
 //! This module provides `StaticIndex` which is used for powering
 //! read-only code browsers and emitting LSIF

+use std::collections::HashMap;
+
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
+use ide_db::base_db::{FileId, SourceDatabaseExt};
 use ide_db::RootDatabase;
+use ide_db::defs::Definition;
 use rustc_hash::FxHashSet;
 use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};

+use crate::hover::{get_definition_of_token, hover_for_definition};
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};

 /// A static representation of fully analyzed source code.
 ///
 /// The intended use-case is powering read-only code browsers and emitting LSIF
-pub struct StaticIndex {
+pub struct StaticIndex<'a> {
     pub files: Vec<StaticIndexedFile>,
+    pub tokens: TokenStore,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    def_map: HashMap<Definition, TokenId>,
 }

 pub struct TokenStaticData {
-    pub range: TextRange,
     pub hover: Option<HoverResult>,
 }

+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+#[derive(Default)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+    pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+        let id = TokenId(self.0.len());
+        self.0.push(data);
+        id
+    }
+
+    pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+        self.0.get(id.0)
+    }
+
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+    }
+}
+
 pub struct StaticIndexedFile {
     pub file_id: FileId,
     pub folds: Vec<Fold>,
-    pub tokens: Vec<TokenStaticData>,
+    pub tokens: Vec<(TextRange, TokenId)>,
 }

 fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
@@ -41,62 +72,81 @@ fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
     modules
 }

-impl StaticIndex {
-    pub fn compute(db: &RootDatabase, analysis: &Analysis) -> Cancellable<StaticIndex> {
+impl StaticIndex<'_> {
+    fn add_file(&mut self, file_id: FileId) -> Cancellable<()> {
+        let folds = self.analysis.folding_ranges(file_id)?;
+        // hovers
+        let sema = hir::Semantics::new(self.db);
+        let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+            syntax::NodeOrToken::Node(_) => None,
+            syntax::NodeOrToken::Token(x) => Some(x),
+        });
+        let hover_config =
+            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+        let tokens = tokens.filter(|token| match token.kind() {
+            IDENT
+            | INT_NUMBER
+            | LIFETIME_IDENT
+            | T![self]
+            | T![super]
+            | T![crate] => true,
+            _ => false,
+        });
+        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
+        for token in tokens {
+            let range = token.text_range();
+            let node = token.parent().unwrap();
+            let def = get_definition_of_token(
+                self.db,
+                &sema,
+                &sema.descend_into_macros(token),
+                file_id,
+                range.start(),
+                &mut None,
+            );
+            let def = if let Some(x) = def {
+                x
+            } else {
+                continue;
+            };
+            let id = if let Some(x) = self.def_map.get(&def) {
+                *x
+            } else {
+                let x = self.tokens.insert(TokenStaticData {
+                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                });
+                self.def_map.insert(def, x);
+                x
+            };
+            result.tokens.push((range, id));
+        }
+        self.files.push(result);
+        Ok(())
+    }
+
+    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
             let source_root = db.source_root(source_root);
             !source_root.is_library
         });
-
+        let mut this = StaticIndex {
+            files: vec![],
+            tokens: Default::default(),
+            analysis,
+            db,
+            def_map: Default::default(),
+        };
         let mut visited_files = FxHashSet::default();
-        let mut result_files = Vec::<StaticIndexedFile>::new();
         for module in work {
             let file_id = module.definition_source(db).file_id.original_file(db);
             if visited_files.contains(&file_id) {
                 continue;
             }
-            let folds = analysis.folding_ranges(file_id)?;
-            // hovers
-            let sema = hir::Semantics::new(db);
-            let tokens_or_nodes = sema.parse(file_id).syntax().clone();
-            let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
-                syntax::NodeOrToken::Node(_) => None,
-                syntax::NodeOrToken::Token(x) => Some(x),
-            });
-            let hover_config =
-                HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-            let tokens = tokens
-                .filter(|token| match token.kind() {
-                    IDENT
-                    | INT_NUMBER
-                    | LIFETIME_IDENT
-                    | T![self]
-                    | T![super]
-                    | T![crate]
-                    | T!['(']
-                    | T![')'] => true,
-                    _ => false,
-                })
-                .map(|token| {
-                    let range = token.text_range();
-                    let hover = analysis
-                        .hover(
-                            &hover_config,
-                            FileRange {
-                                file_id,
-                                range: TextRange::new(range.start(), range.start()),
-                            },
-                        )?
-                        .map(|x| x.info);
-                    Ok(TokenStaticData { range, hover })
-                })
-                .collect::<Result<Vec<_>, _>>()?;
-            result_files.push(StaticIndexedFile { file_id, folds, tokens });
+            this.add_file(file_id)?;
             // mark the file
             visited_files.insert(file_id);
         }
-        Ok(StaticIndex { files: result_files })
+        //eprintln!("{:#?}", token_map);
+        Ok(this)
     }
 }
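Note on the `static_index.rs` change above: a token no longer carries its own hover data. Each token's range is paired with an interned `TokenId`, and per-definition data is stored once in the shared `TokenStore`, keyed by `Definition` via `def_map`. A minimal sketch of how a consumer might walk the result, assuming only the types this diff exports (`demo` itself is a hypothetical function, not part of the change):

    use ide::{Analysis, Cancellable, RootDatabase, StaticIndex};

    fn demo(db: &RootDatabase, analysis: &Analysis) -> Cancellable<()> {
        let index = StaticIndex::compute(db, analysis)?;
        for file in &index.files {
            for (range, id) in &file.tokens {
                // Every occurrence of one definition shares a TokenId, so the
                // hover below was computed exactly once per definition.
                if let Some(hover) = index.tokens.get(*id).and_then(|t| t.hover.as_ref()) {
                    println!("{:?}: {}", range, hover.markup);
                }
            }
        }
        Ok(())
    }

The deduplication is what makes the `ResultSet` wiring in the LSIF emitter below possible.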
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 9d7d48f0bbf..509842516a8 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -1,15 +1,16 @@
 //! Lsif generator

+use std::collections::HashMap;
 use std::env;
 use std::time::Instant;

-use ide::{StaticIndex, StaticIndexedFile, TokenStaticData};
+use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
 use ide_db::LineIndexDatabase;

 use ide_db::base_db::salsa::{self, ParallelDatabase};
 use lsp_types::{lsif::*, Hover, HoverContents, NumberOrString};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
-use vfs::AbsPathBuf;
+use vfs::{AbsPathBuf, Vfs};

 use crate::cli::{
     flags,
@@ -27,9 +28,12 @@ fn clone(&self) -> Snap<salsa::Snapshot<RootDatabase>> {
     }
 }

-#[derive(Default)]
-struct LsifManager {
+struct LsifManager<'a> {
     count: i32,
+    token_map: HashMap<TokenId, Id>,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    vfs: &'a Vfs,
 }

 #[derive(Clone, Copy)]
@@ -41,7 +45,17 @@ fn from(Id(x): Id) -> Self {
     }
 }

-impl LsifManager {
+impl LsifManager<'_> {
+    fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
+        LsifManager {
+            count: 0,
+            token_map: HashMap::default(),
+            analysis,
+            db,
+            vfs,
+        }
+    }
+
     fn add(&mut self, data: Element) -> Id {
         let id = Id(self.count);
         self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
@@ -54,33 +68,67 @@ fn emit(&self, data: &str) {
         println!("{}", data);
     }

-    fn add_tokens(&mut self, line_index: &LineIndex, doc_id: Id, tokens: Vec<TokenStaticData>) {
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+        let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
+        self.token_map.insert(id, result_set_id);
+        if let Some(hover) = token.hover {
+            let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
+                result: Hover {
+                    contents: HoverContents::Markup(to_proto::markup_content(hover.markup)),
+                    range: None,
+                },
+            }));
+            self.add(Element::Edge(Edge::Hover(EdgeData {
+                in_v: hover_id.into(),
+                out_v: result_set_id.into(),
+            })));
+        }
+    }
+
+    fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
+        let StaticIndexedFile { file_id, tokens, folds } = file;
+        let path = self.vfs.file_path(file_id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        let text = self.analysis.file_text(file_id)?;
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let result = folds
+            .into_iter()
+            .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
+            .collect();
+        let folding_id = self.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
+        self.add(Element::Edge(Edge::FoldingRange(EdgeData {
+            in_v: folding_id.into(),
+            out_v: doc_id.into(),
+        })));
         let tokens_id = tokens
             .into_iter()
-            .map(|token| {
-                let token_id = self.add(Element::Vertex(Vertex::Range {
-                    range: to_proto::range(line_index, token.range),
+            .map(|(range, id)| {
+                let range_id = self.add(Element::Vertex(Vertex::Range {
+                    range: to_proto::range(&line_index, range),
                     tag: None,
                 }));
-                if let Some(hover) = token.hover {
-                    let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
-                        result: Hover {
-                            contents: HoverContents::Markup(to_proto::markup_content(hover.markup)),
-                            range: None,
-                        },
-                    }));
-                    self.add(Element::Edge(Edge::Hover(EdgeData {
-                        in_v: hover_id.into(),
-                        out_v: token_id.into(),
-                    })));
-                }
-                token_id.into()
+                let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+                self.add(Element::Edge(Edge::Next(EdgeData {
+                    in_v: result_set_id.into(),
+                    out_v: range_id.into(),
+                })));
+                range_id.into()
             })
             .collect();
         self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
             in_vs: tokens_id,
             out_v: doc_id.into(),
         })));
+        Ok(())
     }
 }
@@ -106,37 +154,18 @@ pub fn run(self) -> Result<()> {

         let si = StaticIndex::compute(db, &analysis)?;

-        let mut lsif = LsifManager::default();
+        let mut lsif = LsifManager::new(&analysis, db, &vfs);
         lsif.add(Element::Vertex(Vertex::MetaData(MetaData {
             version: String::from("0.5.0"),
             project_root: lsp_types::Url::from_file_path(path).unwrap(),
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for StaticIndexedFile { file_id, folds, tokens } in si.files {
-            let path = vfs.file_path(file_id);
-            let path = path.as_path().unwrap();
-            let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
-                language_id: "rust".to_string(),
-                uri: lsp_types::Url::from_file_path(path).unwrap(),
-            })));
-            let text = analysis.file_text(file_id)?;
-            let line_index = db.line_index(file_id);
-            let line_index = LineIndex {
-                index: line_index.clone(),
-                encoding: OffsetEncoding::Utf16,
-                endings: LineEndings::Unix,
-            };
-            let result = folds
-                .into_iter()
-                .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
-                .collect();
-            let folding_id = lsif.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
-            lsif.add(Element::Edge(Edge::FoldingRange(EdgeData {
-                in_v: folding_id.into(),
-                out_v: doc_id.into(),
-            })));
-            lsif.add_tokens(&line_index, doc_id, tokens);
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token);
+        }
+        for file in si.files {
+            lsif.add_file(file)?;
         }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())