fold ScopeWithSyntax into SourceAnalyzer

This commit is contained in:
Aleksey Kladov 2019-04-13 10:49:01 +03:00
parent 30481808fb
commit f4a94e74bc
7 changed files with 159 additions and 193 deletions

View File

@ -27,7 +27,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
let_stmt.syntax().range()
};
let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None);
let refs = analyzer.find_all_refs(bind_pat)?;
let refs = analyzer.find_all_refs(bind_pat);
let mut wrap_in_parens = vec![true; refs.len()];

View File

@ -4,7 +4,7 @@ use ra_db::{CrateId, SourceRootId, Edition};
use ra_syntax::{ast::self, TreeArc};
use crate::{
Name, ScopesWithSourceMap, Ty, HirFileId, Either,
Name, Ty, HirFileId, Either,
HirDatabase, DefDatabase,
type_ref::TypeRef,
nameres::{ModuleScope, Namespace, ImportId, CrateModuleId},
@ -466,12 +466,6 @@ impl DefWithBody {
DefWithBody::Static(ref s) => s.resolver(db),
}
}
pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
let scopes = db.expr_scopes(*self);
let source_map = db.body_with_source_map(*self).1;
ScopesWithSourceMap { scopes, source_map }
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -535,12 +529,6 @@ impl Function {
db.type_for_def((*self).into(), Namespace::Values)
}
pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
let scopes = db.expr_scopes((*self).into());
let source_map = db.body_with_source_map((*self).into()).1;
ScopesWithSourceMap { scopes, source_map }
}
pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
db.fn_signature(*self)
}

View File

@ -16,7 +16,7 @@ use crate::{
};
use crate::{ path::GenericArgs, ty::primitive::{IntTy, UncertainIntTy, FloatTy, UncertainFloatTy}};
pub use self::scope::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax};
pub use self::scope::{ExprScopes, ScopeEntryWithSyntax};
pub(crate) mod scope;
@ -93,7 +93,7 @@ pub fn resolver_for_scope(
) -> Resolver {
let mut r = body.owner.resolver(db);
let scopes = db.expr_scopes(body.owner);
let scope_chain = scopes.scope_chain_for(scope_id).collect::<Vec<_>>();
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
for scope in scope_chain.into_iter().rev() {
r = r.push_expr_scope(Arc::clone(&scopes), scope);
}

View File

@ -1,17 +1,16 @@
use std::sync::Arc;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_hash::{FxHashMap};
use ra_syntax::{
AstNode, SyntaxNode, TextUnit, TextRange, SyntaxNodePtr, AstPtr,
TextRange, AstPtr,
algo::generate,
ast,
};
use ra_arena::{Arena, RawId, impl_arena_id};
use crate::{
Name, AsName,DefWithBody, Either,
expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySourceMap},
Name, DefWithBody, Either,
expr::{PatId, ExprId, Pat, Expr, Body, Statement},
HirDatabase,
};
@ -23,7 +22,7 @@ impl_arena_id!(ScopeId);
pub struct ExprScopes {
body: Arc<Body>,
scopes: Arena<ScopeId, ScopeData>,
scope_for: FxHashMap<ExprId, ScopeId>,
pub(crate) scope_for: FxHashMap<ExprId, ScopeId>,
}
#[derive(Debug, PartialEq, Eq)]
@ -66,10 +65,7 @@ impl ExprScopes {
&self.scopes[scope].entries
}
pub fn scope_chain_for<'a>(
&'a self,
scope: Option<ScopeId>,
) -> impl Iterator<Item = ScopeId> + 'a {
/// Returns an iterator over `scope` and all of its ancestors, innermost
/// first. Yields nothing when `scope` is `None`.
// `generate` repeatedly applies the closure until it returns `None`,
// following the `parent` links stored in the scope arena.
pub fn scope_chain<'a>(&'a self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + 'a {
generate(scope, move |&scope| self.scopes[scope].parent)
}
@ -107,16 +103,10 @@ impl ExprScopes {
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScopesWithSourceMap {
pub(crate) source_map: Arc<BodySourceMap>,
pub(crate) scopes: Arc<ExprScopes>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScopeEntryWithSyntax {
name: Name,
ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
pub(crate) name: Name,
pub(crate) ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
}
impl ScopeEntryWithSyntax {
@ -129,96 +119,6 @@ impl ScopeEntryWithSyntax {
}
}
impl ScopesWithSourceMap {
fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
}
pub(crate) fn scope_for_offset(&self, offset: TextUnit) -> Option<ScopeId> {
self.scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
// find containing scope
.min_by_key(|(ptr, _scope)| {
(!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
})
.map(|(ptr, scope)| self.adjust(ptr, *scope, offset))
}
// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
// FIXME: move this to source binder?
fn adjust(&self, ptr: SyntaxNodePtr, original_scope: ScopeId, offset: TextUnit) -> ScopeId {
let r = ptr.range();
let child_scopes = self
.scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
.map(|(ptr, scope)| (ptr.range(), scope))
.filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
child_scopes
.max_by(|(r1, _), (r2, _)| {
if r2.is_subrange(&r1) {
std::cmp::Ordering::Greater
} else if r1.is_subrange(&r2) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())
}
})
.map(|(_ptr, scope)| *scope)
.unwrap_or(original_scope)
}
pub(crate) fn resolve_local_name(
&self,
name_ref: &ast::NameRef,
) -> Option<ScopeEntryWithSyntax> {
let mut shadowed = FxHashSet::default();
let name = name_ref.as_name();
let ret = self
.scope_chain(name_ref.syntax())
.flat_map(|scope| self.scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name)
.nth(0);
ret.and_then(|entry| {
Some(ScopeEntryWithSyntax {
name: entry.name().clone(),
ptr: self.source_map.pat_syntax(entry.pat())?,
})
})
}
pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::A(AstPtr::new(pat.into()));
fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match self.resolve_local_name(*name_ref) {
None => false,
Some(entry) => entry.ptr() == ptr,
})
.map(|name_ref| ReferenceDescriptor {
name: name_ref.syntax().text().to_string(),
range: name_ref.syntax().range(),
})
.collect()
}
pub(crate) fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
node.ancestors()
.map(SyntaxNodePtr::new)
.filter_map(|ptr| self.source_map.syntax_expr(ptr))
.find_map(|it| self.scopes.scope_for(it))
}
}
impl ScopeEntry {
pub fn name(&self) -> &Name {
&self.name
@ -297,12 +197,11 @@ pub struct ReferenceDescriptor {
#[cfg(test)]
mod tests {
use ra_db::salsa::InternKey;
use ra_syntax::{SourceFile, algo::find_node_at_offset};
use ra_db::SourceDatabase;
use ra_syntax::{algo::find_node_at_offset, AstNode, SyntaxNodePtr};
use test_utils::{extract_offset, assert_eq_text};
use crate::Function;
use crate::expr::{ExprCollector};
use crate::{source_binder::SourceAnalyzer, mock::MockDatabase};
use super::*;
@ -316,18 +215,20 @@ mod tests {
buf.push_str(&code[off..]);
buf
};
let file = SourceFile::parse(&code);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
let file = db.parse(file_id);
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let irrelevant_function =
Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::new(body));
let scopes =
ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
let scopes = analyzer.scopes();
let expr_id =
analyzer.body_source_map().syntax_expr(SyntaxNodePtr::new(marker.syntax())).unwrap();
let scope = scopes.scope_for(expr_id);
let actual = scopes
.scope_chain(marker.syntax())
.flat_map(|scope| scopes.scopes.entries(scope))
.scope_chain(scope)
.flat_map(|scope| scopes.entries(scope))
.map(|it| it.name().to_string())
.collect::<Vec<_>>()
.join("\n");
@ -410,28 +311,17 @@ mod tests {
);
}
fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> (Body, BodySourceMap) {
let mut collector = ExprCollector::new(DefWithBody::Function(function));
collector.collect_fn_body(node);
collector.finish()
}
fn do_check_local_name(code: &str, expected_offset: u32) {
let (off, code) = extract_offset(code);
let file = SourceFile::parse(&code);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
let file = db.parse(file_id);
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
let irrelevant_function =
Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::new(body));
let scopes =
ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap();
let local_name =
local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().range());

View File

@ -64,7 +64,7 @@ pub use self::{
impl_block::{ImplBlock, ImplItem},
docs::{Docs, Documentation},
adt::AdtDef,
expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
expr::{ExprScopes, ScopeEntryWithSyntax},
resolve::{Resolver, Resolution},
source_binder::{SourceAnalyzer, PathResolution},
};

View File

@ -7,9 +7,10 @@
/// purely for "IDE needs".
use std::sync::Arc;
use rustc_hash::FxHashSet;
use ra_db::{FileId, FilePosition};
use ra_syntax::{
SyntaxNode, AstPtr, TextUnit,
SyntaxNode, AstPtr, TextUnit, SyntaxNodePtr,
ast::{self, AstNode, NameOwner},
algo::find_node_at_offset,
SyntaxKind::*,
@ -18,7 +19,7 @@ use ra_syntax::{
use crate::{
HirDatabase, Function, Struct, Enum, Const, Static, Either, DefWithBody,
AsName, Module, HirFileId, Crate, Trait, Resolver,
expr::scope::{ReferenceDescriptor, ScopeEntryWithSyntax},
expr::{BodySourceMap, scope::{ReferenceDescriptor, ScopeEntryWithSyntax, ScopeId, ExprScopes}},
ids::LocationCtx,
expr, AstId
};
@ -120,29 +121,6 @@ pub fn trait_from_module(
Trait { id: ctx.to_def(trait_def) }
}
fn resolver_for_node(
db: &impl HirDatabase,
file_id: FileId,
node: &SyntaxNode,
offset: Option<TextUnit>,
) -> Resolver {
node.ancestors()
.find_map(|node| {
if ast::Expr::cast(node).is_some() || ast::Block::cast(node).is_some() {
let def = def_with_body_from_child_node(db, file_id, node)?;
let scopes = def.scopes(db);
let scope = match offset {
None => scopes.scope_for(&node),
Some(offset) => scopes.scope_for_offset(offset),
};
Some(expr::resolver_for_scope(def.body(db), db, scope))
} else {
try_get_resolver_for_node(db, file_id, node)
}
})
.unwrap_or_default()
}
fn try_get_resolver_for_node(
db: &impl HirDatabase,
file_id: FileId,
@ -192,9 +170,9 @@ fn def_with_body_from_child_node(
#[derive(Debug)]
pub struct SourceAnalyzer {
resolver: Resolver,
body_source_map: Option<Arc<crate::expr::BodySourceMap>>,
body_source_map: Option<Arc<BodySourceMap>>,
infer: Option<Arc<crate::ty::InferenceResult>>,
scopes: Option<crate::expr::ScopesWithSourceMap>,
scopes: Option<Arc<crate::expr::ExprScopes>>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -217,11 +195,30 @@ impl SourceAnalyzer {
offset: Option<TextUnit>,
) -> SourceAnalyzer {
let def_with_body = def_with_body_from_child_node(db, file_id, node);
SourceAnalyzer {
resolver: resolver_for_node(db, file_id, node, offset),
body_source_map: def_with_body.map(|it| it.body_source_map(db)),
infer: def_with_body.map(|it| it.infer(db)),
scopes: def_with_body.map(|it| it.scopes(db)),
if let Some(def) = def_with_body {
let source_map = def.body_source_map(db);
let scopes = db.expr_scopes(def);
let scope = match offset {
None => scope_for(&scopes, &source_map, &node),
Some(offset) => scope_for_offset(&scopes, &source_map, offset),
};
let resolver = expr::resolver_for_scope(def.body(db), db, scope);
SourceAnalyzer {
resolver,
body_source_map: Some(source_map),
infer: Some(def.infer(db)),
scopes: Some(scopes),
}
} else {
SourceAnalyzer {
resolver: node
.ancestors()
.find_map(|node| try_get_resolver_for_node(db, file_id, node))
.unwrap_or_default(),
body_source_map: None,
infer: None,
scopes: None,
}
}
}
@ -276,16 +273,46 @@ impl SourceAnalyzer {
Some(res)
}
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Option<Vec<ReferenceDescriptor>> {
self.scopes.as_ref().map(|it| it.find_all_refs(pat))
/// Resolves `name_ref` to the local binding (a pattern or a `self`
/// parameter) it refers to, walking the scope chain from the innermost
/// scope outwards and respecting shadowing.
///
/// Returns `None` when the analyzer was created outside of any body
/// (no source map / scopes available) or when no local with that name
/// is in scope.
pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
    // Names already seen in inner scopes; an entry from an outer scope
    // whose name was already inserted is shadowed and must be skipped.
    let mut shadowed = FxHashSet::default();
    let name = name_ref.as_name();
    let source_map = self.body_source_map.as_ref()?;
    let scopes = self.scopes.as_ref()?;
    let scope = scope_for(scopes, source_map, name_ref.syntax());
    // `.filter(..).find(..)` replaces the original `.filter(..).nth(0)`:
    // same first-match semantics, idiomatic form (clippy: iter_nth_zero).
    let ret = scopes
        .scope_chain(scope)
        .flat_map(|scope| scopes.entries(scope).iter())
        .filter(|entry| shadowed.insert(entry.name()))
        .find(|entry| entry.name() == &name);
    ret.and_then(|entry| {
        Some(ScopeEntryWithSyntax {
            name: entry.name().clone(),
            // `pat_syntax` may have no syntax pointer for this pattern;
            // propagate `None` in that case.
            ptr: source_map.pat_syntax(entry.pat())?,
        })
    })
}
pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
self.scopes.as_ref()?.resolve_local_name(name_ref)
/// Finds all references to the local binding introduced by `pat` inside
/// the enclosing function: every `NameRef` in the function body is
/// resolved, and those that resolve back to `pat` are reported.
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
    // A binding pattern outside of any `FnDef` (e.g. in a `const`
    // initializer) has no function body to search; report no references
    // instead of panicking on `unwrap()`.
    let fn_def = match pat.syntax().ancestors().find_map(ast::FnDef::cast) {
        Some(it) => it,
        None => return Vec::new(),
    };
    let ptr = Either::A(AstPtr::new(pat.into()));
    fn_def
        .syntax()
        .descendants()
        .filter_map(ast::NameRef::cast)
        // Keep only the name refs that resolve to exactly this pattern.
        .filter(|name_ref| match self.resolve_local_name(*name_ref) {
            None => false,
            Some(entry) => entry.ptr() == ptr,
        })
        .map(|name_ref| ReferenceDescriptor {
            name: name_ref.syntax().text().to_string(),
            range: name_ref.syntax().range(),
        })
        .collect()
}
#[cfg(test)]
pub(crate) fn body_source_map(&self) -> Arc<crate::expr::BodySourceMap> {
/// Test-only accessor for the source map of the analyzed body.
/// Panics if the analyzer was created outside of any body.
pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
    let source_map = self.body_source_map.as_ref().unwrap();
    Arc::clone(source_map)
}
@ -293,4 +320,65 @@ impl SourceAnalyzer {
/// Test-only accessor for the inference result of the analyzed body.
/// Panics if the analyzer was created outside of any body.
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
    let infer = self.infer.as_ref().unwrap();
    Arc::clone(infer)
}
/// Test-only accessor for the expression scopes of the analyzed body.
/// Panics if the analyzer was created outside of any body.
#[cfg(test)]
pub(crate) fn scopes(&self) -> Arc<ExprScopes> {
    let scopes = self.scopes.as_ref().unwrap();
    Arc::clone(scopes)
}
}
/// Finds the scope for a syntax node: walks the node's ancestors upwards
/// and returns the scope of the first ancestor that maps to an expression
/// with a recorded scope.
fn scope_for(
    scopes: &ExprScopes,
    source_map: &BodySourceMap,
    node: &SyntaxNode,
) -> Option<ScopeId> {
    for ancestor in node.ancestors() {
        let ptr = SyntaxNodePtr::new(ancestor);
        // Ancestors that are not expressions of this body have no entry in
        // the source map and are skipped; expressions without a recorded
        // scope are skipped as well.
        if let Some(expr_id) = source_map.syntax_expr(ptr) {
            if let Some(scope) = scopes.scope_for(expr_id) {
                return Some(scope);
            }
        }
    }
    None
}
/// Finds the scope for a plain text `offset` (used e.g. when the cursor is
/// not on any particular expression), then refines it via `adjust`.
///
/// Returns `None` when no expression of the body has a syntax pointer.
fn scope_for_offset(
scopes: &ExprScopes,
source_map: &BodySourceMap,
offset: TextUnit,
) -> Option<ScopeId> {
scopes
.scope_for
.iter()
// Pair each scoped expression with its syntax pointer; expressions
// without syntax (e.g. synthesized ones) are dropped by the `?`.
.filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
// find containing scope
// Key is (does-NOT-contain-offset, range-length): `false < true`, so
// ranges containing the offset sort first, and among those the
// smallest (innermost) range wins.
.min_by_key(|(ptr, _scope)| {
(!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
})
// Prefer a better-fitting child scope when `adjust` finds one;
// otherwise keep the containing scope found above.
.map(|(ptr, scope)| adjust(scopes, source_map, ptr, offset).unwrap_or(*scope))
}
// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
/// Refines a containing scope: among the scoped expressions strictly inside
/// `ptr`'s range that start at or before `offset`, picks the innermost /
/// latest one. Returns `None` when no such child scope exists.
fn adjust(
scopes: &ExprScopes,
source_map: &BodySourceMap,
ptr: SyntaxNodePtr,
offset: TextUnit,
) -> Option<ScopeId> {
let r = ptr.range();
let child_scopes = scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
.map(|(ptr, scope)| (ptr.range(), scope))
// Strict children only: inside `r`, not equal to `r`, and already
// started at `offset`.
.filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
child_scopes
// Ordering: a range nested inside another is "greater" (preferred);
// otherwise later start position wins. `max_by` thus selects the
// innermost, latest-starting candidate.
.max_by(|(r1, _), (r2, _)| {
if r2.is_subrange(&r1) {
std::cmp::Ordering::Greater
} else if r1.is_subrange(&r2) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())
}
})
.map(|(_ptr, scope)| *scope)
}

View File

@ -65,7 +65,7 @@ pub(crate) fn find_all_refs(
let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);
let references = analyzer
.find_all_refs(binding)?
.find_all_refs(binding)
.into_iter()
.map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range })
.collect::<Vec<_>>();