Merge #1129
1129: introduce SourceAnalyzer API for IDEs r=matklad a=matklad
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
Commit: 8887782c4a
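The new `hir::SourceAnalyzer` bundles the resolver, body source map, inference result and expression scopes behind a single entry point, so IDE features no longer have to stitch `function_from_child_node` / `infer` / `body_source_map` together by hand. Below is a minimal usage sketch assembled from the call sites in this diff; the `db`, `file_id`, `expr`, `path` and `bind_pat` values are assumed to come from the surrounding assist/completion context and are not part of the diff itself.

// Sketch only: mirrors the pattern the assists below switch to.
let analyzer = hir::SourceAnalyzer::new(db, file_id, expr.syntax(), None);

// Type of an expression (replaces infer() + body_source_map() + node_expr()):
let ty = analyzer.type_of(db, expr)?;

// Resolve an ast::Path to a definition:
if let Some(hir::PathResolution::Def(def)) = analyzer.resolve_path(db, &path) {
    // use `def` ...
}

// Find all references to a local binding inside the enclosing function:
let refs = analyzer.find_all_refs(bind_pat);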
@@ -1,7 +1,6 @@
use hir::{
HirDisplay, Ty,
db::HirDatabase,
source_binder::function_from_child_node,
};
use ra_syntax::{
SyntaxKind,
@@ -30,11 +29,8 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
}
// Infer type
let db = ctx.db;
let func = function_from_child_node(db, ctx.frange.file_id, pat.syntax())?;
let inference_res = func.infer(db);
let source_map = func.body_source_map(db);
let expr_id = source_map.node_expr(expr.into())?;
let ty = inference_res[expr_id].clone();
let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
let ty = analyzer.type_of(db, expr)?;
// Assist not applicable if the type is unknown
if is_unknown(&ty) {
return None;

@@ -2,7 +2,6 @@ use std::fmt::Write;

use crate::{Assist, AssistId, AssistCtx};

use hir::Resolver;
use hir::db::HirDatabase;
use ra_syntax::{SmolStr, SyntaxKind, TextRange, TextUnit, TreeArc};
use ra_syntax::ast::{self, AstNode, AstToken, FnDef, ImplItem, ImplItemKind, NameOwner};
@@ -46,9 +45,9 @@ fn add_missing_impl_members_inner(
let trait_def = {
let file_id = ctx.frange.file_id;
let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
let resolver = hir::source_binder::resolver_for_position(ctx.db, position);
let analyzer = hir::SourceAnalyzer::new(ctx.db, position.file_id, impl_node.syntax(), None);

resolve_target_trait_def(ctx.db, &resolver, impl_node)?
resolve_target_trait_def(ctx.db, &analyzer, impl_node)?
};

let missing_fns: Vec<_> = {
@@ -122,14 +121,14 @@ fn add_missing_impl_members_inner(
/// implemented) to a `ast::TraitDef`.
fn resolve_target_trait_def(
db: &impl HirDatabase,
resolver: &Resolver,
analyzer: &hir::SourceAnalyzer,
impl_block: &ast::ImplBlock,
) -> Option<TreeArc<ast::TraitDef>> {
let ast_path = impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?;
let hir_path = ast_path.path().and_then(hir::Path::from_ast)?;
let ast_path =
impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;

match resolver.resolve_path(db, &hir_path).take_types() {
Some(hir::Resolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).1),
match analyzer.resolve_path(db, &ast_path) {
Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).1),
_ => None,
}
}

@@ -1,7 +1,7 @@
use std::fmt::Write;

use hir::{
AdtDef, FieldSource, source_binder,
AdtDef, FieldSource,
db::HirDatabase,
};
use ra_syntax::ast::{self, AstNode};
@@ -20,12 +20,8 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
}

let expr = match_expr.expr()?;
let function =
source_binder::function_from_child_node(ctx.db, ctx.frange.file_id, expr.syntax())?;
let infer_result = function.infer(ctx.db);
let source_map = function.body_source_map(ctx.db);
let node_expr = source_map.node_expr(expr)?;
let match_expr_ty = infer_result[node_expr].clone();
let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
let enum_def = match_expr_ty.autoderef(ctx.db).find_map(|ty| match ty.as_adt() {
Some((AdtDef::Enum(e), _)) => Some(e),
_ => None,

@@ -1,6 +1,6 @@
use std::fmt::Write;

use hir::{AdtDef, db::HirDatabase, source_binder::function_from_child_node};
use hir::{AdtDef, db::HirDatabase};

use ra_syntax::ast::{self, AstNode};

@@ -51,15 +51,13 @@ where
}

fn evaluate_struct_def_fields(&mut self) -> Option<()> {
let function = function_from_child_node(
let analyzer = hir::SourceAnalyzer::new(
self.ctx.db,
self.ctx.frange.file_id,
self.struct_lit.syntax(),
)?;
let infer_result = function.infer(self.ctx.db);
let source_map = function.body_source_map(self.ctx.db);
let node_expr = source_map.node_expr(self.struct_lit.into())?;
let struct_lit_ty = infer_result[node_expr].clone();
None,
);
let struct_lit_ty = analyzer.type_of(self.ctx.db, self.struct_lit.into())?;
let struct_def = match struct_lit_ty.as_adt() {
Some((AdtDef::Struct(s), _)) => s,
_ => return None,

@@ -1,7 +1,4 @@
use hir::{
db::HirDatabase,
source_binder::function_from_child_node,
};
use hir::db::HirDatabase;
use ra_syntax::{
ast::{self, AstNode, AstToken, PatKind, ExprKind},
TextRange,
@@ -29,10 +26,8 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
} else {
let_stmt.syntax().range()
};

let function = function_from_child_node(ctx.db, ctx.frange.file_id, bind_pat.syntax())?;
let scope = function.scopes(ctx.db);
let refs = scope.find_all_refs(bind_pat);
let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None);
let refs = analyzer.find_all_refs(bind_pat);

let mut wrap_in_parens = vec![true; refs.len()];

@@ -4,7 +4,7 @@ use ra_db::{CrateId, SourceRootId, Edition};
use ra_syntax::{ast::self, TreeArc};

use crate::{
Name, ScopesWithSourceMap, Ty, HirFileId, Either,
Name, Ty, HirFileId, Either,
HirDatabase, DefDatabase,
type_ref::TypeRef,
nameres::{ModuleScope, Namespace, ImportId, CrateModuleId},
@@ -189,7 +189,7 @@ impl Module {
}
}

pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
let def_map = db.crate_def_map(self.krate);
Resolver::default().push_module_scope(def_map, self.module_id)
}
@@ -313,7 +313,7 @@ impl Struct {

// FIXME move to a more general type
/// Builds a resolver for type references inside this struct.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self.module(db).resolver(db);
// ...and add generic params, if present
@@ -373,7 +373,7 @@ impl Enum {

// FIXME: move to a more general type
/// Builds a resolver for type references inside this struct.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self.module(db).resolver(db);
// ...and add generic params, if present
@@ -450,28 +450,22 @@ impl DefWithBody {
db.infer(*self)
}

pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map(*self).1
}

pub fn body(&self, db: &impl HirDatabase) -> Arc<Body> {
db.body_hir(*self)
}

pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map(*self).1
}

/// Builds a resolver for code inside this item.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
match *self {
DefWithBody::Const(ref c) => c.resolver(db),
DefWithBody::Function(ref f) => f.resolver(db),
DefWithBody::Static(ref s) => s.resolver(db),
}
}

pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
let scopes = db.expr_scopes(*self);
let source_map = db.body_with_source_map(*self).1;
ScopesWithSourceMap { scopes, source_map }
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -523,7 +517,7 @@ impl Function {
self.signature(db).name.clone()
}

pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map((*self).into()).1
}

@@ -535,12 +529,6 @@ impl Function {
db.type_for_def((*self).into(), Namespace::Values)
}

pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
let scopes = db.expr_scopes((*self).into());
let source_map = db.body_with_source_map((*self).into()).1;
ScopesWithSourceMap { scopes, source_map }
}

pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
db.fn_signature(*self)
}
@@ -561,7 +549,7 @@ impl Function {

// FIXME: move to a more general type for 'body-having' items
/// Builds a resolver for code inside this item.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self
.impl_block(db)
@@ -606,10 +594,6 @@ impl Const {
db.infer((*self).into())
}

pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map((*self).into()).1
}

/// The containing impl block, if this is a method.
pub fn impl_block(&self, db: &impl DefDatabase) -> Option<ImplBlock> {
let module_impls = db.impls_in_module(self.module(db));
@@ -618,7 +602,7 @@ impl Const {

// FIXME: move to a more general type for 'body-having' items
/// Builds a resolver for code inside this item.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self
.impl_block(db)
@@ -670,7 +654,7 @@ impl Static {
}

/// Builds a resolver for code inside this item.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
self.module(db).resolver(db)
}
@@ -678,10 +662,6 @@ impl Static {
pub fn infer(&self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer((*self).into())
}

pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map((*self).into()).1
}
}

impl Docs for Static {
@@ -756,7 +736,7 @@ impl TypeAlias {
}

/// Builds a resolver for the type references in this type alias.
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self
.impl_block(db)

@@ -14,9 +14,9 @@ use crate::{
name::AsName,
type_ref::{Mutability, TypeRef},
};
use crate::{ path::GenericArgs, ty::primitive::{IntTy, UncertainIntTy, FloatTy, UncertainFloatTy}};
use crate::{path::GenericArgs, ty::primitive::{IntTy, UncertainIntTy, FloatTy, UncertainFloatTy}};

pub use self::scope::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax};
pub use self::scope::ExprScopes;

pub(crate) mod scope;

@@ -81,19 +81,23 @@ impl Body {
}

// needs arbitrary_self_types to be a method... or maybe move to the def?
pub fn resolver_for_expr(body: Arc<Body>, db: &impl HirDatabase, expr_id: ExprId) -> Resolver {
pub(crate) fn resolver_for_expr(
body: Arc<Body>,
db: &impl HirDatabase,
expr_id: ExprId,
) -> Resolver {
let scopes = db.expr_scopes(body.owner);
resolver_for_scope(body, db, scopes.scope_for(expr_id))
}

pub fn resolver_for_scope(
pub(crate) fn resolver_for_scope(
body: Arc<Body>,
db: &impl HirDatabase,
scope_id: Option<scope::ScopeId>,
) -> Resolver {
let mut r = body.owner.resolver(db);
let scopes = db.expr_scopes(body.owner);
let scope_chain = scopes.scope_chain_for(scope_id).collect::<Vec<_>>();
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
for scope in scope_chain.into_iter().rev() {
r = r.push_expr_scope(Arc::clone(&scopes), scope);
}
@@ -117,31 +121,27 @@ impl Index<PatId> for Body {
}

impl BodySourceMap {
pub fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
self.expr_map_back.get(expr).cloned()
}

pub fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
pub(crate) fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
self.expr_map.get(&ptr).cloned()
}

pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
self.expr_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}

pub fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
self.pat_map_back.get(pat).cloned()
}

pub fn syntax_pat(&self, ptr: PatPtr) -> Option<PatId> {
self.pat_map.get(&ptr).cloned()
}

pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_map.get(&Either::A(AstPtr::new(node))).cloned()
}

pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
pub(crate) fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
self.field_map[&(expr, field)].clone()
}
}

@@ -1,17 +1,11 @@
use std::sync::Arc;

use rustc_hash::{FxHashMap, FxHashSet};

use ra_syntax::{
AstNode, SyntaxNode, TextUnit, TextRange, SyntaxNodePtr, AstPtr,
algo::generate,
ast,
};
use rustc_hash::FxHashMap;
use ra_arena::{Arena, RawId, impl_arena_id};

use crate::{
Name, AsName,DefWithBody, Either,
expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySourceMap},
Name, DefWithBody,
expr::{PatId, ExprId, Pat, Expr, Body, Statement},
HirDatabase,
};

@@ -23,23 +17,32 @@ impl_arena_id!(ScopeId);
pub struct ExprScopes {
body: Arc<Body>,
scopes: Arena<ScopeId, ScopeData>,
scope_for: FxHashMap<ExprId, ScopeId>,
scope_by_expr: FxHashMap<ExprId, ScopeId>,
}

#[derive(Debug, PartialEq, Eq)]
pub struct ScopeEntry {
pub(crate) struct ScopeEntry {
name: Name,
pat: PatId,
}

impl ScopeEntry {
pub(crate) fn name(&self) -> &Name {
&self.name
}

pub(crate) fn pat(&self) -> PatId {
self.pat
}
}

#[derive(Debug, PartialEq, Eq)]
pub struct ScopeData {
pub(crate) struct ScopeData {
parent: Option<ScopeId>,
entries: Vec<ScopeEntry>,
}

impl ExprScopes {
// FIXME: This should take something more general than Function
pub(crate) fn expr_scopes_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<ExprScopes> {
let body = db.body_hir(def);
let res = ExprScopes::new(body);
@@ -50,7 +53,7 @@ impl ExprScopes {
let mut scopes = ExprScopes {
body: body.clone(),
scopes: Arena::default(),
scope_for: FxHashMap::default(),
scope_by_expr: FxHashMap::default(),
};
let root = scopes.root_scope();
scopes.add_params_bindings(root, body.params());
@@ -58,19 +61,23 @@ impl ExprScopes {
scopes
}

pub fn body(&self) -> Arc<Body> {
self.body.clone()
}

pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
&self.scopes[scope].entries
}

pub fn scope_chain_for<'a>(
pub(crate) fn scope_chain<'a>(
&'a self,
scope: Option<ScopeId>,
) -> impl Iterator<Item = ScopeId> + 'a {
generate(scope, move |&scope| self.scopes[scope].parent)
std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
}

pub(crate) fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
self.scope_by_expr.get(&expr).map(|&scope| scope)
}

pub(crate) fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
&self.scope_by_expr
}

fn root_scope(&mut self) -> ScopeId {
@@ -99,130 +106,7 @@ impl ExprScopes {
}

fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
self.scope_for.insert(node, scope);
}

pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
self.scope_for.get(&expr).map(|&scope| scope)
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScopesWithSourceMap {
pub source_map: Arc<BodySourceMap>,
pub scopes: Arc<ExprScopes>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScopeEntryWithSyntax {
name: Name,
ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
}

impl ScopeEntryWithSyntax {
pub fn name(&self) -> &Name {
&self.name
}

pub fn ptr(&self) -> Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>> {
self.ptr
}
}

impl ScopesWithSourceMap {
fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
}

pub fn scope_for_offset(&self, offset: TextUnit) -> Option<ScopeId> {
self.scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
// find containing scope
.min_by_key(|(ptr, _scope)| {
(!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
})
.map(|(ptr, scope)| self.adjust(ptr, *scope, offset))
}

// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
// FIXME: move this to source binder?
fn adjust(&self, ptr: SyntaxNodePtr, original_scope: ScopeId, offset: TextUnit) -> ScopeId {
let r = ptr.range();
let child_scopes = self
.scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
.map(|(ptr, scope)| (ptr.range(), scope))
.filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);

child_scopes
.max_by(|(r1, _), (r2, _)| {
if r2.is_subrange(&r1) {
std::cmp::Ordering::Greater
} else if r1.is_subrange(&r2) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())
}
})
.map(|(_ptr, scope)| *scope)
.unwrap_or(original_scope)
}

pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
let mut shadowed = FxHashSet::default();
let name = name_ref.as_name();
let ret = self
.scope_chain(name_ref.syntax())
.flat_map(|scope| self.scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name)
.nth(0);
ret.and_then(|entry| {
Some(ScopeEntryWithSyntax {
name: entry.name().clone(),
ptr: self.source_map.pat_syntax(entry.pat())?,
})
})
}

pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::A(AstPtr::new(pat.into()));
fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match self.resolve_local_name(*name_ref) {
None => false,
Some(entry) => entry.ptr() == ptr,
})
.map(|name_ref| ReferenceDescriptor {
name: name_ref.syntax().text().to_string(),
range: name_ref.syntax().range(),
})
.collect()
}

pub fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
node.ancestors()
.map(SyntaxNodePtr::new)
.filter_map(|ptr| self.source_map.syntax_expr(ptr))
.find_map(|it| self.scopes.scope_for(it))
}
}

impl ScopeEntry {
pub fn name(&self) -> &Name {
&self.name
}

pub fn pat(&self) -> PatId {
self.pat
self.scope_by_expr.insert(node, scope);
}
}

@@ -286,22 +170,13 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
};
}

#[derive(Debug)]
pub struct ReferenceDescriptor {
pub range: TextRange,
pub name: String,
}

#[cfg(test)]
mod tests {
use ra_db::salsa::InternKey;
use ra_syntax::{SourceFile, algo::find_node_at_offset};
use ra_db::SourceDatabase;
use ra_syntax::{algo::find_node_at_offset, AstNode, SyntaxNodePtr, ast};
use test_utils::{extract_offset, assert_eq_text};
use crate::Function;

use crate::expr::{ExprCollector};

use super::*;
use crate::{source_binder::SourceAnalyzer, mock::MockDatabase};

fn do_check(code: &str, expected: &[&str]) {
let (off, code) = extract_offset(code);
@@ -313,18 +188,20 @@ mod tests {
buf.push_str(&code[off..]);
buf
};
let file = SourceFile::parse(&code);

let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
let file = db.parse(file_id);
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let irrelevant_function =
Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::new(body));
let scopes =
ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);

let scopes = analyzer.scopes();
let expr_id =
analyzer.body_source_map().syntax_expr(SyntaxNodePtr::new(marker.syntax())).unwrap();
let scope = scopes.scope_for(expr_id);

let actual = scopes
.scope_chain(marker.syntax())
.flat_map(|scope| scopes.scopes.entries(scope))
.scope_chain(scope)
.flat_map(|scope| scopes.entries(scope))
.map(|it| it.name().to_string())
.collect::<Vec<_>>()
.join("\n");
@@ -407,28 +284,17 @@ mod tests {
);
}

fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> (Body, BodySourceMap) {
let mut collector = ExprCollector::new(DefWithBody::Function(function));
collector.collect_fn_body(node);
collector.finish()
}

fn do_check_local_name(code: &str, expected_offset: u32) {
let (off, code) = extract_offset(code);
let file = SourceFile::parse(&code);

let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
let file = db.parse(file_id);
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");

let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);

let irrelevant_function =
Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::new(body));
let scopes =
ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap();
let local_name =
local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().range());

@@ -105,7 +105,7 @@ impl ImplBlock {
db.generic_params((*self).into())
}

pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
let r = self.module().resolver(db);
// add generic params, if present
let p = self.generic_params(db);

@@ -51,6 +51,7 @@ use crate::{
db::{HirDatabase, DefDatabase},
name::{AsName, KnownName},
source_id::{FileAstId, AstId},
resolve::Resolver,
};

pub use self::{
@@ -60,12 +61,13 @@ pub use self::{
source_id::{AstIdMap, ErasedFileAstId},
ids::{HirFileId, MacroDefId, MacroCallId, MacroCallLoc},
nameres::{PerNs, Namespace, ImportId},
ty::{Ty, ApplicationTy, TypeCtor, Substs, display::HirDisplay},
ty::{Ty, ApplicationTy, TypeCtor, Substs, display::HirDisplay, CallableDef},
impl_block::{ImplBlock, ImplItem},
docs::{Docs, Documentation},
adt::AdtDef,
expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
resolve::{Resolver, Resolution},
expr::ExprScopes,
resolve::Resolution,
source_binder::{SourceAnalyzer, PathResolution, ScopeEntryWithSyntax},
};

pub use self::code_model_api::{

@@ -9,13 +9,13 @@ use crate::{
name::{Name, KnownName},
nameres::{PerNs, CrateDefMap, CrateModuleId},
generics::GenericParams,
expr::{scope::{ExprScopes, ScopeId}, PatId, Body},
expr::{scope::{ExprScopes, ScopeId}, PatId},
impl_block::ImplBlock,
path::Path, Trait
};

#[derive(Debug, Clone, Default)]
pub struct Resolver {
pub(crate) struct Resolver {
scopes: Vec<Scope>,
}

@@ -117,7 +117,7 @@ pub enum Resolution {
}

impl Resolver {
pub fn resolve_name(&self, db: &impl HirDatabase, name: &Name) -> PerNs<Resolution> {
pub(crate) fn resolve_name(&self, db: &impl HirDatabase, name: &Name) -> PerNs<Resolution> {
let mut resolution = PerNs::none();
for scope in self.scopes.iter().rev() {
resolution = resolution.or(scope.resolve_name(db, name));
@@ -154,12 +154,12 @@ impl Resolver {

/// Returns the fully resolved path if we were able to resolve it.
/// otherwise returns `PerNs::none`
pub fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> PerNs<Resolution> {
pub(crate) fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> PerNs<Resolution> {
// into_fully_resolved() returns the fully resolved path or PerNs::none() otherwise
self.resolve_path_segments(db, path).into_fully_resolved()
}

pub fn all_names(&self, db: &impl HirDatabase) -> FxHashMap<Name, PerNs<Resolution>> {
pub(crate) fn all_names(&self, db: &impl HirDatabase) -> FxHashMap<Name, PerNs<Resolution>> {
let mut names = FxHashMap::default();
for scope in self.scopes.iter().rev() {
scope.collect_names(db, &mut |name, res| {
@@ -197,14 +197,6 @@ impl Resolver {
_ => None,
})
}

/// The body from which any `LocalBinding` resolutions in this resolver come.
pub fn body(&self) -> Option<Arc<Body>> {
self.scopes.iter().rev().find_map(|scope| match scope {
Scope::ExprScope(expr_scope) => Some(expr_scope.expr_scopes.body()),
_ => None,
})
}
}

impl Resolver {

@@ -5,16 +5,21 @@
///
/// So, this modules should not be used during hir construction, it exists
/// purely for "IDE needs".
use std::sync::Arc;

use rustc_hash::{FxHashSet, FxHashMap};
use ra_db::{FileId, FilePosition};
use ra_syntax::{
SyntaxNode,
SyntaxNode, AstPtr, TextUnit, SyntaxNodePtr, TextRange,
ast::{self, AstNode, NameOwner},
algo::{find_node_at_offset, find_token_at_offset},
algo::find_node_at_offset,
SyntaxKind::*,
};

use crate::{
HirDatabase, Function, Struct, Enum,Const,Static,
HirDatabase, Function, Struct, Enum, Const, Static, Either, DefWithBody, PerNs, Name,
AsName, Module, HirFileId, Crate, Trait, Resolver,
expr::{BodySourceMap, scope::{ScopeId, ExprScopes}},
ids::LocationCtx,
expr, AstId
};
@@ -87,63 +92,6 @@ fn module_from_source(
)
}

pub fn const_from_source(
db: &impl HirDatabase,
file_id: FileId,
const_def: &ast::ConstDef,
) -> Option<Const> {
let module = module_from_child_node(db, file_id, const_def.syntax())?;
let res = const_from_module(db, module, const_def);
Some(res)
}

pub fn const_from_module(
db: &impl HirDatabase,
module: Module,
const_def: &ast::ConstDef,
) -> Const {
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
Const { id: ctx.to_def(const_def) }
}

pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Function> {
let file = db.parse(position.file_id);
let fn_def = find_node_at_offset::<ast::FnDef>(file.syntax(), position.offset)?;
function_from_source(db, position.file_id, fn_def)
}

pub fn function_from_source(
db: &impl HirDatabase,
file_id: FileId,
fn_def: &ast::FnDef,
) -> Option<Function> {
let module = module_from_child_node(db, file_id, fn_def.syntax())?;
let res = function_from_module(db, module, fn_def);
Some(res)
}

pub fn function_from_module(
db: &impl HirDatabase,
module: Module,
fn_def: &ast::FnDef,
) -> Function {
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
Function { id: ctx.to_def(fn_def) }
}

pub fn function_from_child_node(
db: &impl HirDatabase,
file_id: FileId,
node: &SyntaxNode,
) -> Option<Function> {
let fn_def = node.ancestors().find_map(ast::FnDef::cast)?;
function_from_source(db, file_id, fn_def)
}

pub fn struct_from_module(
db: &impl HirDatabase,
module: Module,
@@ -155,27 +103,6 @@ pub fn struct_from_module(
Struct { id: ctx.to_def(struct_def) }
}

pub fn static_from_source(
db: &impl HirDatabase,
file_id: FileId,
static_def: &ast::StaticDef,
) -> Option<Static> {
let module = module_from_child_node(db, file_id, static_def.syntax())?;
let res = static_from_module(db, module, static_def);
Some(res)
}

pub fn static_from_module(
db: &impl HirDatabase,
module: Module,
static_def: &ast::StaticDef,
) -> Static {
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
Static { id: ctx.to_def(static_def) }
}

pub fn enum_from_module(db: &impl HirDatabase, module: Module, enum_def: &ast::EnumDef) -> Enum {
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
@@ -194,48 +121,6 @@ pub fn trait_from_module(
Trait { id: ctx.to_def(trait_def) }
}

pub fn resolver_for_position(db: &impl HirDatabase, position: FilePosition) -> Resolver {
let file_id = position.file_id;
let file = db.parse(file_id);
find_token_at_offset(file.syntax(), position.offset)
.find_map(|token| {
token.parent().ancestors().find_map(|node| {
if ast::Expr::cast(node).is_some() || ast::Block::cast(node).is_some() {
if let Some(func) = function_from_child_node(db, file_id, node) {
let scopes = func.scopes(db);
let scope = scopes.scope_for_offset(position.offset);
Some(expr::resolver_for_scope(func.body(db), db, scope))
} else {
// FIXME const/static/array length
None
}
} else {
try_get_resolver_for_node(db, file_id, node)
}
})
})
.unwrap_or_default()
}

pub fn resolver_for_node(db: &impl HirDatabase, file_id: FileId, node: &SyntaxNode) -> Resolver {
node.ancestors()
.find_map(|node| {
if ast::Expr::cast(node).is_some() || ast::Block::cast(node).is_some() {
if let Some(func) = function_from_child_node(db, file_id, node) {
let scopes = func.scopes(db);
let scope = scopes.scope_for(&node);
Some(expr::resolver_for_scope(func.body(db), db, scope))
} else {
// FIXME const/static/array length
None
}
} else {
try_get_resolver_for_node(db, file_id, node)
}
})
.unwrap_or_default()
}

fn try_get_resolver_for_node(
db: &impl HirDatabase,
file_id: FileId,
@@ -251,10 +136,281 @@ fn try_get_resolver_for_node(
} else if let Some(e) = ast::EnumDef::cast(node) {
let module = module_from_child_node(db, file_id, e.syntax())?;
Some(enum_from_module(db, module, e).resolver(db))
} else if let Some(f) = ast::FnDef::cast(node) {
function_from_source(db, file_id, f).map(|f| f.resolver(db))
} else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db))
} else {
// FIXME add missing cases
None
}
}

fn def_with_body_from_child_node(
db: &impl HirDatabase,
file_id: FileId,
node: &SyntaxNode,
) -> Option<DefWithBody> {
let module = module_from_child_node(db, file_id, node)?;
let ctx = LocationCtx::new(db, module, file_id.into());
node.ancestors().find_map(|node| {
if let Some(def) = ast::FnDef::cast(node) {
return Some(Function { id: ctx.to_def(def) }.into());
}
if let Some(def) = ast::ConstDef::cast(node) {
return Some(Const { id: ctx.to_def(def) }.into());
}
if let Some(def) = ast::StaticDef::cast(node) {
return Some(Static { id: ctx.to_def(def) }.into());
}
None
})
}

/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself.
#[derive(Debug)]
pub struct SourceAnalyzer {
resolver: Resolver,
body_source_map: Option<Arc<BodySourceMap>>,
infer: Option<Arc<crate::ty::InferenceResult>>,
scopes: Option<Arc<crate::expr::ExprScopes>>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
Def(crate::ModuleDef),
/// A local binding (only value namespace)
LocalBinding(Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>),
/// A generic parameter
GenericParam(u32),
SelfType(crate::ImplBlock),
AssocItem(crate::ImplItem),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScopeEntryWithSyntax {
pub(crate) name: Name,
pub(crate) ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
}

impl ScopeEntryWithSyntax {
pub fn name(&self) -> &Name {
&self.name
}

pub fn ptr(&self) -> Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>> {
self.ptr
}
}

#[derive(Debug)]
pub struct ReferenceDescriptor {
pub range: TextRange,
pub name: String,
}

impl SourceAnalyzer {
pub fn new(
db: &impl HirDatabase,
file_id: FileId,
node: &SyntaxNode,
offset: Option<TextUnit>,
) -> SourceAnalyzer {
let def_with_body = def_with_body_from_child_node(db, file_id, node);
if let Some(def) = def_with_body {
let source_map = def.body_source_map(db);
let scopes = db.expr_scopes(def);
let scope = match offset {
None => scope_for(&scopes, &source_map, &node),
Some(offset) => scope_for_offset(&scopes, &source_map, offset),
};
let resolver = expr::resolver_for_scope(def.body(db), db, scope);
SourceAnalyzer {
resolver,
body_source_map: Some(source_map),
infer: Some(def.infer(db)),
scopes: Some(scopes),
}
} else {
SourceAnalyzer {
resolver: node
.ancestors()
.find_map(|node| try_get_resolver_for_node(db, file_id, node))
.unwrap_or_default(),
body_source_map: None,
infer: None,
scopes: None,
}
}
}

pub fn type_of(&self, _db: &impl HirDatabase, expr: &ast::Expr) -> Option<crate::Ty> {
let expr_id = self.body_source_map.as_ref()?.node_expr(expr)?;
Some(self.infer.as_ref()?[expr_id].clone())
}

pub fn type_of_pat(&self, _db: &impl HirDatabase, pat: &ast::Pat) -> Option<crate::Ty> {
let pat_id = self.body_source_map.as_ref()?.node_pat(pat)?;
Some(self.infer.as_ref()?[pat_id].clone())
}

pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?;
self.infer.as_ref()?.method_resolution(expr_id)
}

pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?;
self.infer.as_ref()?.field_resolution(expr_id)
}

pub fn resolve_hir_path(
&self,
db: &impl HirDatabase,
path: &crate::Path,
) -> PerNs<crate::Resolution> {
self.resolver.resolve_path(db, path)
}

pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
return Some(PathResolution::AssocItem(assoc));
}
}
if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
return Some(PathResolution::AssocItem(assoc));
}
}
let hir_path = crate::Path::from_ast(path)?;
let res = self.resolver.resolve_path(db, &hir_path);
let res = res.clone().take_types().or_else(|| res.take_values())?;
let res = match res {
crate::Resolution::Def(it) => PathResolution::Def(it),
crate::Resolution::LocalBinding(it) => {
PathResolution::LocalBinding(self.body_source_map.as_ref()?.pat_syntax(it)?)
}
crate::Resolution::GenericParam(it) => PathResolution::GenericParam(it),
crate::Resolution::SelfType(it) => PathResolution::SelfType(it),
};
Some(res)
}

pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
let mut shadowed = FxHashSet::default();
let name = name_ref.as_name();
let source_map = self.body_source_map.as_ref()?;
let scopes = self.scopes.as_ref()?;
let scope = scope_for(scopes, source_map, name_ref.syntax());
let ret = scopes
.scope_chain(scope)
.flat_map(|scope| scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name)
.nth(0);
ret.and_then(|entry| {
Some(ScopeEntryWithSyntax {
name: entry.name().clone(),
ptr: source_map.pat_syntax(entry.pat())?,
})
})
}

pub fn all_names(&self, db: &impl HirDatabase) -> FxHashMap<Name, PerNs<crate::Resolution>> {
self.resolver.all_names(db)
}

pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
// FIXME: at least, this should work with any DefWithBody, but ideally
// this should be hir-based altogether
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::A(AstPtr::new(pat.into()));
fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match self.resolve_local_name(*name_ref) {
None => false,
Some(entry) => entry.ptr() == ptr,
})
.map(|name_ref| ReferenceDescriptor {
name: name_ref.text().to_string(),
range: name_ref.syntax().range(),
})
.collect()
}

#[cfg(test)]
pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
self.body_source_map.clone().unwrap()
}

#[cfg(test)]
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
self.infer.clone().unwrap()
}

#[cfg(test)]
pub(crate) fn scopes(&self) -> Arc<ExprScopes> {
self.scopes.clone().unwrap()
}
}

fn scope_for(
scopes: &ExprScopes,
source_map: &BodySourceMap,
node: &SyntaxNode,
) -> Option<ScopeId> {
node.ancestors()
.map(SyntaxNodePtr::new)
.filter_map(|ptr| source_map.syntax_expr(ptr))
.find_map(|it| scopes.scope_for(it))
}

fn scope_for_offset(
scopes: &ExprScopes,
source_map: &BodySourceMap,
offset: TextUnit,
) -> Option<ScopeId> {
scopes
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
// find containing scope
.min_by_key(|(ptr, _scope)| {
(!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
})
.map(|(ptr, scope)| adjust(scopes, source_map, ptr, offset).unwrap_or(*scope))
}

// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
fn adjust(
scopes: &ExprScopes,
source_map: &BodySourceMap,
ptr: SyntaxNodePtr,
offset: TextUnit,
) -> Option<ScopeId> {
let r = ptr.range();
let child_scopes = scopes
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
.map(|(ptr, scope)| (ptr.range(), scope))
.filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);

child_scopes
.max_by(|(r1, _), (r2, _)| {
if r2.is_subrange(&r1) {
std::cmp::Ordering::Greater
} else if r1.is_subrange(&r2) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())
}
})
.map(|(_ptr, scope)| *scope)
}

@@ -15,11 +15,12 @@ use std::sync::Arc;
use std::{fmt, mem};

use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait};

pub(crate) use lower::{TypableDef, CallableDef, type_for_def, type_for_field, callable_item_sig};
pub(crate) use infer::{infer, InferenceResult, InferTy};
use display::{HirDisplay, HirFormatter};

pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig};
pub(crate) use infer::{infer, InferenceResult, InferTy};
pub use lower::CallableDef;

/// A type constructor or type name: this might be something like the primitive
/// type `bool`, a struct like `Vec`, or things like function pointers or
/// tuples.
@@ -288,6 +289,15 @@ impl Ty {
}
}

pub fn as_callable(&self) -> Option<(CallableDef, &Substs)> {
match self {
Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(callable_def), parameters }) => {
Some((*callable_def, parameters))
}
_ => None,
}
}

fn builtin_deref(&self) -> Option<Ty> {
match self {
Ty::Apply(a_ty) => match a_ty.ctor {

@@ -135,7 +135,7 @@ fn def_crate(db: &impl HirDatabase, ty: &Ty) -> Option<Crate> {
impl Ty {
/// Look up the method with the given name, returning the actual autoderefed
/// receiver type (but without autoref applied yet).
pub fn lookup_method(
pub(crate) fn lookup_method(
self,
db: &impl HirDatabase,
name: &Name,

@@ -4,15 +4,15 @@ use std::fmt::Write;
use insta::assert_snapshot_matches;

use ra_db::{SourceDatabase, salsa::Database, FilePosition};
use ra_syntax::{algo, ast::{self, AstNode}};
use ra_syntax::{algo, ast::{self, AstNode}, SyntaxKind::*};
use test_utils::covers;

use crate::{
source_binder,
mock::MockDatabase,
ty::display::HirDisplay,
ty::InferenceResult,
expr::BodySourceMap
expr::BodySourceMap,
SourceAnalyzer,
};

// These tests compare the inference results for all expressions in a file
@@ -1862,14 +1862,14 @@ fn test() {
@r###"
[49; 50) '0': u32
[80; 83) '101': u32
[126; 128) '99': u32
[95; 213) '{ ...NST; }': ()
[138; 139) 'x': {unknown}
[142; 153) 'LOCAL_CONST': {unknown}
[163; 164) 'z': u32
[167; 179) 'GLOBAL_CONST': u32
[189; 191) 'id': u32
[194; 210) 'Foo::A..._CONST': u32"###
[194; 210) 'Foo::A..._CONST': u32
[126; 128) '99': u32"###
);
}

@@ -1891,8 +1891,6 @@ fn test() {
@r###"
[29; 32) '101': u32
[70; 73) '101': u32
[118; 120) '99': u32
[161; 163) '99': u32
[85; 280) '{ ...MUT; }': ()
[173; 174) 'x': {unknown}
[177; 189) 'LOCAL_STATIC': {unknown}
@@ -1901,7 +1899,9 @@ fn test() {
[229; 230) 'z': u32
[233; 246) 'GLOBAL_STATIC': u32
[256; 257) 'w': u32
[260; 277) 'GLOBAL...IC_MUT': u32"###
[260; 277) 'GLOBAL...IC_MUT': u32
[118; 120) '99': u32
[161; 163) '99': u32"###
);
}

@@ -2302,13 +2302,10 @@ fn test() -> u64 {
}

fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
let func = source_binder::function_from_position(db, pos).unwrap();
let body_source_map = func.body_source_map(db);
let inference_result = func.infer(db);
let (_, syntax) = func.source(db);
let node = algo::find_node_at_offset::<ast::Expr>(syntax.syntax(), pos.offset).unwrap();
let expr = body_source_map.node_expr(node).unwrap();
let ty = &inference_result[expr];
let file = db.parse(pos.file_id);
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
let ty = analyzer.type_of(db, expr).unwrap();
ty.display(db).to_string()
}

@@ -2350,25 +2347,11 @@ fn infer(content: &str) -> String {
}
};

for const_def in source_file.syntax().descendants().filter_map(ast::ConstDef::cast) {
let konst = source_binder::const_from_source(&db, file_id, const_def).unwrap();
let inference_result = konst.infer(&db);
let body_source_map = konst.body_source_map(&db);
infer_def(inference_result, body_source_map)
}

for static_def in source_file.syntax().descendants().filter_map(ast::StaticDef::cast) {
let static_ = source_binder::static_from_source(&db, file_id, static_def).unwrap();
let inference_result = static_.infer(&db);
let body_source_map = static_.body_source_map(&db);
infer_def(inference_result, body_source_map)
}

for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap();
let inference_result = func.infer(&db);
let body_source_map = func.body_source_map(&db);
infer_def(inference_result, body_source_map)
for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, file_id, node, None);
infer_def(analyzer.inference_result(), analyzer.body_source_map());
}
}

acc.truncate(acc.trim_end().len());
@@ -2403,10 +2386,12 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
}
",
);
let func = source_binder::function_from_position(&db, pos).unwrap();
{
let file = db.parse(pos.file_id);
let node =
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
func.infer(&db);
SourceAnalyzer::new(&db, pos.file_id, node, None);
});
assert!(format!("{:?}", events).contains("infer"))
}
@@ -2423,8 +2408,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));

{
let file = db.parse(pos.file_id);
let node =
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
func.infer(&db);
SourceAnalyzer::new(&db, pos.file_id, node, None);
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}

@@ -2,7 +2,6 @@ use test_utils::tested_by;
use ra_db::SourceDatabase;
use ra_syntax::{
AstNode, SyntaxNode, TextUnit,
SyntaxKind::FN_DEF,
ast::{self, ArgListOwner},
algo::find_node_at_offset,
};
@@ -18,19 +17,26 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
let calling_node = FnCallNode::with_node(syntax, position.offset)?;
let name_ref = calling_node.name_ref()?;

// Resolve the function's NameRef (NOTE: this isn't entirely accurate).
let file_symbols = crate::symbol_index::index_resolve(db, name_ref);
let symbol = file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF)?;
let fn_file = db.parse(symbol.file_id);
let fn_def = symbol.ptr.to_node(&fn_file);
let fn_def = ast::FnDef::cast(fn_def).unwrap();
let function = hir::source_binder::function_from_source(db, symbol.file_id, fn_def)?;
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
let function = match calling_node {
FnCallNode::CallExpr(expr) => {
//FIXME: apply subst
let (callable_def, _subst) =
analyzer.type_of(db, expr.expr()?.into())?.as_callable()?;
match callable_def {
hir::CallableDef::Function(it) => it,
//FIXME: handle other callables
_ => return None,
}
}
FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?,
};

let mut call_info = CallInfo::new(db, function);

// If we have a calling expression let's find which argument we are on
let num_params = call_info.parameters().len();
let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some();
let has_self = function.signature(db).has_self_param();

if num_params == 1 {
if !has_self {
@@ -74,7 +80,7 @@ enum FnCallNode<'a> {
}

impl<'a> FnCallNode<'a> {
pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
return Some(FnCallNode::CallExpr(expr));
}
@@ -84,7 +90,7 @@ impl<'a> FnCallNode<'a> {
None
}

pub fn name_ref(&self) -> Option<&'a ast::NameRef> {
fn name_ref(&self) -> Option<&'a ast::NameRef> {
match *self {
FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
@@ -97,7 +103,7 @@ impl<'a> FnCallNode<'a> {
}
}

pub fn arg_list(&self) -> Option<&'a ast::ArgList> {
fn arg_list(&self) -> Option<&'a ast::ArgList> {
match *self {
FnCallNode::CallExpr(expr) => expr.arg_list(),
FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
@@ -142,7 +148,7 @@ mod tests {
}

#[test]
fn test_fn_signature_two_args_first() {
fn test_fn_signature_two_args_firstx() {
let info = call_info(
r#"fn foo(x: u32, y: u32) -> u32 {x + y}
fn bar() { foo(<|>3, ); }"#,
@@ -382,11 +388,9 @@ assert_eq!(6, my_crate::add_one(5));
fn test_fn_signature_with_docs_from_actix() {
let info = call_info(
r#"
pub trait WriteHandler<E>
where
Self: Actor,
Self::Context: ActorContext,
{
struct WriteHandler<E>;

impl<E> WriteHandler<E> {
/// Method is called when writer emits error.
///
/// If this method returns `ErrorAction::Continue` writer processing
@@ -403,8 +407,7 @@ where
}
}

pub fn foo() {
WriteHandler r;
pub fn foo(mut r: WriteHandler<()>) {
r.finished(<|>);
}

@ -4,17 +4,10 @@ use crate::completion::{CompletionContext, Completions};

/// Complete dot accesses, i.e. fields or methods (currently only fields).
pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
let (function, receiver) = match (&ctx.function, ctx.dot_receiver) {
(Some(function), Some(receiver)) => (function, receiver),
_ => return,
};
let infer_result = function.infer(ctx.db);
let source_map = function.body_source_map(ctx.db);
let expr = match source_map.node_expr(receiver) {
Some(expr) => expr,
let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
Some(it) => it,
None => return,
};
let receiver_ty = infer_result[expr].clone();
if !ctx.is_call {
complete_fields(acc, ctx, receiver_ty.clone());
}
@ -312,6 +305,30 @@ mod tests {
kind: Method,
detail: "pub fn blah(&self)"
}
]"###
);
}

#[test]
fn test_completion_works_in_consts() {
assert_debug_snapshot_matches!(
do_ref_completion(
r"
struct A { the_field: u32 }
const X: u32 = {
A { the_field: 92 }.<|>
};
",
),
@r###"[
CompletionItem {
label: "the_field",
source_range: [106; 106),
delete: [106; 106),
insert: "the_field",
kind: Field,
detail: "u32"
}
]"###
);
}

@ -9,7 +9,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
Some(path) => path.clone(),
_ => return,
};
let def = match ctx.resolver.resolve_path(ctx.db, &path).take_types() {
let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path).take_types() {
Some(Resolution::Def(def)) => def,
_ => return,
};

@ -7,7 +7,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
}
// FIXME: ideally, we should look at the type we are matching against and
// suggest variants + auto-imports
let names = ctx.resolver.all_names(ctx.db);
let names = ctx.analyzer.all_names(ctx.db);
for (name, res) in names.into_iter() {
let r = res.as_ref();
let def = match r.take_types().or(r.take_values()) {

@ -4,7 +4,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_trivial_path {
return;
}
let names = ctx.resolver.all_names(ctx.db);
let names = ctx.analyzer.all_names(ctx.db);

names.into_iter().for_each(|(name, res)| acc.add_resolution(ctx, name.to_string(), &res));
}

@ -4,17 +4,10 @@ use crate::completion::{CompletionContext, Completions};

/// Complete fields in fields literals.
pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
let (function, struct_lit) = match (&ctx.function, ctx.struct_lit_syntax) {
(Some(function), Some(struct_lit)) => (function, struct_lit),
_ => return,
};
let infer_result = function.infer(ctx.db);
let source_map = function.body_source_map(ctx.db);
let expr = match source_map.node_expr(struct_lit.into()) {
Some(expr) => expr,
let ty = match ctx.struct_lit_syntax.and_then(|it| ctx.analyzer.type_of(ctx.db, it.into())) {
Some(it) => it,
None => return,
};
let ty = infer_result[expr].clone();
let (adt, substs) = match ty.as_adt() {
Some(res) => res,
_ => return,

@ -5,7 +5,7 @@ use ra_syntax::{
algo::{find_token_at_offset, find_covering_element, find_node_at_offset},
SyntaxKind::*,
};
use hir::{source_binder, Resolver};
use hir::source_binder;

use crate::{db, FilePosition};

@ -14,11 +14,10 @@ use crate::{db, FilePosition};
#[derive(Debug)]
pub(crate) struct CompletionContext<'a> {
pub(super) db: &'a db::RootDatabase,
pub(super) analyzer: hir::SourceAnalyzer,
pub(super) offset: TextUnit,
pub(super) token: SyntaxToken<'a>,
pub(super) resolver: Resolver,
pub(super) module: Option<hir::Module>,
pub(super) function: Option<hir::Function>,
pub(super) function_syntax: Option<&'a ast::FnDef>,
pub(super) use_item_syntax: Option<&'a ast::UseItem>,
pub(super) struct_lit_syntax: Option<&'a ast::StructLit>,
@ -47,16 +46,16 @@ impl<'a> CompletionContext<'a> {
original_file: &'a SourceFile,
position: FilePosition,
) -> Option<CompletionContext<'a>> {
let resolver = source_binder::resolver_for_position(db, position);
let module = source_binder::module_from_position(db, position);
let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
let analyzer =
hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset));
let mut ctx = CompletionContext {
db,
analyzer,
token,
offset: position.offset,
resolver,
module,
function: None,
function_syntax: None,
use_item_syntax: None,
struct_lit_syntax: None,
@ -147,10 +146,6 @@ impl<'a> CompletionContext<'a> {
.ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::FnDef::cast);
if let (Some(module), Some(fn_def)) = (self.module, self.function_syntax) {
let function = source_binder::function_from_module(self.db, module, fn_def);
self.function = Some(function);
}

let parent = match name_ref.syntax().parent() {
Some(it) => it,

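With these changes, CompletionContext carries one SourceAnalyzer, built from the token under the cursor, and the individual completion modules ask it directly instead of threading a Resolver plus an optional hir::Function around. A rough sketch of the resulting call pattern, using only the ctx.analyzer calls visible in the hunks above (the function itself is illustrative, not part of the PR):

use crate::completion::{CompletionContext, Completions};

// Illustrative only: mirrors complete_dot and complete_scope above.
pub(super) fn complete_example(acc: &mut Completions, ctx: &CompletionContext) {
    // Type of the dot receiver, when completing after `.`:
    if let Some(receiver_ty) = ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
        let _ = receiver_ty; // complete fields/methods of this type here
    }
    // Everything nameable in the surrounding scope:
    for (name, res) in ctx.analyzer.all_names(ctx.db) {
        acc.add_resolution(ctx, name.to_string(), &res);
    }
}
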
@ -1,11 +1,11 @@
use ra_db::{FileId, SourceDatabase};
use ra_syntax::{
SyntaxNode, SyntaxNodePtr, AstNode, SmolStr, TextRange, TreeArc,
SyntaxNode, AstNode, SmolStr, TextRange, TreeArc, AstPtr,
SyntaxKind::{self, NAME},
ast::{self, NameOwner, VisibilityOwner, TypeAscriptionOwner},
algo::visit::{visitor, Visitor},
};
use hir::{ModuleSource, FieldSource, Name, ImplItem};
use hir::{ModuleSource, FieldSource, ImplItem, Either};

use crate::{FileSymbol, db::RootDatabase};

@ -74,15 +74,25 @@ impl NavigationTarget {
}
}

pub(crate) fn from_scope_entry(
pub(crate) fn from_pat(
db: &RootDatabase,
file_id: FileId,
name: Name,
ptr: SyntaxNodePtr,
pat: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
) -> NavigationTarget {
let file = db.parse(file_id);
let (name, full_range) = match pat {
Either::A(pat) => match pat.to_node(&file).kind() {
ast::PatKind::BindPat(pat) => {
return NavigationTarget::from_bind_pat(file_id, &pat)
}
_ => ("_".into(), pat.syntax_node_ptr().range()),
},
Either::B(slf) => ("self".into(), slf.syntax_node_ptr().range()),
};
NavigationTarget {
file_id,
name: name.to_string().into(),
full_range: ptr.range(),
name,
full_range,
focus_range: None,
kind: NAME,
container_name: None,
@ -229,6 +239,7 @@ impl NavigationTarget {

/// Allows `NavigationTarget` to be created from a `NameOwner`
pub(crate) fn from_named(file_id: FileId, node: &impl ast::NameOwner) -> NavigationTarget {
//FIXME: use `_` instead of empty string
let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
let focus_range = node.name().map(|it| it.syntax().range());
NavigationTarget::from_syntax(file_id, name, focus_range, node.syntax())

@ -5,7 +5,6 @@ use ra_syntax::{
SyntaxNode,
};
use test_utils::tested_by;
use hir::Resolution;

use crate::{FilePosition, NavigationTarget, db::RootDatabase, RangeInfo};

@ -48,127 +47,70 @@ pub(crate) fn reference_definition(
) -> ReferenceResult {
use self::ReferenceResult::*;

let function = hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax());
let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);

if let Some(function) = function {
// Check if it is a method
if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
tested_by!(goto_definition_works_for_methods);
let infer_result = function.infer(db);
let source_map = function.body_source_map(db);
let expr = ast::Expr::cast(method_call.syntax()).unwrap();
if let Some(func) =
source_map.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
{
return Exact(NavigationTarget::from_function(db, func));
};
}
// It could also be a field access
if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
tested_by!(goto_definition_works_for_fields);
let infer_result = function.infer(db);
let source_map = function.body_source_map(db);
let expr = ast::Expr::cast(field_expr.syntax()).unwrap();
if let Some(field) =
source_map.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
{
return Exact(NavigationTarget::from_field(db, field));
};
// Special cases:

// Check if it is a method
if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
tested_by!(goto_definition_works_for_methods);
if let Some(func) = analyzer.resolve_method_call(method_call) {
return Exact(NavigationTarget::from_function(db, func));
}
}
// It could also be a field access
if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
tested_by!(goto_definition_works_for_fields);
if let Some(field) = analyzer.resolve_field(field_expr) {
return Exact(NavigationTarget::from_field(db, field));
};
}

// It could also be a named field
if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::NamedField::cast) {
tested_by!(goto_definition_works_for_named_fields);
// It could also be a named field
if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::NamedField::cast) {
tested_by!(goto_definition_works_for_named_fields);

let infer_result = function.infer(db);
let source_map = function.body_source_map(db);
let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);

let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);
if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) {
if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
let hir_path = hir::Path::from_name_ref(name_ref);
let hir_name = hir_path.as_ident().unwrap();

if let Some(expr) = struct_lit.and_then(|lit| source_map.node_expr(lit.into())) {
let ty = infer_result[expr].clone();
if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
let hir_path = hir::Path::from_name_ref(name_ref);
let hir_name = hir_path.as_ident().unwrap();

if let Some(field) = s.field(db, hir_name) {
return Exact(NavigationTarget::from_field(db, field));
}
if let Some(field) = s.field(db, hir_name) {
return Exact(NavigationTarget::from_field(db, field));
}
}
}
}

// Try name resolution
let resolver = hir::source_binder::resolver_for_node(db, file_id, name_ref.syntax());
if let Some(path) =
name_ref.syntax().ancestors().find_map(ast::Path::cast).and_then(hir::Path::from_ast)
{
let resolved = resolver.resolve_path(db, &path);
match resolved.clone().take_types().or_else(|| resolved.take_values()) {
Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
Some(Resolution::LocalBinding(pat)) => {
let body = resolver.body().expect("no body for local binding");
let source_map = body.owner().body_source_map(db);
let ptr = source_map.pat_syntax(pat).expect("pattern not found in syntax mapping");
let name =
path.as_ident().cloned().expect("local binding from a multi-segment path");
let ptr = ptr.either(|it| it.into(), |it| it.into());
let nav = NavigationTarget::from_scope_entry(file_id, name, ptr);
return Exact(nav);
}
Some(Resolution::GenericParam(..)) => {
// FIXME: go to the generic param def
}
Some(Resolution::SelfType(impl_block)) => {
let ty = impl_block.target_ty(db);

if let Some((def_id, _)) = ty.as_adt() {
return Exact(NavigationTarget::from_adt_def(db, def_id));
// General case, a path or a local:
if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
if let Some(resolved) = analyzer.resolve_path(db, path) {
match resolved {
hir::PathResolution::Def(def) => return Exact(NavigationTarget::from_def(db, def)),
hir::PathResolution::LocalBinding(pat) => {
let nav = NavigationTarget::from_pat(db, file_id, pat);
return Exact(nav);
}
}
None => {
// If we failed to resolve then check associated items
if let Some(function) = function {
// Resolve associated item for path expressions
if let Some(path_expr) =
name_ref.syntax().ancestors().find_map(ast::PathExpr::cast)
{
let infer_result = function.infer(db);
let source_map = function.body_source_map(db);
hir::PathResolution::GenericParam(..) => {
// FIXME: go to the generic param def
}
hir::PathResolution::SelfType(impl_block) => {
let ty = impl_block.target_ty(db);

if let Some(expr) = ast::Expr::cast(path_expr.syntax()) {
if let Some(res) = source_map
.node_expr(expr)
.and_then(|it| infer_result.assoc_resolutions_for_expr(it.into()))
{
return Exact(NavigationTarget::from_impl_item(db, res));
}
}
}

// Resolve associated item for path patterns
if let Some(path_pat) =
name_ref.syntax().ancestors().find_map(ast::PathPat::cast)
{
let infer_result = function.infer(db);
let source_map = function.body_source_map(db);

let pat: &ast::Pat = path_pat.into();

if let Some(res) = source_map
.node_pat(pat)
.and_then(|it| infer_result.assoc_resolutions_for_pat(it.into()))
{
return Exact(NavigationTarget::from_impl_item(db, res));
}
if let Some((def_id, _)) = ty.as_adt() {
return Exact(NavigationTarget::from_adt_def(db, def_id));
}
}
hir::PathResolution::AssocItem(assoc) => {
return Exact(NavigationTarget::from_impl_item(db, assoc))
}
}
}
}

// If that fails try the index based approach.
// Fallback index based approach:
let navs = crate::symbol_index::index_resolve(db, name_ref)
.into_iter()
.map(NavigationTarget::from_symbol)

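The rewritten reference_definition boils down to: build one SourceAnalyzer at the name reference, then ask it the relevant questions in order. A condensed sketch of that control flow, using only calls present in the hunk above (the function name and signature are illustrative; error handling and the named-field special case are omitted):

use ra_db::FileId;
use ra_syntax::ast::{self, AstNode};
use crate::{NavigationTarget, db::RootDatabase};

// Illustrative condensation of reference_definition above, not the full logic.
fn resolve_name_ref(db: &RootDatabase, file_id: FileId, name_ref: &ast::NameRef) -> Option<NavigationTarget> {
    let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
    // Method calls resolve directly through the analyzer.
    if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
        if let Some(func) = analyzer.resolve_method_call(method_call) {
            return Some(NavigationTarget::from_function(db, func));
        }
    }
    // So do field accesses.
    if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
        if let Some(field) = analyzer.resolve_field(field_expr) {
            return Some(NavigationTarget::from_field(db, field));
        }
    }
    // General case: resolve the enclosing path and map each resolution kind to a target.
    let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
    match analyzer.resolve_path(db, path)? {
        hir::PathResolution::Def(def) => Some(NavigationTarget::from_def(db, def)),
        hir::PathResolution::LocalBinding(pat) => Some(NavigationTarget::from_pat(db, file_id, pat)),
        hir::PathResolution::AssocItem(assoc) => Some(NavigationTarget::from_impl_item(db, assoc)),
        _ => None, // generic params and Self types handled separately above
    }
}
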
@ -132,17 +132,15 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
.ancestors()
.take_while(|it| it.range() == leaf_node.range())
.find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
let infer = function.infer(db);
let source_map = function.body_source_map(db);
if let Some(expr) = ast::Expr::cast(node).and_then(|e| source_map.node_expr(e)) {
Some(infer[expr].display(db).to_string())
} else if let Some(pat) = ast::Pat::cast(node).and_then(|p| source_map.node_pat(p)) {
Some(infer[pat].display(db).to_string())
let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None);
let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) {
ty
} else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
ty
} else {
None
}
return None;
};
Some(ty.display(db).to_string())
}

#[cfg(test)]

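hover's type_of follows the same recipe: one analyzer over the node, type_of for expressions, type_of_pat for patterns. A sketch of the shape, assuming only the signatures used in the hunk above (the helper name is illustrative):

use ra_db::FileId;
use ra_syntax::{ast, AstNode, SyntaxNode};
use hir::HirDisplay;
use crate::db::RootDatabase;

// Illustrative condensation of the rewritten type_of above.
fn type_at_node(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option<String> {
    let analyzer = hir::SourceAnalyzer::new(db, file_id, node, None);
    let ty = ast::Expr::cast(node)
        .and_then(|e| analyzer.type_of(db, e))
        .or_else(|| ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)))?;
    Some(ty.display(db).to_string())
}
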
@ -61,11 +61,10 @@ pub(crate) fn find_all_refs(
position: FilePosition,
) -> Option<ReferenceSearchResult> {
let file = db.parse(position.file_id);
let (binding, descr) = find_binding(db, &file, position)?;
let (binding, analyzer) = find_binding(db, &file, position)?;
let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);

let references = descr
.scopes(db)
let references = analyzer
.find_all_refs(binding)
.into_iter()
.map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range })
@ -77,21 +76,18 @@ pub(crate) fn find_all_refs(
db: &RootDatabase,
source_file: &'a SourceFile,
position: FilePosition,
) -> Option<(&'a ast::BindPat, hir::Function)> {
) -> Option<(&'a ast::BindPat, hir::SourceAnalyzer)> {
let syntax = source_file.syntax();
if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
let descr =
source_binder::function_from_child_node(db, position.file_id, binding.syntax())?;
return Some((binding, descr));
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None);
return Some((binding, analyzer));
};
let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
let descr =
source_binder::function_from_child_node(db, position.file_id, name_ref.syntax())?;
let scope = descr.scopes(db);
let resolved = scope.resolve_local_name(name_ref)?;
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
let resolved = analyzer.resolve_local_name(name_ref)?;
if let Either::A(ptr) = resolved.ptr() {
if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file).kind() {
return Some((binding, descr));
return Some((binding, analyzer));
}
}
None

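After this change, find_binding hands back a SourceAnalyzer along with the binding, and the reference search is a single find_all_refs call on it. A simplified sketch of that flow, limited to the calls shown above (the helper and its return type are illustrative, not part of the PR, and assume find_all_refs yields items with a range field as used in the hunk):

use ra_syntax::{ast, AstNode, SourceFile, TextRange, algo::find_node_at_offset};
use crate::{FilePosition, db::RootDatabase};

// Illustrative: find the binding under the cursor and collect the ranges of its references.
fn local_ref_ranges(db: &RootDatabase, file: &SourceFile, position: FilePosition) -> Option<Vec<TextRange>> {
    let binding = find_node_at_offset::<ast::BindPat>(file.syntax(), position.offset)?;
    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None);
    let ranges = analyzer.find_all_refs(binding).into_iter().map(|ref_desc| ref_desc.range).collect();
    Some(ranges)
}
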
@ -65,7 +65,6 @@ fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Opt
let range = module.syntax().range();
let module = hir::source_binder::module_from_child_node(db, file_id, module.syntax())?;

// FIXME: thread cancellation instead of `.ok`ing
let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
Some(Runnable { range, kind: RunnableKind::TestMod { path } })
}

@ -60,7 +60,7 @@ fn reparse_fuzz_tests() {
}
}

/// Test that Rust-analyzer can parse and validate the rust-analyser
/// Test that Rust-analyzer can parse and validate the rust-analyzer
/// FIXME: Use this as a benchmark
#[test]
fn self_hosting_parsing() {