Florian Diebold 2018-12-23 12:15:46 +01:00
parent 7348f7883f
commit 515c3bc59b
4 changed files with 110 additions and 108 deletions

View File

@@ -516,8 +516,14 @@ pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<St
let syntax = file.syntax();
let node = find_covering_node(syntax, range);
let parent_fn = node.ancestors().filter_map(FnDef::cast).next();
let parent_fn = if let Some(p) = parent_fn { p } else { return Ok(None) };
let function = ctry!(source_binder::function_from_source(&*self.db, file_id, parent_fn)?);
let parent_fn = if let Some(p) = parent_fn {
p
} else {
return Ok(None);
};
let function = ctry!(source_binder::function_from_source(
&*self.db, file_id, parent_fn
)?);
let infer = function.infer(&*self.db);
Ok(infer.type_of_node(node).map(|t| t.to_string()))
}
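Note: the `ctry!` macro used in `type_of` is an early-return helper for `Cancelable<Option<T>>` values. The definition below is only a sketch of what it presumably expands to (it is defined elsewhere in the crate and is not part of this diff): it behaves like `?`, but bails out with `Ok(None)` when the inner `Option` is `None`.

// Sketch (assumed, not shown in this commit): unwrap the `Option` produced
// after `?`, returning `Ok(None)` from the enclosing function if it is empty.
macro_rules! ctry {
    ($expr:expr) => {
        match $expr {
            None => return Ok(None),
            Some(it) => it,
        }
    };
}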

View File

@@ -2,24 +2,21 @@
#[cfg(test)]
mod tests;
use rustc_hash::{FxHashMap, FxHashSet};
use std::sync::Arc;
use std::collections::HashMap;
use std::fmt;
use rustc_hash::{FxHashMap};
use ra_db::LocalSyntaxPtr;
use ra_syntax::{
TextRange, TextUnit, SmolStr,
algo::visit::{visitor, Visitor},
ast::{self, AstNode, DocCommentsOwner, NameOwner, LoopBodyOwner, ArgListOwner},
SmolStr,
ast::{self, AstNode, LoopBodyOwner, ArgListOwner},
SyntaxNodeRef
};
use crate::{
FnScopes,
db::HirDatabase,
arena::{Arena, Id},
};
// pub(crate) type TypeId = Id<Ty>;
@@ -150,9 +147,17 @@ pub fn new(node: ast::TypeRef) -> Self {
TupleType(_inner) => Ty::Unknown, // TODO
NeverType(..) => Ty::Never,
PathType(inner) => {
let path = if let Some(p) = inner.path() { p } else { return Ty::Unknown };
let path = if let Some(p) = inner.path() {
p
} else {
return Ty::Unknown;
};
if path.qualifier().is_none() {
let name = path.segment().and_then(|s| s.name_ref()).map(|n| n.text()).unwrap_or(SmolStr::new(""));
let name = path
.segment()
.and_then(|s| s.name_ref())
.map(|n| n.text())
.unwrap_or(SmolStr::new(""));
if let Some(int_ty) = primitive::IntTy::from_string(&name) {
Ty::Int(int_ty)
} else if let Some(uint_ty) = primitive::UintTy::from_string(&name) {
@@ -167,16 +172,16 @@ pub fn new(node: ast::TypeRef) -> Self {
// TODO
Ty::Unknown
}
},
PointerType(_inner) => Ty::Unknown, // TODO
ArrayType(_inner) => Ty::Unknown, // TODO
SliceType(_inner) => Ty::Unknown, // TODO
ReferenceType(_inner) => Ty::Unknown, // TODO
}
PointerType(_inner) => Ty::Unknown, // TODO
ArrayType(_inner) => Ty::Unknown, // TODO
SliceType(_inner) => Ty::Unknown, // TODO
ReferenceType(_inner) => Ty::Unknown, // TODO
PlaceholderType(_inner) => Ty::Unknown, // TODO
FnPointerType(_inner) => Ty::Unknown, // TODO
ForType(_inner) => Ty::Unknown, // TODO
ImplTraitType(_inner) => Ty::Unknown, // TODO
DynTraitType(_inner) => Ty::Unknown, // TODO
FnPointerType(_inner) => Ty::Unknown, // TODO
ForType(_inner) => Ty::Unknown, // TODO
ImplTraitType(_inner) => Ty::Unknown, // TODO
DynTraitType(_inner) => Ty::Unknown, // TODO
}
}
@@ -203,7 +208,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
write!(f, ")")
}
Ty::Unknown => write!(f, "[unknown]")
Ty::Unknown => write!(f, "[unknown]"),
}
}
}
@@ -230,7 +235,7 @@ impl InferenceContext {
fn new(scopes: Arc<FnScopes>) -> Self {
InferenceContext {
type_for: FxHashMap::default(),
scopes
scopes,
}
}
@@ -238,7 +243,7 @@ fn write_ty(&mut self, node: SyntaxNodeRef, ty: Ty) {
self.type_for.insert(LocalSyntaxPtr::new(node), ty);
}
fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
fn unify(&mut self, _ty1: &Ty, _ty2: &Ty) -> bool {
unimplemented!()
}
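The `unify` stub above only had its parameters underscore-prefixed; it is still `unimplemented!()`. For readers unfamiliar with the term, the following is a purely hypothetical, self-contained toy (the `ToyTy` enum and `unify` function are illustrative only, not ra_hir code) showing what structural unification with an `Unknown` wildcard looks like:

// Hypothetical toy example, not ra_hir code: structural unification over a
// tiny type enum, where `Unknown` unifies with anything.
#[derive(Clone, Debug, PartialEq)]
enum ToyTy {
    Unknown,
    Bool,
    Tuple(Vec<ToyTy>),
}

fn unify(t1: &ToyTy, t2: &ToyTy) -> bool {
    match (t1, t2) {
        // `Unknown` acts as a wildcard.
        (ToyTy::Unknown, _) | (_, ToyTy::Unknown) => true,
        // Tuples unify element-wise.
        (ToyTy::Tuple(a), ToyTy::Tuple(b)) => {
            a.len() == b.len() && a.iter().zip(b).all(|(x, y)| unify(x, y))
        }
        // Everything else must match exactly.
        _ => t1 == t2,
    }
}

fn main() {
    assert!(unify(&ToyTy::Unknown, &ToyTy::Bool));
    assert!(!unify(&ToyTy::Bool, &ToyTy::Tuple(vec![])));
}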
@@ -303,7 +308,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
if let Some(expr) = e.iterable() {
self.infer_expr(expr);
}
if let Some(pat) = e.pat() {
if let Some(_pat) = e.pat() {
// TODO write type for pat
}
if let Some(block) = e.loop_body() {
@@ -313,7 +318,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
ast::Expr::LambdaExpr(e) => {
let body_ty = if let Some(body) = e.body() {
let _body_ty = if let Some(body) = e.body() {
self.infer_expr(body)
} else {
Ty::Unknown
@@ -339,7 +344,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
ast::Expr::MatchExpr(e) => {
let ty = if let Some(match_expr) = e.expr() {
let _ty = if let Some(match_expr) = e.expr() {
self.infer_expr(match_expr)
} else {
Ty::Unknown
@@ -348,7 +353,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
for arm in match_arm_list.arms() {
// TODO type the bindings in pat
// TODO type the guard
let ty = if let Some(e) = arm.expr() {
let _ty = if let Some(e) = arm.expr() {
self.infer_expr(e)
} else {
Ty::Unknown
@@ -360,12 +365,8 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
}
ast::Expr::TupleExpr(e) => {
Ty::Unknown
}
ast::Expr::ArrayExpr(e) => {
Ty::Unknown
}
ast::Expr::TupleExpr(_e) => Ty::Unknown,
ast::Expr::ArrayExpr(_e) => Ty::Unknown,
ast::Expr::PathExpr(e) => {
if let Some(p) = e.path() {
if p.qualifier().is_none() {
@@ -392,12 +393,8 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
}
ast::Expr::ContinueExpr(e) => {
Ty::Never
}
ast::Expr::BreakExpr(e) => {
Ty::Never
}
ast::Expr::ContinueExpr(_e) => Ty::Never,
ast::Expr::BreakExpr(_e) => Ty::Never,
ast::Expr::ParenExpr(e) => {
if let Some(e) = e.expr() {
self.infer_expr(e)
@@ -405,9 +402,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
}
ast::Expr::Label(e) => {
Ty::Unknown
}
ast::Expr::Label(_e) => Ty::Unknown,
ast::Expr::ReturnExpr(e) => {
if let Some(e) = e.expr() {
// TODO unify with return type
@@ -419,21 +414,15 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
// Can this even occur outside of a match expression?
Ty::Unknown
}
ast::Expr::StructLit(e) => {
Ty::Unknown
}
ast::Expr::StructLit(_e) => Ty::Unknown,
ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => {
// Can this even occur outside of a struct literal?
Ty::Unknown
}
ast::Expr::IndexExpr(e) => {
Ty::Unknown
}
ast::Expr::FieldExpr(e) => {
Ty::Unknown
}
ast::Expr::IndexExpr(_e) => Ty::Unknown,
ast::Expr::FieldExpr(_e) => Ty::Unknown,
ast::Expr::TryExpr(e) => {
let inner_ty = if let Some(e) = e.expr() {
let _inner_ty = if let Some(e) = e.expr() {
self.infer_expr(e)
} else {
Ty::Unknown
@@ -441,7 +430,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
ast::Expr::CastExpr(e) => {
let inner_ty = if let Some(e) = e.expr() {
let _inner_ty = if let Some(e) = e.expr() {
self.infer_expr(e)
} else {
Ty::Unknown
@@ -451,7 +440,7 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
cast_ty
}
ast::Expr::RefExpr(e) => {
let inner_ty = if let Some(e) = e.expr() {
let _inner_ty = if let Some(e) = e.expr() {
self.infer_expr(e)
} else {
Ty::Unknown
@@ -459,22 +448,16 @@ fn infer_expr(&mut self, expr: ast::Expr) -> Ty {
Ty::Unknown
}
ast::Expr::PrefixExpr(e) => {
let inner_ty = if let Some(e) = e.expr() {
let _inner_ty = if let Some(e) = e.expr() {
self.infer_expr(e)
} else {
Ty::Unknown
};
Ty::Unknown
}
ast::Expr::RangeExpr(e) => {
Ty::Unknown
}
ast::Expr::BinExpr(e) => {
Ty::Unknown
}
ast::Expr::Literal(e) => {
Ty::Unknown
}
ast::Expr::RangeExpr(_e) => Ty::Unknown,
ast::Expr::BinExpr(_e) => Ty::Unknown,
ast::Expr::Literal(_e) => Ty::Unknown,
};
self.write_ty(expr.syntax(), ty.clone());
ty
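Most of the `infer_expr` changes above either collapse match arms onto one line or prefix not-yet-used bindings with `_`. The underscore keeps the right-hand side's side effects (the recursive `infer_expr` calls still record types for sub-expressions) while silencing the `unused_variables` lint. A minimal standalone illustration of that pattern (`record_something` is a made-up stand-in, not ra_hir code):

// The right-hand side is still evaluated; the binding just no longer warns.
fn record_something() -> i32 {
    println!("side effect still happens");
    42
}

fn main() {
    let _ignored = record_something(); // no `unused_variables` warning
}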
@@ -505,7 +488,7 @@ fn infer_block(&mut self, node: ast::Block) -> Ty {
}
}
pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc<FnScopes>) -> InferenceResult {
pub fn infer(_db: &impl HirDatabase, node: ast::FnDef, scopes: Arc<FnScopes>) -> InferenceResult {
let mut ctx = InferenceContext::new(scopes);
for param in node.param_list().unwrap().params() {
@@ -522,5 +505,7 @@ pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc<FnScopes>) ->
// TODO 'resolve' the types: replace inference variables by their inferred results
InferenceResult { type_for: ctx.type_for }
InferenceResult {
type_for: ctx.type_for,
}
}
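`InferenceResult` keys the inferred types by `LocalSyntaxPtr`, which is what lets `type_of` in the first file map a syntax node back to a type string. A hedged sketch of what the lookup presumably looks like (the `type_of_node` accessor itself is not part of this diff, so its exact body is an assumption):

// Assumed sketch, not shown in this commit: a plain map lookup from a node's
// stable syntax pointer to its inferred type.
impl InferenceResult {
    pub fn type_of_node(&self, node: SyntaxNodeRef) -> Option<Ty> {
        self.type_for.get(&LocalSyntaxPtr::new(node)).cloned()
    }
}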

View File

@@ -1,31 +1,38 @@
use std::fmt::Write;
use std::sync::Arc;
use std::path::{Path, PathBuf};
use std::path::{PathBuf};
use salsa::Database;
use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase};
use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}};
use ra_db::{SyntaxDatabase};
use ra_syntax::ast::{self, AstNode};
use test_utils::{project_dir, dir_tests};
use relative_path::RelativePath;
use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode};
use crate::{
self as hir,
db::HirDatabase,
source_binder,
mock::MockDatabase,
};
fn infer_file(content: &str) -> String {
let (db, source_root, file_id) = MockDatabase::with_single_file(content);
let (db, _, file_id) = MockDatabase::with_single_file(content);
let source_file = db.source_file(file_id);
let mut acc = String::new();
for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap();
for fn_def in source_file
.syntax()
.descendants()
.filter_map(ast::FnDef::cast)
{
let func = source_binder::function_from_source(&db, file_id, fn_def)
.unwrap()
.unwrap();
let inference_result = func.infer(&db);
for (syntax_ptr, ty) in &inference_result.type_for {
let node = syntax_ptr.resolve(&source_file);
write!(acc, "{} '{}': {}\n", syntax_ptr.range(), ellipsize(node.text().to_string().replace("\n", " "), 15), ty);
write!(
acc,
"{} '{}': {}\n",
syntax_ptr.range(),
ellipsize(node.text().to_string().replace("\n", " "), 15),
ty
)
.unwrap();
}
}
acc
@@ -51,9 +58,7 @@ fn ellipsize(mut text: String, max_len: usize) -> String {
#[test]
pub fn infer_tests() {
dir_tests(&test_data_dir(), &["."], |text, _path| {
infer_file(text)
});
dir_tests(&test_data_dir(), &["."], |text, _path| infer_file(text));
}
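Both this test and the parser tests below drive `dir_tests` from the test_utils crate. Its definition is not part of this diff; the shape below is inferred from the call sites only and should be read as an assumption: it walks the listed sub-directories of the test data directory, feeds each test file's text (and path) to the closure, and compares the returned String against the checked-in expected output.

// Assumed shape, inferred from the call sites (actual definition lives in
// the test_utils crate and is not shown in this commit):
//
// pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F)
// where
//     F: Fn(&str, &Path) -> String;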
fn test_data_dir() -> PathBuf {

View File

@@ -1,12 +1,10 @@
extern crate ra_syntax;
#[macro_use]
extern crate test_utils;
extern crate walkdir;
use std::{
fmt::Write,
fs,
path::{Path, PathBuf, Component},
path::{PathBuf, Component},
};
use test_utils::{project_dir, dir_tests, read_text, collect_tests};
@@ -25,28 +23,36 @@ fn lexer_tests() {
#[test]
fn parser_tests() {
dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
let file = SourceFileNode::parse(text);
let errors = file.errors();
assert_eq!(
&*errors,
&[] as &[ra_syntax::SyntaxError],
"There should be no errors in the file {:?}",
path.display()
);
dump_tree(file.syntax())
});
dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
let file = SourceFileNode::parse(text);
let errors = file.errors();
assert_ne!(
&*errors,
&[] as &[ra_syntax::SyntaxError],
"There should be errors in the file {:?}",
path.display()
);
dump_tree(file.syntax())
});
dir_tests(
&test_data_dir(),
&["parser/inline/ok", "parser/ok"],
|text, path| {
let file = SourceFileNode::parse(text);
let errors = file.errors();
assert_eq!(
&*errors,
&[] as &[ra_syntax::SyntaxError],
"There should be no errors in the file {:?}",
path.display()
);
dump_tree(file.syntax())
},
);
dir_tests(
&test_data_dir(),
&["parser/err", "parser/inline/err"],
|text, path| {
let file = SourceFileNode::parse(text);
let errors = file.errors();
assert_ne!(
&*errors,
&[] as &[ra_syntax::SyntaxError],
"There should be errors in the file {:?}",
path.display()
);
dump_tree(file.syntax())
},
);
}
#[test]