2428: Remove TypableDef r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2019-11-27 13:41:55 +00:00 committed by GitHub
commit 2798beeeb0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 131 additions and 219 deletions

View File

@ -534,14 +534,6 @@ impl VariantDef {
}
}
/// Looks up a field of this variant by `name`, delegating to the
/// concrete variant kind. Returns `None` if no such field exists.
pub(crate) fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
match self {
// Each arm forwards to the underlying def's own `field` lookup.
VariantDef::Struct(it) => it.field(db, name),
VariantDef::Union(it) => it.field(db, name),
VariantDef::EnumVariant(it) => it.field(db, name),
}
}
pub fn module(self, db: &impl HirDatabase) -> Module {
match self {
VariantDef::Struct(it) => it.module(db),
@ -618,7 +610,7 @@ impl Function {
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into())
db.infer(self.id.into())
}
/// The containing impl block, if this is a method.
@ -647,7 +639,7 @@ impl Function {
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
let infer = self.infer(db);
infer.add_diagnostics(db, self, sink);
infer.add_diagnostics(db, self.id, sink);
let mut validator = ExprValidator::new(self, infer, sink);
validator.validate_body(db);
}
@ -672,7 +664,7 @@ impl Const {
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into())
db.infer(self.id.into())
}
/// The containing impl block, if this is a type alias.
@ -715,7 +707,7 @@ impl Static {
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into())
db.infer(self.id.into())
}
}
@ -908,9 +900,9 @@ impl Local {
}
pub fn ty(self, db: &impl HirDatabase) -> Type {
let infer = db.infer(self.parent);
let ty = infer[self.pat_id].clone();
let def = DefWithBodyId::from(self.parent);
let infer = db.infer(def);
let ty = infer[self.pat_id].clone();
let resolver = def.resolver(db);
let krate = def.module(db).krate;
let environment = TraitEnvironment::lower(db, &resolver);

View File

@ -2,18 +2,15 @@
use std::sync::Arc;
use hir_def::{GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId};
use hir_def::{DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId};
use ra_arena::map::ArenaMap;
use ra_db::{salsa, CrateId};
use crate::{
ty::{
method_resolution::CrateImplBlocks,
traits::{AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, Ty, TyDefId, TypeCtor,
ValueTyDefId,
},
DefWithBody,
use crate::ty::{
method_resolution::CrateImplBlocks,
traits::{AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, Ty, TyDefId, TypeCtor,
ValueTyDefId,
};
pub use hir_def::db::{
@ -32,7 +29,7 @@ pub use hir_expand::db::{
#[salsa::requires(salsa::Database)]
pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::ty::infer_query)]
fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>;
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
#[salsa::invoke(crate::ty::ty_query)]
fn ty(&self, def: TyDefId) -> Ty;

View File

@ -4,13 +4,13 @@
//! are splitting the hir.
use hir_def::{
AdtId, AssocItemId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
GenericDefId, ModuleDefId, StaticId, StructFieldId, StructId, TypeAliasId, UnionId, VariantId,
AdtId, AssocItemId, AttrDefId, DefWithBodyId, EnumVariantId, GenericDefId, ModuleDefId,
StructFieldId, VariantId,
};
use crate::{
ty::TypableDef, Adt, AssocItem, AttrDef, Const, Crate, DefWithBody, EnumVariant, Function,
GenericDef, ModuleDef, Static, StructField, TypeAlias, VariantDef,
Adt, AssocItem, AttrDef, Crate, DefWithBody, EnumVariant, GenericDef, ModuleDef, StructField,
VariantDef,
};
impl From<ra_db::CrateId> for Crate {
@ -137,58 +137,6 @@ impl From<GenericDef> for GenericDefId {
}
}
impl From<AdtId> for TypableDef {
fn from(id: AdtId) -> Self {
Adt::from(id).into()
}
}
impl From<StructId> for TypableDef {
fn from(id: StructId) -> Self {
AdtId::StructId(id).into()
}
}
impl From<UnionId> for TypableDef {
fn from(id: UnionId) -> Self {
AdtId::UnionId(id).into()
}
}
impl From<EnumId> for TypableDef {
fn from(id: EnumId) -> Self {
AdtId::EnumId(id).into()
}
}
impl From<EnumVariantId> for TypableDef {
fn from(id: EnumVariantId) -> Self {
EnumVariant::from(id).into()
}
}
impl From<TypeAliasId> for TypableDef {
fn from(id: TypeAliasId) -> Self {
TypeAlias::from(id).into()
}
}
impl From<FunctionId> for TypableDef {
fn from(id: FunctionId) -> Self {
Function::from(id).into()
}
}
impl From<ConstId> for TypableDef {
fn from(id: ConstId) -> Self {
Const::from(id).into()
}
}
impl From<StaticId> for TypableDef {
fn from(id: StaticId) -> Self {
Static::from(id).into()
}
}
impl From<Adt> for GenericDefId {
fn from(id: Adt) -> Self {
match id {

View File

@ -168,7 +168,7 @@ impl SourceAnalyzer {
resolver,
body_owner: Some(def),
body_source_map: Some(source_map),
infer: Some(db.infer(def)),
infer: Some(db.infer(def.into())),
scopes: Some(scopes),
file_id: node.file_id,
}
@ -214,27 +214,27 @@ impl SourceAnalyzer {
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
let expr_id = self.expr_id(&call.clone().into())?;
self.infer.as_ref()?.method_resolution(expr_id)
self.infer.as_ref()?.method_resolution(expr_id).map(Function::from)
}
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
let expr_id = self.expr_id(&field.clone().into())?;
self.infer.as_ref()?.field_resolution(expr_id)
self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
}
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
let expr_id = self.expr_id(&field.expr()?)?;
self.infer.as_ref()?.record_field_resolution(expr_id)
self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into())
}
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
let expr_id = self.expr_id(&record_lit.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
}
pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> {
let pat_id = self.pat_id(&record_pat.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
}
pub fn resolve_macro_call(
@ -297,13 +297,13 @@ impl SourceAnalyzer {
if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
let expr_id = self.expr_id(&path_expr.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
return Some(PathResolution::AssocItem(assoc));
return Some(PathResolution::AssocItem(assoc.into()));
}
}
if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
let pat_id = self.pat_id(&path_pat.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
return Some(PathResolution::AssocItem(assoc));
return Some(PathResolution::AssocItem(assoc.into()));
}
}
// This must be a normal source file rather than macro file.

View File

@ -38,7 +38,7 @@ pub use lower::CallableDef;
pub(crate) use lower::{
callable_item_sig, field_types_query, generic_defaults_query,
generic_predicates_for_param_query, generic_predicates_query, ty_query, value_ty_query,
TyDefId, TypableDef, ValueTyDefId,
TyDefId, ValueTyDefId,
};
pub(crate) use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};

View File

@ -22,11 +22,13 @@ use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use rustc_hash::FxHashMap;
use hir_def::{
body::Body,
data::{ConstData, FunctionData},
path::known,
expr::{BindingAnnotation, ExprId, PatId},
path::{known, Path},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{Mutability, TypeRef},
AdtId, AssocItemId, DefWithBodyId,
AdtId, AssocItemId, DefWithBodyId, FunctionId, StructFieldId, TypeAliasId, VariantId,
};
use hir_expand::{diagnostics::DiagnosticSink, name};
use ra_arena::map::ArenaMap;
@ -34,17 +36,12 @@ use ra_prof::profile;
use test_utils::tested_by;
use super::{
primitive::{FloatTy, IntTy},
traits::{Guidance, Obligation, ProjectionPredicate, Solution},
ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor,
TypeWalk, Uncertain,
};
use crate::{
code_model::TypeAlias,
db::HirDatabase,
expr::{BindingAnnotation, Body, ExprId, PatId},
ty::infer::diagnostics::InferenceDiagnostic,
AssocItem, DefWithBody, FloatTy, Function, IntTy, Path, StructField, VariantDef,
};
use crate::{db::HirDatabase, ty::infer::diagnostics::InferenceDiagnostic};
macro_rules! ty_app {
($ctor:pat, $param:pat) => {
@ -62,15 +59,15 @@ mod pat;
mod coerce;
/// The entry point of type inference.
pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> {
pub fn infer_query(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile("infer_query");
let resolver = DefWithBodyId::from(def).resolver(db);
let resolver = def.resolver(db);
let mut ctx = InferenceContext::new(db, def, resolver);
match &def {
DefWithBody::Const(c) => ctx.collect_const(&db.const_data(c.id)),
DefWithBody::Function(f) => ctx.collect_fn(&db.function_data(f.id)),
DefWithBody::Static(s) => ctx.collect_const(&db.static_data(s.id)),
match def {
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)),
DefWithBodyId::StaticId(s) => ctx.collect_const(&db.static_data(s)),
}
ctx.infer_body();
@ -121,15 +118,15 @@ pub struct TypeMismatch {
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct InferenceResult {
/// For each method call expr, records the function it resolves to.
method_resolutions: FxHashMap<ExprId, Function>,
method_resolutions: FxHashMap<ExprId, FunctionId>,
/// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, StructField>,
field_resolutions: FxHashMap<ExprId, StructFieldId>,
/// For each field in record literal, records the field it resolves to.
record_field_resolutions: FxHashMap<ExprId, StructField>,
record_field_resolutions: FxHashMap<ExprId, StructFieldId>,
/// For each struct literal, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantDef>,
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, AssocItem>,
assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
diagnostics: Vec<InferenceDiagnostic>,
pub(super) type_of_expr: ArenaMap<ExprId, Ty>,
pub(super) type_of_pat: ArenaMap<PatId, Ty>,
@ -137,25 +134,25 @@ pub struct InferenceResult {
}
impl InferenceResult {
pub fn method_resolution(&self, expr: ExprId) -> Option<Function> {
pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> {
self.method_resolutions.get(&expr).copied()
}
pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> {
pub fn field_resolution(&self, expr: ExprId) -> Option<StructFieldId> {
self.field_resolutions.get(&expr).copied()
}
pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructField> {
pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> {
self.record_field_resolutions.get(&expr).copied()
}
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantDef> {
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()
}
pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantDef> {
pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItem> {
pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItem> {
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
@ -164,7 +161,7 @@ impl InferenceResult {
pub(crate) fn add_diagnostics(
&self,
db: &impl HirDatabase,
owner: Function,
owner: FunctionId,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
@ -191,7 +188,7 @@ impl Index<PatId> for InferenceResult {
#[derive(Clone, Debug)]
struct InferenceContext<'a, D: HirDatabase> {
db: &'a D,
owner: DefWithBody,
owner: DefWithBodyId,
body: Arc<Body>,
resolver: Resolver,
var_unification_table: InPlaceUnificationTable<TypeVarId>,
@ -209,7 +206,7 @@ struct InferenceContext<'a, D: HirDatabase> {
}
impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn new(db: &'a D, owner: DefWithBody, resolver: Resolver) -> Self {
fn new(db: &'a D, owner: DefWithBodyId, resolver: Resolver) -> Self {
InferenceContext {
result: InferenceResult::default(),
var_unification_table: InPlaceUnificationTable::new(),
@ -243,15 +240,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.result.type_of_expr.insert(expr, ty);
}
fn write_method_resolution(&mut self, expr: ExprId, func: Function) {
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) {
self.result.method_resolutions.insert(expr, func);
}
fn write_field_resolution(&mut self, expr: ExprId, field: StructField) {
fn write_field_resolution(&mut self, expr: ExprId, field: StructFieldId) {
self.result.field_resolutions.insert(expr, field);
}
fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantDef) {
fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
self.result.variant_resolutions.insert(id, variant);
}
@ -514,7 +511,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
})
}
fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantDef>) {
fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
let path = match path {
Some(path) => path,
None => return (Ty::Unknown, None),
@ -527,13 +524,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let substs = Ty::substs_from_path(self.db, resolver, path, strukt.into());
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.apply_substs(substs));
(ty, Some(VariantDef::Struct(strukt.into())))
(ty, Some(strukt.into()))
}
Some(TypeNs::EnumVariantId(var)) => {
let substs = Ty::substs_from_path(self.db, resolver, path, var.into());
let ty = self.db.ty(var.parent.into());
let ty = self.insert_type_vars(ty.apply_substs(substs));
(ty, Some(VariantDef::EnumVariant(var.into())))
(ty, Some(var.into()))
}
Some(_) | None => (Ty::Unknown, None),
}
@ -557,22 +554,22 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.infer_expr(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
}
fn resolve_into_iter_item(&self) -> Option<TypeAlias> {
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let path = known::std_iter_into_iterator();
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::ITEM_TYPE).map(TypeAlias::from)
self.db.trait_data(trait_).associated_type_by_name(&name::ITEM_TYPE)
}
fn resolve_ops_try_ok(&self) -> Option<TypeAlias> {
fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
let path = known::std_ops_try();
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::OK_TYPE).map(TypeAlias::from)
self.db.trait_data(trait_).associated_type_by_name(&name::OK_TYPE)
}
fn resolve_future_future_output(&self) -> Option<TypeAlias> {
fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let path = known::std_future_future();
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::OUTPUT_TYPE).map(TypeAlias::from)
self.db.trait_data(trait_).associated_type_by_name(&name::OUTPUT_TYPE)
}
fn resolve_boxed_box(&self) -> Option<AdtId> {
@ -696,9 +693,10 @@ impl Expectation {
}
mod diagnostics {
use hir_def::{expr::ExprId, FunctionId, HasSource, Lookup};
use hir_expand::diagnostics::DiagnosticSink;
use crate::{db::HirDatabase, diagnostics::NoSuchField, expr::ExprId, Function, HasSource};
use crate::{db::HirDatabase, diagnostics::NoSuchField};
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum InferenceDiagnostic {
@ -709,13 +707,14 @@ mod diagnostics {
pub(super) fn add_to(
&self,
db: &impl HirDatabase,
owner: Function,
owner: FunctionId,
sink: &mut DiagnosticSink,
) {
match self {
InferenceDiagnostic::NoSuchField { expr, field } => {
let file = owner.source(db).file_id;
let field = owner.body_source_map(db).field_syntax(*expr, *field);
let file = owner.lookup(db).source(db).file_id;
let (_, source_map) = db.body_with_source_map(owner.into());
let field = source_map.field_syntax(*expr, *field);
sink.push(NoSuchField { file, field })
}
}

View File

@ -16,9 +16,9 @@ use hir_expand::name::{self, Name};
use crate::{
db::HirDatabase,
ty::{
autoderef, method_resolution, op, traits::InEnvironment, CallableDef, InferTy, IntTy,
Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
TypeWalk, Uncertain,
autoderef, method_resolution, op, traits::InEnvironment, utils::variant_data, CallableDef,
InferTy, IntTy, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs,
TraitRef, Ty, TypeCtor, TypeWalk, Uncertain,
},
};
@ -100,7 +100,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let projection = ProjectionPredicate {
ty: pat_ty.clone(),
projection_ty: ProjectionTy {
associated_ty: into_iter_item_alias.id,
associated_ty: into_iter_item_alias,
parameters: Substs::single(iterable_ty),
},
};
@ -218,22 +218,26 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_types =
def_id.map(|it| self.db.field_types(it.into())).unwrap_or_default();
let variant_data = def_id.map(|it| variant_data(self.db, it));
for (field_idx, field) in fields.iter().enumerate() {
let field_def = def_id.and_then(|it| match it.field(self.db, &field.name) {
Some(field) => Some(field),
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
});
let field_def =
variant_data.as_ref().and_then(|it| match it.field(&field.name) {
Some(local_id) => {
Some(StructFieldId { parent: def_id.unwrap(), local_id })
}
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
});
if let Some(field_def) = field_def {
self.result.record_field_resolutions.insert(field.expr, field_def);
}
let field_ty = field_def
.map_or(Ty::Unknown, |it| field_types[it.id].clone())
.map_or(Ty::Unknown, |it| field_types[it.local_id].clone())
.subst(&substs);
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
}
@ -262,7 +266,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.db.struct_data(s).variant_data.field(name).map(|local_id| {
let field = StructFieldId { parent: s.into(), local_id }.into();
self.write_field_resolution(tgt_expr, field);
self.db.field_types(s.into())[field.id]
self.db.field_types(s.into())[field.local_id]
.clone()
.subst(&a_ty.parameters)
})
@ -285,7 +289,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let projection = ProjectionPredicate {
ty: ty.clone(),
projection_ty: ProjectionTy {
associated_ty: future_future_output_alias.id,
associated_ty: future_future_output_alias,
parameters: Substs::single(inner_ty),
},
};
@ -304,7 +308,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let projection = ProjectionPredicate {
ty: ty.clone(),
projection_ty: ProjectionTy {
associated_ty: ops_try_ok_alias.id,
associated_ty: ops_try_ok_alias,
parameters: Substs::single(inner_ty),
},
};
@ -557,7 +561,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Some((ty, func)) => {
let ty = canonicalized_receiver.decanonicalize_ty(ty);
self.write_method_resolution(tgt_expr, func);
(ty, self.db.value_ty(func.id.into()), Some(self.db.generic_params(func.id.into())))
(ty, self.db.value_ty(func.into()), Some(self.db.generic_params(func.into())))
}
None => (receiver_ty, Ty::Unknown, None),
};

View File

@ -14,7 +14,7 @@ use test_utils::tested_by;
use super::{BindingMode, InferenceContext};
use crate::{
db::HirDatabase,
ty::{Substs, Ty, TypeCtor, TypeWalk},
ty::{utils::variant_data, Substs, Ty, TypeCtor, TypeWalk},
};
impl<'a, D: HirDatabase> InferenceContext<'a, D> {
@ -26,16 +26,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
default_bm: BindingMode,
) -> Ty {
let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db, it));
self.unify(&ty, expected);
let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
for (i, &subpat) in subpats.iter().enumerate() {
let expected_ty = def
.and_then(|d| d.field(self.db, &Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field_tys[field.id].clone())
let expected_ty = var_data
.as_ref()
.and_then(|d| d.field(&Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field_tys[field].clone())
.subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat, &expected_ty, default_bm);
@ -53,6 +55,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
id: PatId,
) -> Ty {
let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db, it));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
@ -63,10 +66,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
for subpat in subpats {
let matching_field = def.and_then(|it| it.field(self.db, &subpat.name));
let expected_ty = matching_field
.map_or(Ty::Unknown, |field| field_tys[field.id].clone())
.subst(&substs);
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
let expected_ty =
matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone()).subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat.pat, &expected_ty, default_bm);
}

View File

@ -11,7 +11,7 @@ use std::sync::Arc;
use hir_def::{
builtin_type::BuiltinType,
generics::WherePredicate,
path::{GenericArg, PathSegment},
path::{GenericArg, Path, PathSegment},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule,
@ -28,11 +28,10 @@ use crate::{
db::HirDatabase,
ty::{
primitive::{FloatTy, IntTy},
utils::{all_super_traits, associated_type_by_name_including_super_traits},
utils::{all_super_traits, associated_type_by_name_including_super_traits, variant_data},
},
util::make_mut_slice,
Adt, Const, Enum, EnumVariant, Function, ImplBlock, ModuleDef, Path, Static, Struct, Trait,
TypeAlias, Union,
ImplBlock, Trait,
};
impl Ty {
@ -514,13 +513,11 @@ pub(crate) fn field_types_query(
db: &impl HirDatabase,
variant_id: VariantId,
) -> Arc<ArenaMap<LocalStructFieldId, Ty>> {
let (resolver, var_data) = match variant_id {
VariantId::StructId(it) => (it.resolver(db), db.struct_data(it).variant_data.clone()),
VariantId::UnionId(it) => (it.resolver(db), db.union_data(it).variant_data.clone()),
VariantId::EnumVariantId(it) => (
it.parent.resolver(db),
db.enum_data(it.parent).variants[it.local_id].variant_data.clone(),
),
let var_data = variant_data(db, variant_id);
let resolver = match variant_id {
VariantId::StructId(it) => it.resolver(db),
VariantId::UnionId(it) => it.resolver(db),
VariantId::EnumVariantId(it) => it.parent.resolver(db),
};
let mut res = ArenaMap::default();
for (field_id, field_data) in var_data.fields().iter() {
@ -695,42 +692,6 @@ fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Ty {
inner.subst(&substs)
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TypableDef {
Function(Function),
Adt(Adt),
EnumVariant(EnumVariant),
TypeAlias(TypeAlias),
Const(Const),
Static(Static),
BuiltinType(BuiltinType),
}
impl_froms!(
TypableDef: Function,
Adt(Struct, Enum, Union),
EnumVariant,
TypeAlias,
Const,
Static,
BuiltinType
);
impl From<ModuleDef> for Option<TypableDef> {
fn from(def: ModuleDef) -> Option<TypableDef> {
let res = match def {
ModuleDef::Function(f) => f.into(),
ModuleDef::Adt(adt) => adt.into(),
ModuleDef::EnumVariant(v) => v.into(),
ModuleDef::TypeAlias(t) => t.into(),
ModuleDef::Const(v) => v.into(),
ModuleDef::Static(v) => v.into(),
ModuleDef::BuiltinType(t) => t.into(),
ModuleDef::Module(_) | ModuleDef::Trait(_) => return None,
};
Some(res)
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CallableDef {
FunctionId(FunctionId),

View File

@ -7,7 +7,7 @@ use std::sync::Arc;
use arrayvec::ArrayVec;
use hir_def::{
lang_item::LangItemTarget, resolver::HasResolver, resolver::Resolver, type_ref::Mutability,
AssocItemId, AstItemDef, HasModule, ImplId, TraitId,
AssocItemId, AstItemDef, FunctionId, HasModule, ImplId, TraitId,
};
use hir_expand::name::Name;
use ra_db::CrateId;
@ -18,7 +18,6 @@ use crate::{
db::HirDatabase,
ty::primitive::{FloatBitness, Uncertain},
ty::{utils::all_super_traits, Ty, TypeCtor},
Function,
};
use super::{autoderef, Canonical, InEnvironment, TraitEnvironment, TraitRef};
@ -154,10 +153,10 @@ pub(crate) fn lookup_method(
db: &impl HirDatabase,
name: &Name,
resolver: &Resolver,
) -> Option<(Ty, Function)> {
) -> Option<(Ty, FunctionId)> {
iterate_method_candidates(ty, db, resolver, Some(name), LookupMode::MethodCall, |ty, f| match f
{
AssocItemId::FunctionId(f) => Some((ty.clone(), f.into())),
AssocItemId::FunctionId(f) => Some((ty.clone(), f)),
_ => None,
})
}

View File

@ -1,10 +1,8 @@
//! FIXME: write short doc here
use hir_def::expr::{BinaryOp, CmpOp};
use super::{InferTy, Ty, TypeCtor};
use crate::{
expr::{BinaryOp, CmpOp},
ty::ApplicationTy,
};
use crate::ty::ApplicationTy;
pub(super) fn binary_op_return_ty(op: BinaryOp, rhs_ty: Ty) -> Ty {
match op {

View File

@ -1,11 +1,13 @@
//! Helper functions for working with def, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
use std::sync::Arc;
use hir_def::{
adt::VariantData,
db::DefDatabase,
resolver::{HasResolver, TypeNs},
type_ref::TypeRef,
TraitId, TypeAliasId,
TraitId, TypeAliasId, VariantId,
};
use hir_expand::name::{self, Name};
@ -61,3 +63,13 @@ pub(super) fn associated_type_by_name_including_super_traits(
.into_iter()
.find_map(|t| db.trait_data(t).associated_type_by_name(name))
}
/// Fetches the `VariantData` (the field list) for any `VariantId`.
///
/// Dispatches on the variant kind: structs and unions carry
/// `variant_data` directly on their item data, while an enum variant's
/// data lives inside the parent enum's data, keyed by `local_id`.
/// Returns an `Arc` clone of the already-computed data (cheap refcount
/// bump, no deep copy).
pub(super) fn variant_data(db: &impl DefDatabase, var: VariantId) -> Arc<VariantData> {
match var {
VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
VariantId::EnumVariantId(it) => {
db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
}
}
}