3882: Move computation of missing fields into hir r=matklad a=matklad

cc @SomeoneToIgnore, this is that refactoring that moves computation of missing fields to hir. 

it actually removes meaningful duplication between diagnostics code and the completion code. Nonetheless, it's a net addition of code :(

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2020-04-07 16:48:15 +00:00 committed by GitHub
commit 0c927b4584
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 198 additions and 168 deletions

View File

@ -1027,8 +1027,16 @@ pub(crate) fn new_with_resolver(
ty: Ty, ty: Ty,
) -> Option<Type> { ) -> Option<Type> {
let krate = resolver.krate()?; let krate = resolver.krate()?;
Some(Type::new_with_resolver_inner(db, krate, resolver, ty))
}
pub(crate) fn new_with_resolver_inner(
db: &dyn HirDatabase,
krate: CrateId,
resolver: &Resolver,
ty: Ty,
) -> Type {
let environment = TraitEnvironment::lower(db, &resolver); let environment = TraitEnvironment::lower(db, &resolver);
Some(Type { krate, ty: InEnvironment { value: ty, environment } }) Type { krate, ty: InEnvironment { value: ty, environment } }
} }
fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type {
@ -1152,27 +1160,6 @@ pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
res res
} }
pub fn variant_fields(
&self,
db: &dyn HirDatabase,
def: VariantDef,
) -> Vec<(StructField, Type)> {
// FIXME: check that ty and def match
match &self.ty.value {
Ty::Apply(a_ty) => {
let field_types = db.field_types(def.into());
def.fields(db)
.into_iter()
.map(|it| {
let ty = field_types[it.id].clone().subst(&a_ty.parameters);
(it, self.derived(ty))
})
.collect()
}
_ => Vec::new(),
}
}
pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
// There should be no inference vars in types passed here // There should be no inference vars in types passed here
// FIXME check that? // FIXME check that?

View File

@ -23,7 +23,7 @@
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, SourceAnalyzer}, source_analyzer::{resolve_hir_path, SourceAnalyzer},
AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name, AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name,
Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef, Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam,
}; };
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -187,14 +187,6 @@ pub fn resolve_record_field(
self.analyze(field.syntax()).resolve_record_field(self.db, field) self.analyze(field.syntax()).resolve_record_field(self.db, field)
} }
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
self.analyze(record_lit.syntax()).resolve_record_literal(self.db, record_lit)
}
pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
let sa = self.analyze(macro_call.syntax()); let sa = self.analyze(macro_call.syntax());
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
@ -212,6 +204,24 @@ pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef>
// FIXME: use this instead? // FIXME: use this instead?
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
/// Returns the fields that `literal` does not mention, paired with their
/// types, for the struct/variant the literal resolves to.
pub fn record_literal_missing_fields(
&self,
literal: &ast::RecordLit,
) -> Vec<(StructField, Type)> {
// Analysis failure (e.g. the literal cannot be resolved) is collapsed
// into "no missing fields" instead of surfacing an `Option` to callers.
self.analyze(literal.syntax())
.record_literal_missing_fields(self.db, literal)
.unwrap_or_default()
}
/// Returns the fields that `pattern` does not mention, paired with their
/// types, for the struct/variant the pattern resolves to.
pub fn record_pattern_missing_fields(
&self,
pattern: &ast::RecordPat,
) -> Vec<(StructField, Type)> {
// Analysis failure (e.g. the pattern cannot be resolved) is collapsed
// into "no missing fields" instead of surfacing an `Option` to callers.
self.analyze(pattern.syntax())
.record_pattern_missing_fields(self.db, pattern)
.unwrap_or_default()
}
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax().clone()).with_value(src).cloned(); let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
T::to_def(self, src) T::to_def(self, src)

View File

@ -14,10 +14,13 @@
}, },
expr::{ExprId, Pat, PatId}, expr::{ExprId, Pat, PatId},
resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
AsMacroCall, DefWithBodyId, AsMacroCall, DefWithBodyId, LocalStructFieldId, StructFieldId, VariantId,
}; };
use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
use hir_ty::InferenceResult; use hir_ty::{
expr::{record_literal_missing_fields, record_pattern_missing_fields},
InferenceResult, Substs, Ty,
};
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxNode, SyntaxNodePtr, TextUnit, SyntaxNode, SyntaxNodePtr, TextUnit,
@ -25,8 +28,10 @@
use crate::{ use crate::{
db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef, db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef,
ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam, ModPath, ModuleDef, Path, PathKind, Static, Struct, StructField, Trait, Type, TypeAlias,
TypeParam,
}; };
use ra_db::CrateId;
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself. /// original source files. It should not be used inside the HIR itself.
@ -164,23 +169,6 @@ pub(crate) fn resolve_record_field(
Some((struct_field.into(), local)) Some((struct_field.into(), local))
} }
pub(crate) fn resolve_record_literal(
&self,
db: &dyn HirDatabase,
record_lit: &ast::RecordLit,
) -> Option<crate::VariantDef> {
let expr_id = self.expr_id(db, &record_lit.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
}
pub(crate) fn resolve_record_pattern(
&self,
record_pat: &ast::RecordPat,
) -> Option<crate::VariantDef> {
let pat_id = self.pat_id(&record_pat.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
}
pub(crate) fn resolve_macro_call( pub(crate) fn resolve_macro_call(
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
@ -231,6 +219,68 @@ pub(crate) fn resolve_path(
resolve_hir_path(db, &self.resolver, &hir_path) resolve_hir_path(db, &self.resolver, &hir_path)
} }
/// Computes the fields missing from the record `literal`, together with
/// their substituted (concrete) types.
///
/// Returns `None` when any prerequisite is unavailable: the resolver's
/// crate, the body, the inference result, the literal's `ExprId`, or an
/// inferred type that is not `Ty::Apply` (and thus carries no
/// substitutions to apply to the generic field types).
pub(crate) fn record_literal_missing_fields(
&self,
db: &dyn HirDatabase,
literal: &ast::RecordLit,
) -> Option<Vec<(StructField, Type)>> {
let krate = self.resolver.krate()?;
let body = self.body.as_ref()?;
let infer = self.infer.as_ref()?;
let expr_id = self.expr_id(db, &literal.clone().into())?;
// Substitutions from the literal's inferred type turn the generic
// declared field types into the concrete types used at this site.
let substs = match &infer.type_of_expr[expr_id] {
Ty::Apply(a_ty) => &a_ty.parameters,
_ => return None,
};
// The exhaustiveness flag is only relevant for diagnostics, not here.
let (variant, missing_fields, _exhaustive) =
record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
let res = self.missing_fields(db, krate, substs, variant, missing_fields);
Some(res)
}
/// Computes the fields missing from the record `pattern`, together with
/// their substituted (concrete) types.
///
/// Returns `None` when any prerequisite is unavailable: the resolver's
/// crate, the body, the inference result, the pattern's `PatId`, or an
/// inferred type that is not `Ty::Apply` (and thus carries no
/// substitutions to apply to the generic field types).
pub(crate) fn record_pattern_missing_fields(
&self,
db: &dyn HirDatabase,
pattern: &ast::RecordPat,
) -> Option<Vec<(StructField, Type)>> {
let krate = self.resolver.krate()?;
let body = self.body.as_ref()?;
let infer = self.infer.as_ref()?;
let pat_id = self.pat_id(&pattern.clone().into())?;
// Substitutions from the pattern's inferred type turn the generic
// declared field types into the concrete types used at this site.
let substs = match &infer.type_of_pat[pat_id] {
Ty::Apply(a_ty) => &a_ty.parameters,
_ => return None,
};
let (variant, missing_fields) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
let res = self.missing_fields(db, krate, substs, variant, missing_fields);
Some(res)
}
/// Resolves each missing local field id of `variant` into a
/// `(StructField, Type)` pair, applying `substs` to the declared field
/// type so the result reflects this use site's concrete types.
fn missing_fields(
    &self,
    db: &dyn HirDatabase,
    krate: CrateId,
    substs: &Substs,
    variant: VariantId,
    missing_fields: Vec<LocalStructFieldId>,
) -> Vec<(StructField, Type)> {
    // Fetch the declared field types of `variant` once, up front.
    let field_types = db.field_types(variant);
    let mut res = Vec::with_capacity(missing_fields.len());
    for local_id in missing_fields {
        let field = StructFieldId { parent: variant, local_id };
        let ty = field_types[local_id].clone().subst(substs);
        let ty = Type::new_with_resolver_inner(db, krate, &self.resolver, ty);
        res.push((field.into(), ty));
    }
    res
}
pub(crate) fn expand( pub(crate) fn expand(
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,

View File

@ -2,12 +2,8 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::{ use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId};
path::{path, Path}, use hir_expand::diagnostics::DiagnosticSink;
resolver::HasResolver,
AdtId, FunctionId,
};
use hir_expand::{diagnostics::DiagnosticSink, name::Name};
use ra_syntax::ast; use ra_syntax::ast;
use ra_syntax::AstPtr; use ra_syntax::AstPtr;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
@ -29,7 +25,7 @@
ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
}, },
VariantId, LocalStructFieldId, VariantId,
}; };
pub struct ExprValidator<'a, 'b: 'a> { pub struct ExprValidator<'a, 'b: 'a> {
@ -50,14 +46,37 @@ pub fn new(
pub fn validate_body(&mut self, db: &dyn HirDatabase) { pub fn validate_body(&mut self, db: &dyn HirDatabase) {
let body = db.body(self.func.into()); let body = db.body(self.func.into());
for e in body.exprs.iter() { for (id, expr) in body.exprs.iter() {
if let (id, Expr::RecordLit { path, fields, spread }) = e { if let Some((variant_def, missed_fields, true)) =
self.validate_record_literal(id, path, fields, *spread, db); record_literal_missing_fields(db, &self.infer, id, expr)
} else if let (id, Expr::Match { expr, arms }) = e { {
// XXX: only look at source_map if we do have missing fields
let (_, source_map) = db.body_with_source_map(self.func.into());
if let Ok(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.left() {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() {
let variant_data = variant_data(db.upcast(), variant_def);
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
.collect();
self.sink.push(MissingFields {
file: source_ptr.file_id,
field_list: AstPtr::new(&field_list),
missed_fields,
})
}
}
}
}
}
if let Expr::Match { expr, arms } = expr {
self.validate_match(id, *expr, arms, db, self.infer.clone()); self.validate_match(id, *expr, arms, db, self.infer.clone());
} }
} }
let body_expr = &body[body.body_expr]; let body_expr = &body[body.body_expr];
if let Expr::Block { tail: Some(t), .. } = body_expr { if let Expr::Block { tail: Some(t), .. } = body_expr {
self.validate_results_in_tail_expr(body.body_expr, *t, db); self.validate_results_in_tail_expr(body.body_expr, *t, db);
@ -146,61 +165,6 @@ fn validate_match(
} }
} }
fn validate_record_literal(
&mut self,
id: ExprId,
_path: &Option<Path>,
fields: &[RecordLitField],
spread: Option<ExprId>,
db: &dyn HirDatabase,
) {
if spread.is_some() {
return;
};
let variant_def: VariantId = match self.infer.variant_resolution_for_expr(id) {
Some(VariantId::UnionId(_)) | None => return,
Some(it) => it,
};
if let VariantId::UnionId(_) = variant_def {
return;
}
let variant_data = variant_data(db.upcast(), variant_def);
let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = variant_data
.fields()
.iter()
.filter_map(|(_f, d)| {
let name = d.name.clone();
if lit_fields.contains(&name) {
None
} else {
Some(name)
}
})
.collect();
if missed_fields.is_empty() {
return;
}
let (_, source_map) = db.body_with_source_map(self.func.into());
if let Ok(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.left() {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() {
self.sink.push(MissingFields {
file: source_ptr.file_id,
field_list: AstPtr::new(&field_list),
missed_fields,
})
}
}
}
}
}
fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
// the mismatch will be on the whole block currently // the mismatch will be on the whole block currently
let mismatch = match self.infer.type_mismatch_for_expr(body_id) { let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
@ -233,3 +197,63 @@ fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dy
} }
} }
} }
/// For a record-literal expression, returns the resolved variant, the ids
/// of fields the literal leaves unspecified, and whether the literal is
/// exhaustive (no `..spread`, so unspecified fields are truly missing).
///
/// Returns `None` for non-record expressions, unions, unresolved variants,
/// or when no fields are missing.
pub fn record_literal_missing_fields(
    db: &dyn HirDatabase,
    infer: &InferenceResult,
    id: ExprId,
    expr: &Expr,
) -> Option<(VariantId, Vec<LocalStructFieldId>, /*exhaustive*/ bool)> {
    // Only record literals are of interest; a `..spread` may supply the
    // remaining fields, so its presence makes the literal non-exhaustive.
    let (fields, exhaustive) = if let Expr::RecordLit { path: _, fields, spread } = expr {
        (fields, spread.is_none())
    } else {
        return None;
    };
    let variant_def = infer.variant_resolution_for_expr(id)?;
    // Unions are skipped, mirroring the old diagnostics behavior.
    if let VariantId::UnionId(_) = variant_def {
        return None;
    }
    let variant_data = variant_data(db.upcast(), variant_def);
    let specified: FxHashSet<_> = fields.iter().map(|field| &field.name).collect();
    let missed_fields: Vec<LocalStructFieldId> = variant_data
        .fields()
        .iter()
        .filter(|(_, data)| !specified.contains(&data.name))
        .map(|(field_id, _)| field_id)
        .collect();
    if missed_fields.is_empty() {
        None
    } else {
        Some((variant_def, missed_fields, exhaustive))
    }
}
/// For a record pattern, returns the resolved variant and the ids of
/// fields the pattern does not bind.
///
/// Returns `None` for non-record patterns, unions, unresolved variants,
/// or when no fields are missing.
pub fn record_pattern_missing_fields(
    db: &dyn HirDatabase,
    infer: &InferenceResult,
    id: PatId,
    pat: &Pat,
) -> Option<(VariantId, Vec<LocalStructFieldId>)> {
    // Only record patterns can leave fields unmentioned.
    let args = if let Pat::Record { path: _, args } = pat {
        args
    } else {
        return None;
    };
    let variant_def = infer.variant_resolution_for_pat(id)?;
    // Unions are skipped, mirroring the old diagnostics behavior.
    if let VariantId::UnionId(_) = variant_def {
        return None;
    }
    let variant_data = variant_data(db.upcast(), variant_def);
    let mentioned: FxHashSet<_> = args.iter().map(|field| &field.name).collect();
    let missed_fields: Vec<LocalStructFieldId> = variant_data
        .fields()
        .iter()
        .filter(|(_, data)| !mentioned.contains(&data.name))
        .map(|(field_id, _)| field_id)
        .collect();
    if missed_fields.is_empty() {
        None
    } else {
        Some((variant_def, missed_fields))
    }
}

View File

@ -1,62 +1,21 @@
//! Complete fields in record literals and patterns. //! Complete fields in record literals and patterns.
use ra_syntax::{ast, ast::NameOwner, SmolStr};
use crate::completion::{CompletionContext, Completions}; use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
let (ty, variant, already_present_fields) = let missing_fields = match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
(None, None) => return None, (None, None) => return None,
(Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"), (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
(Some(record_pat), _) => ( (Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat),
ctx.sema.type_of_pat(&record_pat.clone().into())?, (_, Some(record_lit)) => ctx.sema.record_literal_missing_fields(record_lit),
ctx.sema.resolve_record_pattern(record_pat)?,
pattern_ascribed_fields(record_pat),
),
(_, Some(record_lit)) => (
ctx.sema.type_of_expr(&record_lit.clone().into())?,
ctx.sema.resolve_record_literal(record_lit)?,
literal_ascribed_fields(record_lit),
),
}; };
for (field, field_ty) in ty.variant_fields(ctx.db, variant).into_iter().filter(|(field, _)| { for (field, ty) in missing_fields {
// FIXME: already_present_names better be `Vec<hir::Name>` acc.add_field(ctx, field, &ty)
!already_present_fields.contains(&SmolStr::from(field.name(ctx.db).to_string()))
}) {
acc.add_field(ctx, field, &field_ty);
} }
Some(()) Some(())
} }
fn literal_ascribed_fields(record_lit: &ast::RecordLit) -> Vec<SmolStr> {
record_lit
.record_field_list()
.map(|field_list| field_list.fields())
.map(|fields| {
fields
.into_iter()
.filter_map(|field| field.name_ref())
.map(|name_ref| name_ref.text().clone())
.collect()
})
.unwrap_or_default()
}
fn pattern_ascribed_fields(record_pat: &ast::RecordPat) -> Vec<SmolStr> {
record_pat
.record_field_pat_list()
.map(|pat_list| {
pat_list
.record_field_pats()
.filter_map(|fild_pat| fild_pat.name())
.chain(pat_list.bind_pats().filter_map(|bind_pat| bind_pat.name()))
.map(|name| name.text().clone())
.collect()
})
.unwrap_or_default()
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
mod record_pat_tests { mod record_pat_tests {