remove useless conversions

Daniel Eades 2022-12-30 08:42:44 +00:00
parent cc80c5bd07
commit efd2c20e96
17 changed files with 26 additions and 38 deletions
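Every hunk below removes a conversion call that is statically a no-op: either .into() (or T::from(x)) where the source and target types are already identical, which resolves to the standard library's reflexive impl<T> From<T> for T, or .into_iter() on a value that is already an Iterator, which is covered by the blanket impl<I: Iterator> IntoIterator for I. A minimal, self-contained sketch of both patterns; the values are illustrative and not taken from rust-analyzer:

    // Illustration only, not part of this commit.
    fn main() {
        // 1. Identity conversion: u32 -> u32 goes through the reflexive
        //    `impl<T> From<T> for T { fn from(t: T) -> T { t } }`, so it just returns the value.
        let id: u32 = 42;
        let same: u32 = id.into(); // equivalent to plain `id`
        assert_eq!(same, id);

        // 2. `.into_iter()` on something that is already an Iterator returns `self` unchanged,
        //    so dropping the call changes nothing.
        let doubled: Vec<u32> = (0..4).map(|x| x * 2).into_iter().collect();
        let doubled_simpler: Vec<u32> = (0..4).map(|x| x * 2).collect();
        assert_eq!(doubled, doubled_simpler);
    }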

@@ -67,7 +67,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
 let dep_def_map = db.crate_def_map(dep.crate_id);
 let dep_root = dep_def_map.module_id(dep_def_map.root);
-deps.insert(dep.as_name(), dep_root.into());
+deps.insert(dep.as_name(), dep_root);
 if dep.is_prelude() && !tree_id.is_block() {
 def_map.extern_prelude.insert(dep.as_name(), dep_root);
@@ -2085,7 +2085,7 @@ fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
 .scope
 .get_legacy_macro(name)
 .and_then(|it| it.last())
-.map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
+.map(|&it| macro_id_to_def_id(self.def_collector.db, it))
 },
 )
 })

@@ -390,7 +390,7 @@ fn resolve_name_in_module(
 .get_legacy_macro(name)
 // FIXME: shadowing
 .and_then(|it| it.last())
-.map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
+.map_or_else(PerNs::none, |&m| PerNs::macros(m, Visibility::Public));
 let from_scope = self[module].scope.get(name);
 let from_builtin = match self.block {
 Some(_) => {

@@ -381,7 +381,7 @@ pub fn names_in_scope(
 });
 def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| {
 macs.iter().for_each(|&mac| {
-res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))));
+res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
 })
 });
 def_map.extern_prelude().for_each(|(name, &def)| {
@@ -517,10 +517,7 @@ fn process_names(&self, acc: &mut ScopeNames, db: &dyn DefDatabase) {
 });
 m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| {
 macs.iter().for_each(|&mac| {
-acc.add(
-name,
-ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))),
-);
+acc.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
 })
 });
 }

@@ -25,7 +25,6 @@ fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
 let scope = &def_map[module_id.local_id].scope;
 let const_id = scope
 .declarations()
-.into_iter()
 .find_map(|x| match x {
 hir_def::ModuleDefId::ConstId(x) => {
 if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {

@@ -960,7 +960,7 @@ pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
 Expr::RecordLit { path, fields, .. } => {
 let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
-self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs)
 }
 Expr::Underscore => rhs_ty.clone(),
 _ => {

@@ -220,7 +220,7 @@ pub(super) fn infer_pat(
 ),
 Pat::Record { path: p, args: fields, ellipsis: _ } => {
 let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
-self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
 }
 Pat::Path(path) => {
 // FIXME use correct resolver for the surrounding expression

@@ -29,7 +29,6 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
 let scope = &def_map[module_id.local_id].scope;
 let adt_id = scope
 .declarations()
-.into_iter()
 .find_map(|x| match x {
 hir_def::ModuleDefId::AdtId(x) => {
 let name = match x {

@@ -608,7 +608,7 @@ pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
 pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
 let def_map = self.id.def_map(db.upcast());
 let scope = &def_map[self.id.local_id].scope;
-scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
+scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect()
 }
 pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
@@ -2411,7 +2411,7 @@ pub struct DeriveHelper {
 impl DeriveHelper {
 pub fn derive(&self) -> Macro {
-Macro { id: self.derive.into() }
+Macro { id: self.derive }
 }
 pub fn name(&self, db: &dyn HirDatabase) -> Name {
@@ -2781,7 +2781,7 @@ pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
 pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
 let krate = trait_.module(db).krate();
 let mut all = Vec::new();
-for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+for Crate { id } in krate.transitive_reverse_dependencies(db) {
 let impls = db.trait_impls_in_crate(id);
 all.extend(impls.for_trait(trait_.id).map(Self::from))
 }
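The change to the for loop above works because a for loop already desugars to a call to IntoIterator::into_iter on its head expression, so writing .into_iter() there adds nothing, whether the expression is a collection or already an iterator. A rough sketch of the desugaring, using a plain Vec rather than the rust-analyzer types:

    // Illustration only; the values are made up.
    fn main() {
        let deps = vec!["core", "alloc", "std"];

        // `for name in &deps { ... }` ...
        for name in &deps {
            println!("{name}");
        }

        // ...is roughly equivalent to this, which is why an explicit
        // `.into_iter()` in the loop head is redundant:
        let mut it = IntoIterator::into_iter(&deps);
        while let Some(name) = it.next() {
            println!("{name}");
        }
    }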

@@ -270,7 +270,7 @@ pub(crate) fn resolve_await_to_poll(
 db: &dyn HirDatabase,
 await_expr: &ast::AwaitExpr,
 ) -> Option<FunctionId> {
-let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone();
+let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone();
 let into_future_trait = self
 .resolver
@@ -316,7 +316,7 @@ pub(crate) fn resolve_prefix_expr(
 ast::UnaryOp::Not => name![not],
 ast::UnaryOp::Neg => name![neg],
 };
-let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
+let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?;
 let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
 // HACK: subst for all methods coincides with that for their trait because the methods
@@ -331,8 +331,8 @@ pub(crate) fn resolve_index_expr(
 db: &dyn HirDatabase,
 index_expr: &ast::IndexExpr,
 ) -> Option<FunctionId> {
-let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
-let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
+let base_ty = self.ty_of_expr(db, &index_expr.base()?)?;
+let index_ty = self.ty_of_expr(db, &index_expr.index()?)?;
 let lang_item_name = name![index];
@@ -352,8 +352,8 @@ pub(crate) fn resolve_bin_expr(
 binop_expr: &ast::BinExpr,
 ) -> Option<FunctionId> {
 let op = binop_expr.op_kind()?;
-let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
-let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
+let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?;
+let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?;
 let (op_trait, op_fn) = lang_names_for_bin_op(op)
 .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
@@ -372,7 +372,7 @@ pub(crate) fn resolve_try_expr(
 db: &dyn HirDatabase,
 try_expr: &ast::TryExpr,
 ) -> Option<FunctionId> {
-let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
+let ty = self.ty_of_expr(db, &try_expr.expr()?)?;
 let op_fn =
 db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;

@@ -53,7 +53,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
 return None;
 }
-let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
+let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
 if is_default_implemented(ctx, &impl_) {
 cov_mark::hit!(default_block_is_already_present);
 cov_mark::hit!(struct_in_module_with_default);

@@ -85,8 +85,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
 let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
 let field = ctx.find_node_at_offset::<ast::TupleField>()?;
 let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
-let field_list_index =
-field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?;
+let field_list_index = field_list.syntax().children().position(|s| &s == field.syntax())?;
 let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
 None => DerefType::Deref,

@@ -42,7 +42,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
 let r_angle = generic_args.r_angle_token()?;
 let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end());
-let turbofish_args: Vec<GenericArg> = generic_args.generic_args().into_iter().collect();
+let turbofish_args: Vec<GenericArg> = generic_args.generic_args().collect();
 // Find type of ::<_>
 if turbofish_args.len() != 1 {

@@ -183,7 +183,7 @@ pub(crate) fn check_edit_with_config(
 let ra_fixture_after = trim_indent(ra_fixture_after);
 let (db, position) = position(ra_fixture_before);
 let completions: Vec<CompletionItem> =
-crate::completions(&db, &config, position, None).unwrap().into();
+crate::completions(&db, &config, position, None).unwrap();
 let (completion,) = completions
 .iter()
 .filter(|it| it.lookup() == what)

@@ -364,11 +364,8 @@ fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
 }
 Err(err) => {
 if ra_fixture_after.starts_with("error:") {
-let error_message = ra_fixture_after
-.chars()
-.into_iter()
-.skip("error:".len())
-.collect::<String>();
+let error_message =
+ra_fixture_after.chars().skip("error:".len()).collect::<String>();
 assert_eq!(error_message.trim(), err.to_string());
 } else {
 panic!("Rename to '{new_name}' failed unexpectedly: {err}")

@@ -145,7 +145,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
 }
 if iter.peek_n(0).is_some() {
-res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() });
 }
 res

@@ -101,8 +101,7 @@ pub(crate) fn diagnostics_for(
 file_id: FileId,
 ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
 let native = self.native.get(&file_id).into_iter().flatten();
-let check =
-self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
+let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
 native.chain(check)
 }
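In the hunk above, the lookup on self.native appears to return an Option, which is IntoIterator but not Iterator, so its .into_iter() is kept; filter_map, by contrast, already yields an iterator, which is why the second .into_iter() could be dropped. A small sketch of that distinction with ordinary std types (the maps and values are made up for illustration):

    use std::collections::HashMap;

    fn main() {
        // Made-up stand-ins for the native and check diagnostic stores.
        let native: HashMap<u32, Vec<&str>> = HashMap::from([(1, vec!["native diag"])]);
        let check: HashMap<&str, HashMap<u32, Vec<&str>>> =
            HashMap::from([("cargo", HashMap::from([(1, vec!["check diag"])]))]);
        let file_id: u32 = 1;

        // `get` returns an Option, which is IntoIterator but not Iterator,
        // so `.into_iter()` is required before `.flatten()`.
        let native_iter = native.get(&file_id).into_iter().flatten();

        // `filter_map` already returns an Iterator, so no `.into_iter()` is
        // needed; `.flatten()` then walks each inner Vec it yields.
        let check_iter = check.values().filter_map(|per_file| per_file.get(&file_id)).flatten();

        let all: Vec<&&str> = native_iter.chain(check_iter).collect();
        println!("{all:?}");
    }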

@@ -1101,9 +1101,7 @@ pub(crate) fn handle_code_action(
 }
 // Fixes from `cargo check`.
-for fix in
-snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
-{
+for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() {
 // FIXME: this mapping is awkward and shouldn't exist. Refactor
 // `snap.check_fixes` to not convert to LSP prematurely.
 let intersect_fix_range = fix