remove useless conversions
commit efd2c20e96
parent cc80c5bd07
@@ -67,7 +67,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
 let dep_def_map = db.crate_def_map(dep.crate_id);
 let dep_root = dep_def_map.module_id(dep_def_map.root);

-deps.insert(dep.as_name(), dep_root.into());
+deps.insert(dep.as_name(), dep_root);

 if dep.is_prelude() && !tree_id.is_block() {
 def_map.extern_prelude.insert(dep.as_name(), dep_root);
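This first hunk shows the pattern most of the commit repeats: calling `.into()` on a value that already has the target type. Because the standard library provides a reflexive `impl<T> From<T> for T`, such a call compiles but does nothing. A minimal sketch with a stand-in `ModuleId` newtype (not rust-analyzer's real type), assuming `deps` already stores the same id type as `dep_root`:

```rust
// Stand-in newtype; the reflexive `impl<T> From<T> for T` in std makes a
// same-type `.into()` an identity call.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ModuleId(u32);

fn main() {
    let dep_root = ModuleId(0);
    let converted: ModuleId = dep_root.into(); // converts ModuleId into ModuleId
    assert_eq!(converted, dep_root); // nothing changed
}
```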
@@ -2085,7 +2085,7 @@ fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
 .scope
 .get_legacy_macro(name)
 .and_then(|it| it.last())
-.map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
+.map(|&it| macro_id_to_def_id(self.def_collector.db, it))
 },
 )
 })
@@ -390,7 +390,7 @@ fn resolve_name_in_module(
 .get_legacy_macro(name)
 // FIXME: shadowing
 .and_then(|it| it.last())
-.map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
+.map_or_else(PerNs::none, |&m| PerNs::macros(m, Visibility::Public));
 let from_scope = self[module].scope.get(name);
 let from_builtin = match self.block {
 Some(_) => {
@@ -381,7 +381,7 @@ pub fn names_in_scope(
 });
 def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| {
 macs.iter().for_each(|&mac| {
-res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))));
+res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
 })
 });
 def_map.extern_prelude().for_each(|(name, &def)| {
@@ -517,10 +517,7 @@ fn process_names(&self, acc: &mut ScopeNames, db: &dyn DefDatabase) {
 });
 m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| {
 macs.iter().for_each(|&mac| {
-acc.add(
-name,
-ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))),
-);
+acc.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
 })
 });
 }
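The two scope-collection hunks above drop `MacroId::from(mac)` where, judging by the surviving line, `mac` is already a `MacroId`. `T::from(t)` is just the explicit spelling of the same reflexive conversion, so it can be deleted outright. A stand-in sketch:

```rust
// Stand-in type: `MacroId::from` on a value that is already a `MacroId`
// resolves to the reflexive `impl<T> From<T> for T` and returns it unchanged.
#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroId(u32);

fn main() {
    let mac = MacroId(7);
    assert_eq!(MacroId::from(mac), mac);
}
```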
@@ -25,7 +25,6 @@ fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
 let scope = &def_map[module_id.local_id].scope;
 let const_id = scope
 .declarations()
-.into_iter()
 .find_map(|x| match x {
 hir_def::ModuleDefId::ConstId(x) => {
 if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {
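Here the deleted line is `.into_iter()` in the middle of an iterator chain. `declarations()` evidently already returns an iterator, and `IntoIterator` has a blanket implementation for every `Iterator` that just returns the receiver, so the call was a no-op. A self-contained sketch (the `declarations` function below is a made-up stand-in):

```rust
// `into_iter()` on something that is already an Iterator is an identity call,
// courtesy of the blanket `impl<I: Iterator> IntoIterator for I`.
fn declarations() -> impl Iterator<Item = u32> {
    (0..10).filter(|n| n % 2 == 0)
}

fn main() {
    let with_call: Vec<u32> = declarations().into_iter().collect();
    let without: Vec<u32> = declarations().collect();
    assert_eq!(with_call, without); // same iterator either way
}
```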
@@ -960,7 +960,7 @@ pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
 Expr::RecordLit { path, fields, .. } => {
 let subs = fields.iter().map(|f| (f.name.clone(), f.expr));

-self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs)
 }
 Expr::Underscore => rhs_ty.clone(),
 _ => {
@@ -220,7 +220,7 @@ pub(super) fn infer_pat(
 ),
 Pat::Record { path: p, args: fields, ellipsis: _ } => {
 let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
-self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
 }
 Pat::Path(path) => {
 // FIXME use correct resolver for the surrounding expression
@@ -29,7 +29,6 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
 let scope = &def_map[module_id.local_id].scope;
 let adt_id = scope
 .declarations()
-.into_iter()
 .find_map(|x| match x {
 hir_def::ModuleDefId::AdtId(x) => {
 let name = match x {
@@ -608,7 +608,7 @@ pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
 pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
 let def_map = self.id.def_map(db.upcast());
 let scope = &def_map[self.id.local_id].scope;
-scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
+scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect()
 }

 pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
@@ -2411,7 +2411,7 @@ pub struct DeriveHelper {

 impl DeriveHelper {
 pub fn derive(&self) -> Macro {
-Macro { id: self.derive.into() }
+Macro { id: self.derive }
 }

 pub fn name(&self, db: &dyn HirDatabase) -> Name {
@@ -2781,7 +2781,7 @@ pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
 pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
 let krate = trait_.module(db).krate();
 let mut all = Vec::new();
-for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+for Crate { id } in krate.transitive_reverse_dependencies(db) {
 let impls = db.trait_impls_in_crate(id);
 all.extend(impls.for_trait(trait_.id).map(Self::from))
 }
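This hunk is the `for`-loop flavour of the same thing: a `for` loop already desugars through `IntoIterator::into_iter` on the expression after `in`, so writing `.into_iter()` there is redundant whatever the expression's type. A sketch with an ordinary `Vec` standing in for the crate graph:

```rust
// The explicit `.into_iter()` duplicates what the `for` desugaring does anyway.
fn main() {
    let deps = vec![1, 2, 3];

    let mut explicit = 0;
    for d in deps.clone().into_iter() {
        explicit += d;
    }

    let mut implicit = 0;
    for d in deps {
        // the loop itself calls IntoIterator::into_iter(deps)
        implicit += d;
    }

    assert_eq!(explicit, implicit);
}
```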
@@ -270,7 +270,7 @@ pub(crate) fn resolve_await_to_poll(
 db: &dyn HirDatabase,
 await_expr: &ast::AwaitExpr,
 ) -> Option<FunctionId> {
-let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone();
+let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone();

 let into_future_trait = self
 .resolver
@@ -316,7 +316,7 @@ pub(crate) fn resolve_prefix_expr(
 ast::UnaryOp::Not => name![not],
 ast::UnaryOp::Neg => name![neg],
 };
-let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
+let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?;

 let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
 // HACK: subst for all methods coincides with that for their trait because the methods
@@ -331,8 +331,8 @@ pub(crate) fn resolve_index_expr(
 db: &dyn HirDatabase,
 index_expr: &ast::IndexExpr,
 ) -> Option<FunctionId> {
-let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
-let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
+let base_ty = self.ty_of_expr(db, &index_expr.base()?)?;
+let index_ty = self.ty_of_expr(db, &index_expr.index()?)?;

 let lang_item_name = name![index];

@@ -352,8 +352,8 @@ pub(crate) fn resolve_bin_expr(
 binop_expr: &ast::BinExpr,
 ) -> Option<FunctionId> {
 let op = binop_expr.op_kind()?;
-let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
-let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
+let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?;
+let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?;

 let (op_trait, op_fn) = lang_names_for_bin_op(op)
 .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
@@ -372,7 +372,7 @@ pub(crate) fn resolve_try_expr(
 db: &dyn HirDatabase,
 try_expr: &ast::TryExpr,
 ) -> Option<FunctionId> {
-let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
+let ty = self.ty_of_expr(db, &try_expr.expr()?)?;

 let op_fn =
 db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
@@ -53,7 +53,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
 return None;
 }

-let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
+let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
 if is_default_implemented(ctx, &impl_) {
 cov_mark::hit!(default_block_is_already_present);
 cov_mark::hit!(struct_in_module_with_default);
@@ -85,8 +85,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
 let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
 let field = ctx.find_node_at_offset::<ast::TupleField>()?;
 let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
-let field_list_index =
-field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?;
+let field_list_index = field_list.syntax().children().position(|s| &s == field.syntax())?;

 let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
 None => DerefType::Deref,
@@ -42,7 +42,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
 let r_angle = generic_args.r_angle_token()?;
 let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end());

-let turbofish_args: Vec<GenericArg> = generic_args.generic_args().into_iter().collect();
+let turbofish_args: Vec<GenericArg> = generic_args.generic_args().collect();

 // Find type of ::<_>
 if turbofish_args.len() != 1 {
@@ -183,7 +183,7 @@ pub(crate) fn check_edit_with_config(
 let ra_fixture_after = trim_indent(ra_fixture_after);
 let (db, position) = position(ra_fixture_before);
 let completions: Vec<CompletionItem> =
-crate::completions(&db, &config, position, None).unwrap().into();
+crate::completions(&db, &config, position, None).unwrap();
 let (completion,) = completions
 .iter()
 .filter(|it| it.lookup() == what)
@@ -364,11 +364,8 @@ fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
 }
 Err(err) => {
 if ra_fixture_after.starts_with("error:") {
-let error_message = ra_fixture_after
-.chars()
-.into_iter()
-.skip("error:".len())
-.collect::<String>();
+let error_message =
+ra_fixture_after.chars().skip("error:".len()).collect::<String>();
 assert_eq!(error_message.trim(), err.to_string());
 } else {
 panic!("Rename to '{new_name}' failed unexpectedly: {err}")
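The rename-test hunk is the same redundancy with a standard-library type: `str::chars()` already yields an iterator (`Chars`), so the `.into_iter()` in the middle of the chain did nothing, and once it is gone the chain fits on two lines. A small usage check with a made-up fixture string:

```rust
// `Chars` is already an Iterator; skipping the "error:" prefix works the same
// with or without an extra `.into_iter()`.
fn main() {
    let ra_fixture_after = "error: something went wrong";
    let error_message =
        ra_fixture_after.chars().skip("error:".len()).collect::<String>();
    assert_eq!(error_message.trim(), "something went wrong");
}
```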
@@ -145,7 +145,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
 }

 if iter.peek_n(0).is_some() {
-res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() });
 }

 res
@@ -101,8 +101,7 @@ pub(crate) fn diagnostics_for(
 file_id: FileId,
 ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
 let native = self.native.get(&file_id).into_iter().flatten();
-let check =
-self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
+let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
 native.chain(check)
 }

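One detail worth noting in this hunk: the untouched `self.native.get(&file_id).into_iter().flatten()` keeps its `.into_iter()` because its receiver is an `Option`, which is not itself an iterator; only the call tacked onto the `filter_map(...)` chain, which already is one, was useless. A sketch of the distinction with made-up diagnostic data:

```rust
// `.into_iter()` is meaningful on an Option (0-or-1 element iterator) but a
// no-op on an adapter such as `filter_map`, which is already an Iterator.
fn main() {
    let native: Option<Vec<&str>> = Some(vec!["native diagnostic"]);
    let check: Vec<Option<Vec<&str>>> = vec![None, Some(vec!["check diagnostic"])];

    // Needed: turns the Option into an iterator before flattening the Vec.
    let native_iter = native.into_iter().flatten();
    // Not needed on the filter_map below: it already yields an Iterator.
    let check_iter = check.into_iter().filter_map(|it| it).flatten();

    let all: Vec<&str> = native_iter.chain(check_iter).collect();
    assert_eq!(all, vec!["native diagnostic", "check diagnostic"]);
}
```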
@@ -1101,9 +1101,7 @@ pub(crate) fn handle_code_action(
 }

 // Fixes from `cargo check`.
-for fix in
-snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
-{
+for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() {
 // FIXME: this mapping is awkward and shouldn't exist. Refactor
 // `snap.check_fixes` to not convert to LSP prematurely.
 let intersect_fix_range = fix
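All of these removals are the kind of thing clippy's `useless_conversion` lint reports; running it is an easy way to catch both the same-type `.into()` calls and the redundant `.into_iter()` calls this commit cleans up (the exact warning wording varies by clippy version). A tiny illustration:

```rust
#![warn(clippy::useless_conversion)]
// Under clippy, both marked lines trigger `clippy::useless_conversion`.

fn main() {
    let ids: Vec<u32> = vec![1, 2, 3];
    let same: Vec<u32> = ids.clone().into(); // conversion to the same type
    let sum: u32 = ids.iter().into_iter().sum(); // receiver is already an Iterator
    assert_eq!(same, ids);
    assert_eq!(sum, 6);
}
```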