Remove redundant clone()
parent 7e0fa71532
commit 9c45a9e586

@@ -8,7 +8,7 @@ pub(crate) fn merge_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<A
     // We check if the following match arm matches this one. We could, but don't,
     // compare to the previous match arm as well.
     let next = current_arm.syntax().next_sibling();
-    let next_arm = MatchArm::cast(next?.clone())?;
+    let next_arm = MatchArm::cast(next?)?;

     // Don't try to handle arms with guards for now - can add support for this later
     if current_arm.guard().is_some() || next_arm.guard().is_some() {

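The pattern removed throughout this commit: a consuming call is handed `x.clone()` even though `x` is never used again, so the clone only produces a copy that is immediately moved while the original is dropped. A minimal, self-contained sketch with stand-in types (not rust-analyzer's actual `MatchArm`/`SyntaxNode` API):

    // Stand-in types for illustration only; not rust-analyzer's actual API.
    #[derive(Debug, Clone)]
    struct SyntaxNode(String);

    #[derive(Debug)]
    struct MatchArm(SyntaxNode);

    impl MatchArm {
        // A consuming cast: it takes ownership of the node it is given.
        fn cast(node: SyntaxNode) -> Option<MatchArm> {
            Some(MatchArm(node))
        }
    }

    fn demo(next: Option<SyntaxNode>) -> Option<MatchArm> {
        // Redundant: `MatchArm::cast(next?.clone())?` clones the node and
        // immediately moves the copy, dropping the original.
        // Sufficient: this is the value's last use, so move it directly.
        MatchArm::cast(next?)
    }

    fn main() {
        let arm = demo(Some(SyntaxNode("Some(x) => x".into())));
        println!("{:?}", arm);
    }
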
@@ -93,7 +93,7 @@ fn main() -> Result<()> {
         (true, true) => Err("Invalid flags: -q conflicts with -v")?,
     };
     let memory_usage = matches.contains("--memory-usage");
-    let only = matches.value_from_str(["-o", "--only"])?.map(|v: String| v.to_owned());
+    let only: Option<String> = matches.value_from_str(["-o", "--only"])?;
     let path = {
         let mut trailing = matches.free()?;
         if trailing.len() != 1 {

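The `-o`/`--only` change is the same idea applied to strings: when the parser already yields an owned `String`, mapping it through `to_owned()` only clones that `String`. A small sketch with a hypothetical `parse_only` helper standing in for the argument parser used above:

    // Hypothetical helper standing in for the argument parser used above.
    fn parse_only() -> Option<String> {
        Some("ra_hir".to_string())
    }

    fn main() {
        // Redundant: `v` is already an owned String, so `to_owned()` just clones it.
        let before: Option<String> = parse_only().map(|v: String| v.to_owned());
        // Equivalent, minus the extra allocation.
        let after: Option<String> = parse_only();
        assert_eq!(before, after);
    }
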
@@ -56,8 +56,7 @@ impl EnumVariant {
             .zip(db.enum_data(self.parent).variants.iter())
             .find(|(_syntax, (id, _))| *id == self.id)
             .unwrap()
-            .0
-            .to_owned();
+            .0;
         Source { file_id: src.file_id, ast }
     }
     pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {

@@ -203,12 +202,8 @@ impl StructField {
         };

         let field_sources = match struct_kind {
-            ast::StructKind::Tuple(fl) => {
-                fl.fields().map(|it| FieldSource::Pos(it.to_owned())).collect()
-            }
-            ast::StructKind::Named(fl) => {
-                fl.fields().map(|it| FieldSource::Named(it.to_owned())).collect()
-            }
+            ast::StructKind::Tuple(fl) => fl.fields().map(|it| FieldSource::Pos(it)).collect(),
+            ast::StructKind::Named(fl) => fl.fields().map(|it| FieldSource::Named(it)).collect(),
             ast::StructKind::Unit => Vec::new(),
         };
         let ast = field_sources

@@ -161,7 +161,7 @@ impl ModuleSource {
     ) -> ModuleSource {
         match (file_id, decl_id) {
             (Some(file_id), _) => {
-                let source_file = db.parse(file_id).tree().to_owned();
+                let source_file = db.parse(file_id).tree();
                 ModuleSource::SourceFile(source_file)
             }
             (None, Some(item_id)) => {

@@ -137,7 +137,7 @@ impl ModuleSource {
         match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) {
             Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
             _ => {
-                let source_file = parse.tree().to_owned();
+                let source_file = parse.tree();
                 ModuleSource::SourceFile(source_file)
             }
         }

@@ -149,15 +149,15 @@ impl ModuleSource {
         child: &SyntaxNode,
     ) -> ModuleSource {
         if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) {
-            ModuleSource::Module(m.clone())
+            ModuleSource::Module(m)
         } else {
-            let source_file = db.parse(file_id).tree().to_owned();
+            let source_file = db.parse(file_id).tree();
             ModuleSource::SourceFile(source_file)
         }
     }

     pub fn from_file_id(db: &(impl DefDatabase + AstDatabase), file_id: FileId) -> ModuleSource {
-        let source_file = db.parse(file_id).tree().to_owned();
+        let source_file = db.parse(file_id).tree();
         ModuleSource::SourceFile(source_file)
     }
 }

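Most of the remaining hunks drop `.to_owned()` on values that are already owned. Via the blanket `impl<T: Clone> ToOwned for T`, calling `.to_owned()` on an owned value is just a `clone()`, so when an accessor already returns its result by value the call can simply be removed. A minimal sketch, assuming a `tree()` accessor that hands back an owned tree as in the hunks above (stand-in types, not the real `Parse`/`SourceFile`):

    // Stand-in types; the real rust-analyzer `Parse`/`SourceFile` are more involved.
    #[derive(Clone)]
    struct SourceFile;

    struct Parse {
        file: SourceFile,
    }

    impl Parse {
        // Assumed to hand the tree back by value (owned), as in the hunks above.
        fn tree(&self) -> SourceFile {
            self.file.clone()
        }
    }

    fn main() {
        let parse = Parse { file: SourceFile };
        // `tree()` already returns an owned value; `to_owned()` goes through the
        // blanket `impl<T: Clone> ToOwned for T` and merely clones it again.
        let _before: SourceFile = parse.tree().to_owned();
        // Same result, one clone fewer.
        let _after: SourceFile = parse.tree();
    }
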
@@ -166,7 +166,7 @@ where
         // In Rust, `#[macro_export]` macros are unconditionally visible at the
         // crate root, even if the parent modules is **not** visible.
         if export {
-            self.update(self.def_map.root, None, &[(name.clone(), Resolution::from_macro(macro_))]);
+            self.update(self.def_map.root, None, &[(name, Resolution::from_macro(macro_))]);
         }
     }

@@ -36,10 +36,7 @@ type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>;

 impl ImportSourcePtr {
     fn to_node(self, file: &SourceFile) -> ImportSource {
-        self.map(
-            |ptr| ptr.to_node(file.syntax()).to_owned(),
-            |ptr| ptr.to_node(file.syntax()).to_owned(),
-        )
+        self.map(|ptr| ptr.to_node(file.syntax()), |ptr| ptr.to_node(file.syntax()))
     }
 }

@@ -73,7 +73,7 @@ fn def_with_body_from_child_node(
     if let Some(def) = ast::ConstDef::cast(node.clone()) {
         return Some(Const { id: ctx.to_def(&def) }.into());
     }
-    if let Some(def) = ast::StaticDef::cast(node.clone()) {
+    if let Some(def) = ast::StaticDef::cast(node) {
         return Some(Static { id: ctx.to_def(&def) }.into());
     }
     None

@@ -436,7 +436,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {

     fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
         let var = self.new_type_var();
-        let predicate = ProjectionPredicate { projection_ty: proj_ty.clone(), ty: var.clone() };
+        let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
         let obligation = Obligation::Projection(predicate);
         self.obligations.push(obligation);
         var

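This hunk also shows when a clone still earns its keep: `proj_ty` is consumed by the struct literal and never touched again, so it can be moved, while `var` is both stored in the predicate and returned afterwards, so its `clone()` stays. A sketch with stand-in types (not the real `ProjectionTy`/`Ty`):

    // Stand-in types; not the real ProjectionTy/Ty from hir.
    #[derive(Clone, Debug)]
    struct ProjectionTy(u32);

    #[derive(Clone, Debug)]
    struct Ty(u32);

    #[derive(Debug)]
    struct ProjectionPredicate {
        projection_ty: ProjectionTy,
        ty: Ty,
    }

    fn normalize(proj_ty: ProjectionTy, var: Ty) -> (ProjectionPredicate, Ty) {
        // `proj_ty` has no further uses, so it is moved into the struct;
        // `var` is returned below as well, so it keeps its clone.
        let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
        (predicate, var)
    }

    fn main() {
        let (predicate, var) = normalize(ProjectionTy(0), Ty(1));
        println!("{:?} {:?}", predicate, var);
    }
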
@@ -953,7 +953,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             arm_tys.push(self.infer_expr_inner(arm.expr, &expected));
         }

-        let lub_ty = calculate_least_upper_bound(expected.ty.clone(), &arm_tys);
+        let lub_ty = calculate_least_upper_bound(expected.ty, &arm_tys);

         for arm_ty in &arm_tys {
             self.coerce(arm_ty, &lub_ty);

@@ -290,7 +290,7 @@ pub(crate) fn implements_trait(
         return true;
     }
     let env = lower::trait_env(db, resolver);
-    let goal = generic_implements_goal(db, env.clone(), trait_, ty.clone());
+    let goal = generic_implements_goal(db, env, trait_, ty.clone());
     let solution = db.trait_solve(krate, goal);

     solution.is_some()

@@ -94,7 +94,7 @@ impl<'a> CompletionContext<'a> {
         // actual completion.
         let file = {
             let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
-            original_parse.reparse(&edit).tree().to_owned()
+            original_parse.reparse(&edit).tree()
         };

         // First, let's try to complete a reference to some declaration.

@@ -76,7 +76,7 @@ impl Completions {
                 None,
             ),
             ScopeDef::MacroDef(mac) => {
-                self.add_macro(ctx, Some(local_name.clone()), *mac);
+                self.add_macro(ctx, Some(local_name), *mac);
                 return;
             }
             ScopeDef::Unknown => {

@@ -86,7 +86,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
             fix: Some(fix),
         })
     });
-    let source_file = db.parse(file_id).tree().to_owned();
+    let source_file = db.parse(file_id).tree();
     let src =
         hir::Source { file_id: file_id.into(), ast: hir::ModuleSource::SourceFile(source_file) };
     if let Some(m) = hir::Module::from_definition(db, src) {

@@ -304,7 +304,7 @@ impl NavigationTarget {

 pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
     let parse = db.parse(symbol.file_id);
-    let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();
+    let node = symbol.ptr.to_node(parse.tree().syntax());

     visitor()
         .visit(|it: ast::FnDef| it.doc_comment_text())

@@ -326,7 +326,7 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option
 /// e.g. `struct Name`, `enum Name`, `fn Name`
 pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
     let parse = db.parse(symbol.file_id);
-    let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();
+    let node = symbol.ptr.to_node(parse.tree().syntax());

     visitor()
         .visit(|node: ast::FnDef| node.short_label())

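A side note: much of what this commit removes by hand is the kind of pattern clippy's `redundant_clone` lint is meant to flag, e.g. a clone whose original is dropped without further use:

    #![warn(clippy::redundant_clone)]

    fn consume(s: String) -> usize {
        s.len()
    }

    fn main() {
        let name = String::from("macro_rules");
        // `name` is never used after this call, so the clone is redundant;
        // `cargo clippy` reports it and the value can be moved instead.
        let _len = consume(name.clone());
    }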