clippy::redundant_clone fixes

Laurențiu Nicola 2021-05-26 18:34:50 +03:00
parent 8b049ec393
commit 8206939fed
7 changed files with 22 additions and 27 deletions
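For context, `clippy::redundant_clone` fires when a value is cloned but the original is never used again, so the `.clone()` can simply be removed and the value moved instead. A minimal standalone sketch of the pattern (illustrative names, not code from this repository):

fn greet(name: String) {
    println!("hello, {}", name);
}

fn main() {
    let name = String::from("rust-analyzer");
    // `name` is never used after this call, so the clone is unnecessary;
    // clippy flags the `.clone()` as removable and `name` can be moved.
    greet(name.clone()); // warning: redundant clone
}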

View File

@@ -197,7 +197,7 @@ fn eager_macro_recur(
     macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
     mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
 ) -> Result<SyntaxNode, ErrorEmitted> {
-    let original = curr.value.clone().clone_for_update();
+    let original = curr.value.clone_for_update();
     let children = original.descendants().filter_map(ast::MacroCall::cast);
     let mut replacements = Vec::new();

View File

@@ -76,17 +76,17 @@ pub(super) fn coerce_merge_branch(&mut self, id: Option<ExprId>, ty1: &Ty, ty2:
         // way around first would mean we make the type variable `!`, instead of
         // just marking it as possibly diverging.
         if self.coerce(&ty2, &ty1) {
-            ty1.clone()
+            ty1
         } else if self.coerce(&ty1, &ty2) {
-            ty2.clone()
+            ty2
         } else {
             if let Some(id) = id {
                 self.result
                     .type_mismatches
-                    .insert(id.into(), TypeMismatch { expected: ty1.clone(), actual: ty2.clone() });
+                    .insert(id.into(), TypeMismatch { expected: ty1.clone(), actual: ty2 });
             }
             cov_mark::hit!(coerce_merge_fail_fallback);
-            ty1.clone()
+            ty1
         }
     }
@@ -183,7 +183,7 @@ fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> InferRes
         // details of coercion errors though, so I think it's useful to leave
         // the structure like it is.
-        let canonicalized = self.canonicalize(from_ty.clone());
+        let canonicalized = self.canonicalize(from_ty);
         let autoderef = autoderef::autoderef(
             self.db,
             self.resolver.krate(),
@@ -389,7 +389,7 @@ fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> InferResult {
                 // The CoerceUnsized trait should have two generic params: Self and T.
                 return Err(TypeError);
             }
-            b.push(coerce_from.clone()).push(to_ty.clone()).build()
+            b.push(coerce_from).push(to_ty.clone()).build()
         };
         let goal: InEnvironment<DomainGoal> =
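The coercion changes above all have the same shape: the function already owns the value (or the caller hands over ownership) and the original is not used again, so it can be returned or moved directly instead of cloned. A standalone sketch of that shape (illustrative types, not this crate's API):

#[derive(Clone, Debug, PartialEq)]
struct Ty(String);

// `fallback` is taken by value; because the caller gives up ownership and
// the function never uses `fallback` again, it can be returned directly.
fn merge(preferred: Option<Ty>, fallback: Ty) -> Ty {
    match preferred {
        Some(ty) => ty,
        None => fallback, // `fallback.clone()` here would be a redundant clone
    }
}

fn main() {
    assert_eq!(merge(None, Ty("i32".into())), Ty("i32".into()));
}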

View File

@@ -44,7 +44,7 @@ pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) ->
             if !could_unify {
                 self.result.type_mismatches.insert(
                     tgt_expr.into(),
-                    TypeMismatch { expected: expected_ty.clone(), actual: ty.clone() },
+                    TypeMismatch { expected: expected_ty, actual: ty.clone() },
                 );
             }
         }
@@ -57,15 +57,14 @@ pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation)
         let ty = self.infer_expr_inner(expr, &expected);
         let ty = if let Some(target) = expected.only_has_type(&mut self.table) {
             if !self.coerce(&ty, &target) {
-                self.result.type_mismatches.insert(
-                    expr.into(),
-                    TypeMismatch { expected: target.clone(), actual: ty.clone() },
-                );
+                self.result
+                    .type_mismatches
+                    .insert(expr.into(), TypeMismatch { expected: target, actual: ty.clone() });
                 // Return actual type when type mismatch.
                 // This is needed for diagnostic when return type mismatch.
                 ty
             } else {
-                target.clone()
+                target
             }
         } else {
             ty

View File

@@ -196,7 +196,7 @@ pub(super) fn infer_pat(
                 let inner_ty = if let Some(subpat) = subpat {
                     self.infer_pat(*subpat, &expected, default_bm)
                 } else {
-                    expected.clone()
+                    expected
                 };
                 let inner_ty = self.insert_type_vars_shallow(inner_ty);
@@ -266,10 +266,9 @@ pub(super) fn infer_pat(
         // use a new type variable if we got error type here
         let ty = self.insert_type_vars_shallow(ty);
         if !self.unify(&ty, &expected) {
-            self.result.type_mismatches.insert(
-                pat.into(),
-                TypeMismatch { expected: expected.clone(), actual: ty.clone() },
-            );
+            self.result
+                .type_mismatches
+                .insert(pat.into(), TypeMismatch { expected: expected, actual: ty.clone() });
         }
         self.write_pat_ty(pat, ty.clone());
         ty

View File

@@ -55,7 +55,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Opti
     let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
     let imported_defs = find_imported_defs(ctx, star)?;
-    let target = parent.clone().either(|n| n.syntax().clone(), |n| n.syntax().clone());
+    let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
     acc.add(
         AssistId("expand_glob_import", AssistKind::RefactorRewrite),
         "Expand glob import",

View File

@@ -214,7 +214,7 @@ fn collect(
                         acc
                     };
                     let package_build_data =
-                        res.per_package.entry(package_id.repr.clone()).or_default();
+                        res.per_package.entry(package_id.repr).or_default();
                     // cargo_metadata crate returns default (empty) path for
                     // older cargos, which is not absolute, so work around that.
                     if !out_dir.as_str().is_empty() {
@@ -237,13 +237,13 @@ fn collect(
                     {
                         let filename = AbsPathBuf::assert(PathBuf::from(&filename));
                         let package_build_data =
-                            res.per_package.entry(package_id.repr.clone()).or_default();
+                            res.per_package.entry(package_id.repr).or_default();
                         package_build_data.proc_macro_dylib_path = Some(filename);
                     }
                 }
             }
             Message::CompilerMessage(message) => {
-                progress(message.target.name.clone());
+                progress(message.target.name);
             }
             Message::BuildFinished(_) => {}
             Message::TextLine(_) => {}
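In the build-data changes above, the key passed to `entry` is no longer cloned because it is not used again afterwards. With the standard `HashMap` entry API, which takes the key by value, the pattern looks like this (a minimal sketch with hypothetical names, not the project-model code):

use std::collections::HashMap;

fn main() {
    let mut per_package: HashMap<String, Vec<String>> = HashMap::new();
    let package_id = String::from("example-package 0.1.0");

    // `entry` consumes the key; since `package_id` is not used again below,
    // writing `package_id.clone()` here would trigger `redundant_clone`.
    per_package.entry(package_id).or_default().push("OUT_DIR=/tmp/out".to_string());

    assert_eq!(per_package["example-package 0.1.0"].len(), 1);
}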

View File

@@ -346,11 +346,8 @@ pub fn from_cargo_metadata(
         let workspace_root =
             AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
-        let build_data_config = BuildDataConfig::new(
-            cargo_toml.to_path_buf(),
-            config.clone(),
-            Arc::new(meta.packages.clone()),
-        );
+        let build_data_config =
+            BuildDataConfig::new(cargo_toml.to_path_buf(), config.clone(), Arc::new(meta.packages));
         Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config })
     }