Auto merge of #16294 - matthiaskrgr:less_alloc, r=Veykril

minor: some minor clippy::perf fixes

can be read commit by commit if you want 🤷
bors 2024-01-07 10:02:38 +00:00
commit 1c5fa447d9
34 changed files with 62 additions and 73 deletions

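Most of the hunks below remove a `.clone()` on a value that is moved on its last use; the rest swap one-character string patterns for `char`s, drop `.to_string()` calls inside format strings, replace a `vec![..]` with an array, move a `.cloned()` past a `.skip(..)`, and make a panic message lazy. As a rough standalone sketch of the dominant pattern (illustrative only — the names and types below are made up, not code from this PR):

#[derive(Debug)]
struct Span(u32);

fn collect_mappings(names: Vec<String>) -> Vec<(Span, String)> {
    let mut mappings = Vec::new();
    for (i, name) in names.into_iter().enumerate() {
        // `name` is not used again after this push, so it can be moved into
        // the tuple directly; `name.clone()` would allocate a second String
        // only for the original to be dropped right after.
        mappings.push((Span(i as u32), name));
    }
    mappings
}

fn main() {
    let mappings = collect_mappings(vec!["a".to_owned(), "b".to_owned()]);
    println!("{mappings:?}");
}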

@ -1610,7 +1610,7 @@ impl ExprCollector<'_> {
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|name, span| {
if let Some(span) = span {
mappings.push((span, name.clone()))
mappings.push((span, name))
}
},
),


@ -1397,7 +1397,7 @@ impl DefCollector<'_> {
always!(krate == loc.def.krate);
DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
}
_ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()),
_ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()),
};
self.def_map.diagnostics.push(diag);


@ -609,7 +609,7 @@ fn render_const_scalar(
}
hir_def::AdtId::EnumId(e) => {
let Some((var_id, var_layout)) =
detect_variant_from_bytes(&layout, f.db, trait_env.clone(), b, e)
detect_variant_from_bytes(&layout, f.db, trait_env, b, e)
else {
return f.write_str("<failed-to-detect-variant>");
};


@ -738,7 +738,7 @@ impl<'a> InferenceContext<'a> {
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.enumerate()
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst.clone())))
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst)))
.collect();
result
}


@ -130,7 +130,7 @@ impl HirPlace {
ctx.owner.module(ctx.db.upcast()).krate(),
);
}
ty.clone()
ty
}
fn capture_kind_of_truncated_place(
@ -245,7 +245,7 @@ pub(crate) struct CapturedItemWithoutTy {
impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
let ty = self.place.ty(ctx).clone();
let ty = self.place.ty(ctx);
let ty = match &self.kind {
CaptureKind::ByValue => ty,
CaptureKind::ByRef(bk) => {
@ -396,7 +396,7 @@ impl InferenceContext<'_> {
fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
if self.is_upvar(&place) {
let ty = place.ty(self).clone();
let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) {
CaptureKind::ByRef(BorrowKind::Shared)
} else {


@ -978,7 +978,7 @@ impl InferenceContext<'_> {
.push(callee_ty.clone())
.push(TyBuilder::tuple_with(params.iter().cloned()))
.build();
self.write_method_resolution(tgt_expr, func, subst.clone());
self.write_method_resolution(tgt_expr, func, subst);
}
}


@ -233,7 +233,6 @@ impl InferenceContext<'_> {
};
let mut expectations_iter = expectations
.iter()
.cloned()
.map(|a| a.assert_ty_ref(Interner).clone())
.chain(repeat_with(|| self.table.new_type_var()));
@ -336,7 +335,7 @@ impl InferenceContext<'_> {
&Pat::Lit(expr) => {
// Don't emit type mismatches again, the expression lowering already did that.
let ty = self.infer_lit_pat(expr, &expected);
self.write_pat_ty(pat, ty.clone());
self.write_pat_ty(pat, ty);
return self.pat_ty_after_adjustment(pat);
}
Pat::Box { inner } => match self.resolve_boxed_box() {


@ -164,7 +164,7 @@ fn layout_of_simd_ty(
};
// Compute the ABI of the element type:
let e_ly = db.layout_of_ty(e_ty, env.clone())?;
let e_ly = db.layout_of_ty(e_ty, env)?;
let Abi::Scalar(e_abi) = e_ly.abi else {
return Err(LayoutError::Unknown);
};
@ -204,17 +204,17 @@ pub fn layout_of_ty_query(
};
let cx = LayoutCx { target: &target };
let dl = &*cx.current_data_layout();
let ty = normalize(db, trait_env.clone(), ty.clone());
let ty = normalize(db, trait_env.clone(), ty);
let result = match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => {
if let hir_def::AdtId::StructId(s) = def {
let data = db.struct_data(*s);
let repr = data.repr.unwrap_or_default();
if repr.simd() {
return layout_of_simd_ty(db, *s, subst, trait_env.clone(), &target);
return layout_of_simd_ty(db, *s, subst, trait_env, &target);
}
};
return db.layout_of_adt(*def, subst.clone(), trait_env.clone());
return db.layout_of_adt(*def, subst.clone(), trait_env);
}
TyKind::Scalar(s) => match s {
chalk_ir::Scalar::Bool => Layout::scalar(
@ -280,7 +280,7 @@ pub fn layout_of_ty_query(
}
TyKind::Array(element, count) => {
let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let element = db.layout_of_ty(element.clone(), trait_env)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
@ -303,7 +303,7 @@ pub fn layout_of_ty_query(
}
}
TyKind::Slice(element) => {
let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let element = db.layout_of_ty(element.clone(), trait_env)?;
Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count: 0 },
@ -345,7 +345,7 @@ pub fn layout_of_ty_query(
}))
.intern(Interner);
}
unsized_part = normalize(db, trait_env.clone(), unsized_part);
unsized_part = normalize(db, trait_env, unsized_part);
let metadata = match unsized_part.kind(Interner) {
TyKind::Slice(_) | TyKind::Str => {
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
@ -384,7 +384,7 @@ pub fn layout_of_ty_query(
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = db.infer(func.into());
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env.clone());
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env);
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
return Err(LayoutError::NotImplemented)

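The `trait_env.clone()` -> `trait_env` changes above follow the same idea for shared data: the trait environment here is handed around behind an `Arc`, so the final use can move the existing handle instead of bumping the reference count. A hypothetical sketch (the `Env`/`layout_with` names are invented for illustration, not the real rust-analyzer API):

use std::sync::Arc;

struct Env {
    settings: Vec<String>,
}

fn layout_with(env: Arc<Env>) -> usize {
    // Stand-in for work that needs shared access to the environment.
    env.settings.len()
}

fn main() {
    let env = Arc::new(Env { settings: vec!["opt".to_owned()] });

    // Earlier call sites still need their own handle, so they clone the Arc
    // (a cheap reference-count bump, but not free).
    let first = layout_with(env.clone());

    // On the final use the existing handle can simply be moved, which is
    // what the `trait_env.clone()` -> `trait_env` hunks above do.
    let second = layout_with(env);

    println!("{first} {second}");
}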

@ -1350,7 +1350,7 @@ pub(crate) fn resolve_indexing_op(
ty: Canonical<Ty>,
index_trait: TraitId,
) -> Option<ReceiverAdjustments> {
let mut table = InferenceTable::new(db, env.clone());
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
let deref_chain = autoderef_method_receiver(&mut table, ty);
for (ty, adj) in deref_chain {


@ -386,7 +386,7 @@ impl MirEvalError {
write!(
f,
"Layout for type `{}` is not available due {err:?}",
ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string()
ty.display(db).with_closure_style(ClosureStyle::ClosureWithId)
)?;
}
MirEvalError::MirLowerError(func, err) => {
@ -1533,7 +1533,7 @@ impl Evaluator<'_> {
}
},
TyKind::Dyn(_) => {
let vtable = self.vtable_map.id(current_ty.clone());
let vtable = self.vtable_map.id(current_ty);
let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?;
r.extend(addr.iter().copied());

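Several files drop a `.to_string()` on a value that is only interpolated into `write!`/`format!`. Formatting already goes through the value's `Display` impl, so the extra call just builds an intermediate `String` that is thrown away (clippy::to_string_in_format_args). A minimal illustration with a made-up type, not the code above:

use std::fmt;

struct Ty(&'static str);

impl fmt::Display for Ty {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    let ty = Ty("Foo<u32>");

    // Allocates a temporary String, then formats that String:
    let slow = format!("layout for `{}` unavailable", ty.to_string());

    // Formats the value directly through its Display impl:
    let fast = format!("layout for `{}` unavailable", ty);

    assert_eq!(slow, fast);
}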

@ -177,7 +177,7 @@ impl MirLowerError {
)?;
writeln!(f, "Provided args: [")?;
for g in subst.iter(Interner) {
write!(f, " {},", g.display(db).to_string())?;
write!(f, " {},", g.display(db))?;
}
writeln!(f, "]")?;
}
@ -2070,8 +2070,8 @@ pub fn mir_body_for_closure_query(
prev_projs
.lookup(&store)
.iter()
.cloned()
.skip(it.0.place.projections.len()),
.skip(it.0.place.projections.len())
.cloned(),
);
p.projection = store.intern(next_projs.into());
}

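Moving `.cloned()` to after `.skip(..)` in the hunk above means only the projections that survive the skip get cloned; cloning first copies elements that are immediately discarded (the pattern clippy::iter_overeager_cloned warns about). A standalone sketch of the effect, not the rust-analyzer code itself:

fn main() {
    let projections: Vec<String> =
        (0..6).map(|i| format!("field{i}")).collect();
    let already_lowered = 4;

    // Before: every element is cloned, then the first four clones are dropped.
    let wasteful: Vec<String> =
        projections.iter().cloned().skip(already_lowered).collect();

    // After: skip first, so only the remaining two elements are cloned.
    let lean: Vec<String> =
        projections.iter().skip(already_lowered).cloned().collect();

    assert_eq!(wasteful, lean);
}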

@ -1093,7 +1093,7 @@ impl Field {
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(
self.ty(db).ty.clone(),
self.ty(db).ty,
db.trait_environment(match hir_def::VariantId::from(self.parent) {
hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
@ -1854,7 +1854,7 @@ impl DefWithBody {
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
(mir::MutabilityReason::Unused, _) => {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_'));
if !should_ignore {
acc.push(UnusedVariable { local }.into())
}
@ -1879,7 +1879,7 @@ impl DefWithBody {
}
(mir::MutabilityReason::Not, true) => {
if !infer.mutated_bindings_in_closure.contains(&binding_id) {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_'));
if !should_ignore {
acc.push(UnusedMut { local }.into())
}
@ -3673,7 +3673,6 @@ impl Closure {
let (captures, _) = infer.closure_info(&self.id);
captures
.iter()
.cloned()
.map(|capture| Type {
env: db.trait_environment_for_body(owner),
ty: capture.ty(&self.subst),


@ -428,7 +428,7 @@ impl<'db> SemanticsImpl<'db> {
if let Some(original_string) = ast::String::cast(original_token.clone()) {
if let Some(quote) = original_string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, original_token.clone())
.descend_into_macros(DescendPreference::SameText, original_token)
.into_iter()
.find_map(|token| {
self.resolve_offset_in_format_args(


@ -301,7 +301,7 @@ fn replace_usages(
// add imports across modules where needed
if let Some((import_scope, path)) = import_data {
let scope = match import_scope.clone() {
let scope = match import_scope {
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
@ -329,7 +329,7 @@ fn augment_references_with_imports(
references
.into_iter()
.filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?;
let name = name.into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module()))
})
.map(|(range, name, ref_module)| {


@ -49,8 +49,8 @@ pub(crate) fn convert_nested_function_to_closure(
target,
|edit| {
let params = &param_list.syntax().text().to_string();
let params = params.strip_prefix("(").unwrap_or(params);
let params = params.strip_suffix(")").unwrap_or(params);
let params = params.strip_prefix('(').unwrap_or(params);
let params = params.strip_suffix(')').unwrap_or(params);
let mut body = body.to_string();
if !has_semicolon(&function) {

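When the pattern is a single character, passing a `char` instead of a one-character `&str` lets the standard library do a plain character comparison rather than general substring matching (clippy::single_char_pattern); the same swap appears in several other files in this commit for `starts_with` and `replace`. A minimal illustration, separate from the code above:

fn main() {
    let params = "(a: u32, b: u32)";

    // &str patterns work, but a one-character string is needlessly general:
    let trimmed_str = params.strip_prefix("(").and_then(|p| p.strip_suffix(")"));

    // char patterns express the same thing and are cheaper to match:
    let trimmed_char = params.strip_prefix('(').and_then(|p| p.strip_suffix(')'));

    assert_eq!(trimmed_str, trimmed_char);
    assert!("_unused".starts_with('_'));
    assert_eq!("foo-bar".replace('-', "_"), "foo_bar");
}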

@ -190,7 +190,7 @@ fn augment_references_with_imports(
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
})
.map(|(name, ref_module)| {
let new_name = edit.make_mut(name.clone());
let new_name = edit.make_mut(name);
// if the referenced module is not the same as the target one and has not been seen before, add an import
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module


@ -112,7 +112,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let insert_place = edit.make_syntax_mut(place);
// Adjust ws to insert depending on if this is all inline or on separate lines
let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) {
let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
format!("\n{indent_to}")
} else {
format!(" ")


@ -147,7 +147,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
None => {
let name = &strukt_name.to_string();
let params = strukt.generic_param_list();
let ty_params = params.clone();
let ty_params = params;
let where_clause = strukt.where_clause();
let impl_def = make::impl_(


@ -266,11 +266,8 @@ fn generate_impl(
.clone_for_update();
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type = make::path_from_text(&format!(
"<{} as {}>",
field_ty.to_string(),
delegate.trait_()?.to_string()
));
let qualified_path_type =
make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
match bound_def.assoc_item_list() {
@ -373,11 +370,8 @@ fn generate_impl(
.clone_for_update();
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type = make::path_from_text(&format!(
"<{} as {}>",
field_ty.to_string(),
delegate.trait_()?.to_string()
));
let qualified_path_type =
make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
// 4) Transform associated items in delegte trait impl
let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
@ -759,7 +753,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<Ass
}
fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string()))
make::path_from_text(&format!("{}::{}", qual_path_ty, path_expr_seg))
}
#[cfg(test)]


@ -432,7 +432,7 @@ fn get_fn_target(
}
None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
};
Some((target.clone(), file))
Some((target, file))
}
fn get_method_target(


@ -47,7 +47,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let trait_ = impl_def.trait_()?;
if let ast::Type::PathType(trait_path) = trait_.clone() {
if let ast::Type::PathType(trait_path) = trait_ {
let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
let scope = ctx.sema.scope(trait_path.syntax())?;
if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
@ -105,7 +105,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate `IndexMut` impl from this `Index` trait",
target,
|edit| {
edit.insert(target.start(), format!("$0{}\n\n", impl_def.to_string()));
edit.insert(target.start(), format!("$0{}\n\n", impl_def));
},
)
}


@ -128,7 +128,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
builder.replace_snippet(
snippet_cap,
impl_name.syntax().text_range(),
format!("${{0:TraitName}}{} for {}", arg_list, impl_name.to_string()),
format!("${{0:TraitName}}{} for {}", arg_list, impl_name),
);
// Insert trait before TraitImpl
@ -144,17 +144,13 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
} else {
builder.replace(
impl_name.syntax().text_range(),
format!("NewTrait{} for {}", arg_list, impl_name.to_string()),
format!("NewTrait{} for {}", arg_list, impl_name),
);
// Insert trait before TraitImpl
builder.insert(
impl_ast.syntax().text_range().start(),
format!(
"{}\n\n{}",
trait_ast.to_string(),
IndentLevel::from_node(impl_ast.syntax())
),
format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())),
);
}


@ -43,7 +43,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token());
let need_to_add_ws = match prev_token {
Some(it) => {
let tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']];
let tokens = [T![&], T![!], T!['('], T!['['], T!['{']];
it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind())
}
None => false,

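The `vec![..]` -> `[..]` change above avoids a heap allocation: a fixed set of tokens that is only checked with `contains` can live in a stack array (clippy::useless_vec). A reduced sketch with plain `char`s standing in for the syntax kinds:

fn needs_whitespace(prev: char) -> bool {
    // An array literal lives on the stack; `vec![..]` would heap-allocate
    // a buffer just to run a linear `contains` over five elements.
    let tokens = ['&', '!', '(', '[', '{'];
    !prev.is_whitespace() && !tokens.contains(&prev)
}

fn main() {
    assert!(needs_whitespace('x'));
    assert!(!needs_whitespace('&'));
    assert!(!needs_whitespace(' '));
}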

@ -55,7 +55,7 @@ pub fn items_with_name<'a>(
local_query.fuzzy();
local_query.assoc_search_mode(assoc_item_search);
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
let mut external_query = import_map::Query::new(fuzzy_search_string)
.fuzzy()
.assoc_search_mode(assoc_item_search);


@ -159,7 +159,7 @@ impl<'a> PathTransform<'a> {
.for_each(|(k, v)| match (k.split(db), v) {
(Either::Right(k), Some(TypeOrConst::Either(v))) => {
if let Some(ty) = v.ty() {
type_substs.insert(k, ty.clone());
type_substs.insert(k, ty);
}
}
(Either::Right(k), None) => {


@ -539,7 +539,7 @@ impl<'a> FindUsages<'a> {
tree.token_at_offset(offset).into_iter().for_each(|token| {
let Some(str_token) = ast::String::cast(token.clone()) else { return };
if let Some((range, nameres)) =
sema.check_for_format_args_template(token.clone(), offset)
sema.check_for_format_args_template(token, offset)
{
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
return;


@ -341,13 +341,13 @@ impl SourceChangeBuilder {
/// Adds a tabstop snippet to place the cursor before `token`
pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
assert!(token.parent().is_some());
self.add_snippet(PlaceSnippet::Before(token.clone().into()));
self.add_snippet(PlaceSnippet::Before(token.into()));
}
/// Adds a tabstop snippet to place the cursor after `token`
pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
assert!(token.parent().is_some());
self.add_snippet(PlaceSnippet::After(token.clone().into()));
self.add_snippet(PlaceSnippet::After(token.into()));
}
/// Adds a snippet to move the cursor selected over `node`


@ -32,7 +32,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
.source(db)
.map(|it| it.syntax().value.text_range())
.unwrap_or(default_range),
format!("\n {};", function.display(db).to_string()),
format!("\n {};", function.display(db)),
)
}
hir::AssocItem::Const(id) => {
@ -43,7 +43,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
.source(db)
.map(|it| it.syntax().value.text_range())
.unwrap_or(default_range),
format!("\n {};", constant.display(db).to_string()),
format!("\n {};", constant.display(db)),
)
}
hir::AssocItem::TypeAlias(id) => {


@ -160,7 +160,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
// if receiver should be pass as first arg in the assoc func,
// we could omit generic parameters cause compiler can deduce it automatically
if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
let generic_parameters = generic_parameters.join(", ").to_string();
let generic_parameters = generic_parameters.join(", ");
receiver_type_adt_name =
format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
}


@ -42,8 +42,9 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.pop()
.expect("no diagnostics");
let fix =
&diagnostic.fixes.expect(&format!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
let fix = &diagnostic
.fixes
.unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();

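`expect(&format!(..))` in the test helper above formats the panic message eagerly, even when the fixes are present; `unwrap_or_else(|| panic!(..))` only builds the message on the failure path (clippy::expect_fun_call). A reduced example with an invented `Diagnostic` type, not the real test code:

#[derive(Debug)]
struct Diagnostic {
    code: u32,
    fixes: Option<Vec<&'static str>>,
}

fn first_fix(diagnostic: &Diagnostic) -> &'static str {
    // Eager: the String is allocated and formatted on every call,
    // then immediately discarded when `fixes` is `Some`:
    //     diagnostic.fixes.as_ref().expect(&format!("{:?} diagnostic misses fixes", diagnostic.code))[0]
    //
    // Lazy: the message is only built if we actually panic.
    diagnostic
        .fixes
        .as_ref()
        .unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[0]
}

fn main() {
    let diag = Diagnostic { code: 42, fixes: Some(vec!["remove clone"]) };
    println!("{}", first_fix(&diag));
}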

@ -79,7 +79,7 @@ pub(crate) fn goto_definition(
return Some(vec![x]);
}
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.clone()) {
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
return Some(vec![x]);
}
}


@ -69,7 +69,7 @@ impl FieldOrTupleIdx {
.as_str()
.map(|s| s.to_owned())
.unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())),
FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(),
FieldOrTupleIdx::TupleIdx(i) => format!(".{i}"),
}
}
}
@ -203,7 +203,7 @@ pub(crate) fn view_memory_layout(
let mut nodes = vec![MemoryLayoutNode {
item_name,
typename: typename.clone(),
typename,
size: layout.size(),
offset: 0,
alignment: layout.align(),


@ -274,7 +274,7 @@ impl CargoWorkspace {
other_options.append(
&mut targets
.into_iter()
.flat_map(|target| ["--filter-platform".to_owned().to_string(), target])
.flat_map(|target| ["--filter-platform".to_owned(), target])
.collect(),
);
}


@ -1277,7 +1277,7 @@ fn add_target_crate_root(
inject_cargo_env(pkg, &mut env);
if let Ok(cname) = String::from_str(cargo_name) {
// CARGO_CRATE_NAME is the name of the Cargo target with - converted to _, such as the name of the library, binary, example, integration test, or benchmark.
env.set("CARGO_CRATE_NAME", cname.replace("-", "_"));
env.set("CARGO_CRATE_NAME", cname.replace('-', "_"));
}
if let Some(envs) = build_data.map(|it| &it.envs) {