Auto merge of #87449 - matthiaskrgr:clippyy_v2, r=nagisa
more clippy::complexity fixes (also a couple of clippy::perf fixes)
commit aadd6189ad
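
A rough orientation before the hunks: every change below is one of a handful of mechanical rewrites suggested by clippy's complexity and perf lint groups. The sketch that follows summarizes the recurring patterns; it is illustrative only, and the function and variable names in it (clippy_patterns, tags, label) are made up rather than taken from the compiler sources.

// Illustrative sketch of the lint patterns fixed in this commit; not compiler code.
fn clippy_patterns(tags: Vec<Option<String>>, label: &str) {
    // .filter_map(|x| x) and .flat_map(identity) become .flatten()
    // (clippy::filter_map_identity, clippy::flat_map_identity).
    let present: Vec<String> = tags.into_iter().flatten().collect();

    // format!("literal") becomes "literal".to_string() (clippy::useless_format).
    let msg = "use mutable method".to_string();

    // Single-character string patterns become char patterns (clippy::single_char_pattern).
    let parts: Vec<&str> = label.split(',').collect();

    // vec![] followed by push() becomes a vec![...] literal (clippy::vec_init_then_push).
    let args = vec![msg];

    // extend(other.drain(..)) becomes append(&mut other), moving the elements
    // without going through an intermediate iterator.
    let mut merged: Vec<String> = parts.iter().map(|s| s.to_string()).collect();
    let mut rest = present;
    merged.append(&mut rest);

    assert!(merged.len() >= args.len());
}
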
@@ -2189,8 +2189,7 @@ impl<'a> State<'a> {
 Options(InlineAsmOptions),
 }

-let mut args = vec![];
-args.push(AsmArg::Template(InlineAsmTemplatePiece::to_string(&asm.template)));
+let mut args = vec![AsmArg::Template(InlineAsmTemplatePiece::to_string(&asm.template))];
 args.extend(asm.operands.iter().map(|(o, _)| AsmArg::Operand(o)));
 if !asm.options.is_empty() {
 args.push(AsmArg::Options(asm.options));
@@ -365,7 +365,7 @@ pub fn llvm_global_features(sess: &Session) -> Vec<String> {

 features_string
 };
-features.extend(features_string.split(",").map(String::from));
+features.extend(features_string.split(',').map(String::from));
 }
 Some(_) | None => {}
 };
@@ -374,7 +374,7 @@ pub fn llvm_global_features(sess: &Session) -> Vec<String> {
 if s.is_empty() {
 return None;
 }
-let feature = if s.starts_with("+") || s.starts_with("-") {
+let feature = if s.starts_with('+') || s.starts_with('-') {
 &s[1..]
 } else {
 return Some(s.to_string());
@@ -834,7 +834,7 @@ fn ident_name_compatibility_hack(
 .flat_map(|c| c.as_os_str().to_str())
 .find(|c| c.starts_with("js-sys"))
 {
-let mut version = c.trim_start_matches("js-sys-").split(".");
+let mut version = c.trim_start_matches("js-sys-").split('.');
 if version.next() == Some("0")
 && version.next() == Some("3")
 && version
@@ -476,7 +476,7 @@ impl<T> PerNS<Option<T>> {

 /// Returns an iterator over the items which are `Some`.
 pub fn present_items(self) -> impl Iterator<Item = T> {
-IntoIter::new([self.type_ns, self.value_ns, self.macro_ns]).filter_map(|it| it)
+IntoIter::new([self.type_ns, self.value_ns, self.macro_ns]).flatten()
 }
 }

@@ -1357,8 +1357,8 @@ impl<'a> State<'a> {
 Options(ast::InlineAsmOptions),
 }

-let mut args = vec![];
-args.push(AsmArg::Template(ast::InlineAsmTemplatePiece::to_string(&asm.template)));
+let mut args =
+    vec![AsmArg::Template(ast::InlineAsmTemplatePiece::to_string(&asm.template))];
 args.extend(asm.operands.iter().map(|(o, _)| AsmArg::Operand(o)));
 if !asm.options.is_empty() {
 args.push(AsmArg::Options(asm.options));
@@ -576,7 +576,7 @@ pub fn is_known_lint_tool(m_item: Symbol, sess: &Session, attrs: &[ast::Attribut
 // NOTE: does no error handling; error handling is done by rustc_resolve.
 sess.filter_by_name(attrs, sym::register_tool)
 .filter_map(|attr| attr.meta_item_list())
-.flat_map(std::convert::identity)
+.flatten()
 .filter_map(|nested_meta| nested_meta.ident())
 .map(|ident| ident.name)
 .any(|name| name == m_item)
@@ -906,7 +906,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
 } else {
 return FfiUnsafe {
 ty,
-reason: format!("box cannot be represented as a single pointer"),
+reason: "box cannot be represented as a single pointer".to_string(),
 help: None,
 };
 }
@@ -135,7 +135,7 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
 let mut check_dup = |span: Span, str: &str, errors: &mut Errors| {
 if let Some(prev_span) = keys.get(str) {
 errors.error(span, format!("Symbol `{}` is duplicated", str));
-errors.error(*prev_span, format!("location of previous definition"));
+errors.error(*prev_span, "location of previous definition".to_string());
 } else {
 keys.insert(str.to_string(), span);
 }
@@ -385,7 +385,7 @@ impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [mir::abstract_const::N
 impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List<ty::BoundVariableKind> {
 fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> {
 let len = decoder.read_usize()?;
-Ok(decoder.tcx().mk_bound_variable_kinds((0..len).map(|_| Decodable::decode(decoder)))?)
+decoder.tcx().mk_bound_variable_kinds((0..len).map(|_| Decodable::decode(decoder)))
 }
 }

@@ -320,7 +320,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 .map(|n| format!("`{}`", n))
 .unwrap_or_else(|| "the mutable reference".to_string()),
 ),
-format!("&mut *"),
+"&mut *".to_string(),
 Applicability::MachineApplicable,
 );
 }
@@ -731,7 +731,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
 if suggestions.peek().is_some() {
 err.span_suggestions(
 path_segment.ident.span,
-&format!("use mutable method"),
+"use mutable method",
 suggestions,
 Applicability::MaybeIncorrect,
 );
@@ -46,7 +46,7 @@ pub fn merge_codegen_units<'tcx>(
 // Record that `second_smallest` now contains all the stuff that was in
 // `smallest` before.
 let mut consumed_cgu_names = cgu_contents.remove(&smallest.name()).unwrap();
-cgu_contents.get_mut(&second_smallest.name()).unwrap().extend(consumed_cgu_names.drain(..));
+cgu_contents.get_mut(&second_smallest.name()).unwrap().append(&mut consumed_cgu_names);

 debug!(
 "CodegenUnit {} merged into CodegenUnit {}",
@@ -255,7 +255,7 @@ impl NonConstOp for CellBorrow {
 );
 err.span_label(
 span,
-format!("this borrow of an interior mutable value may end up in the final value"),
+"this borrow of an interior mutable value may end up in the final value",
 );
 if let hir::ConstContext::Static(_) = ccx.const_kind() {
 err.help(
@@ -344,7 +344,7 @@ impl DebugCounters {
 return if counter_format.id {
 format!("{}#{}", block_label, id.index())
 } else {
-format!("{}", block_label)
+block_label.to_string()
 };
 }
 }
@@ -369,7 +369,7 @@ impl DebugCounters {
 }
 return format!("({})", self.format_counter_kind(counter_kind));
 }
-return format!("{}", self.format_counter_kind(counter_kind));
+return self.format_counter_kind(counter_kind).to_string();
 }
 }
 format!("#{}", operand.index().to_string())
@@ -526,8 +526,8 @@ impl TraverseCoverageGraphWithLoops {
 pub fn new(basic_coverage_blocks: &CoverageGraph) -> Self {
 let start_bcb = basic_coverage_blocks.start_node();
 let backedges = find_loop_backedges(basic_coverage_blocks);
-let mut context_stack = Vec::new();
-context_stack.push(TraversalContext { loop_backedges: None, worklist: vec![start_bcb] });
+let context_stack =
+    vec![TraversalContext { loop_backedges: None, worklist: vec![start_bcb] }];
 // `context_stack` starts with a `TraversalContext` for the main function context (beginning
 // with the `start` BasicCoverageBlock of the function). New worklists are pushed to the top
 // of the stack as loops are entered, and popped off of the stack when a loop's worklist is
@@ -614,8 +614,8 @@ impl Inliner<'tcx> {
 .vars_and_temps_iter()
 .map(|local| callee_body.local_decls[local].clone()),
 );
-caller_body.source_scopes.extend(callee_body.source_scopes.drain(..));
-caller_body.var_debug_info.extend(callee_body.var_debug_info.drain(..));
+caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..));
+caller_body.var_debug_info.append(&mut callee_body.var_debug_info);
 caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));

 caller_body[callsite.block].terminator = Some(Terminator {
@@ -147,8 +147,8 @@ fn validate_simd_shuffle(tcx: TyCtxt<'tcx>, args: &[Operand<'tcx>], span: Span)
 match &args[2] {
 Operand::Constant(_) => {} // all good
 _ => {
-let msg = format!("last argument of `simd_shuffle` is required to be a `const` item");
-tcx.sess.span_err(span, &msg);
+let msg = "last argument of `simd_shuffle` is required to be a `const` item";
+tcx.sess.span_err(span, msg);
 }
 }
 }
@@ -479,7 +479,7 @@ impl Visitor<'tcx> for ExtraComments<'tcx> {
 uv.promoted
 ),
 ty::ConstKind::Value(val) => format!("Value({:?})", val),
-ty::ConstKind::Error(_) => format!("Error"),
+ty::ConstKind::Error(_) => "Error".to_string(),
 };
 self.push(&format!("+ val: {}", val));
 }
@@ -1107,8 +1107,7 @@ impl<'a> Parser<'a> {
 e
 })?;

-let enum_definition =
-    EnumDef { variants: variants.into_iter().filter_map(|v| v).collect() };
+let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
 Ok((id, ItemKind::Enum(enum_definition, generics)))
 }

@@ -855,7 +855,7 @@ impl CheckAttrVisitor<'tcx> {
 hir_id,
 meta.span(),
 |lint| {
-lint.build(&format!("invalid `doc` attribute")).emit();
+lint.build(&"invalid `doc` attribute").emit();
 },
 );
 is_valid = false;
@@ -229,7 +229,7 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
 if let Some(main_def) = tcx.resolutions(()).main_def {
 if main_def.opt_fn_def_id().is_none() {
 // There is something at `crate::main`, but it is not a function definition.
-err.span_label(main_def.span, &format!("non-function item at `crate::main` is found"));
+err.span_label(main_def.span, "non-function item at `crate::main` is found");
 }
 }

@@ -620,8 +620,8 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
 };
 tcx.sess().struct_err(&format!("internal compiler error: encountered incremental compilation error with {:?}", dep_node))
 .help(&format!("This is a known issue with the compiler. Run {} to allow your project to compile", run_cmd))
-.note(&format!("Please follow the instructions below to create a bug report with the provided information"))
-.note(&format!("See <https://github.com/rust-lang/rust/issues/84970> for more information"))
+.note(&"Please follow the instructions below to create a bug report with the provided information")
+.note(&"See <https://github.com/rust-lang/rust/issues/84970> for more information")
 .emit();
 panic!("Found unstable fingerprints for {:?}: {:?}", dep_node, result);
 }
@@ -1061,7 +1061,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
 }
 err.span_suggestion(
 span,
-&format!("use this syntax instead"),
+&"use this syntax instead",
 format!("{path_str}"),
 Applicability::MaybeIncorrect,
 );
@@ -1867,7 +1867,7 @@ fn parse_extern_dep_specs(
 )
 });

-let locparts: Vec<_> = loc.split(":").collect();
+let locparts: Vec<_> = loc.split(':').collect();
 let spec = match &locparts[..] {
 ["raw", ..] => {
 // Don't want `:` split string
@@ -684,7 +684,7 @@ mod parse {
 Some(v) => v,
 };

-*slot = Some(match v.trim_end_matches("s") {
+*slot = Some(match v.trim_end_matches('s') {
 "statement" | "stmt" => MirSpanview::Statement,
 "terminator" | "term" => MirSpanview::Terminator,
 "block" | "basicblock" => MirSpanview::Block,
@@ -2018,7 +2018,7 @@ impl Target {

 if base.is_builtin {
 // This can cause unfortunate ICEs later down the line.
-return Err(format!("may not set is_builtin for targets not built-in"));
+return Err("may not set is_builtin for targets not built-in".to_string());
 }
 // Each field should have been read using `Json::remove_key` so any keys remaining are unused.
 let remaining_keys = obj.as_object().ok_or("Expected JSON object for target")?.keys();
@@ -124,11 +124,10 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
 self.impl_similar_to(trait_ref, obligation).unwrap_or_else(|| trait_ref.def_id());
 let trait_ref = trait_ref.skip_binder();

-let mut flags = vec![];
-flags.push((
+let mut flags = vec![(
 sym::ItemContext,
 self.describe_enclosure(obligation.cause.body_id).map(|s| s.to_owned()),
-));
+)];

 match obligation.cause.code {
 ObligationCauseCode::BuiltinDerivedObligation(..)
@@ -290,13 +290,9 @@ fn suggest_restriction(
 } else {
 // Trivial case: `T` needs an extra bound: `T: Bound`.
 let (sp, suggestion) = match (
-generics
-    .params
-    .iter()
-    .filter(|p| {
-        !matches!(p.kind, hir::GenericParamKind::Type { synthetic: Some(_), .. })
-    })
-    .next(),
+generics.params.iter().find(|p| {
+    !matches!(p.kind, hir::GenericParamKind::Type { synthetic: Some(_), .. })
+}),
 super_traits,
 ) {
 (_, None) => predicate_constraint(
@@ -90,8 +90,8 @@ fn dropck_outlives<'tcx>(

 // "outlives" represent types/regions that may be touched
 // by a destructor.
-result.kinds.extend(constraints.outlives.drain(..));
-result.overflows.extend(constraints.overflows.drain(..));
+result.kinds.append(&mut constraints.outlives);
+result.overflows.append(&mut constraints.overflows);

 // If we have even one overflow, we should stop trying to evaluate further --
 // chances are, the subsequent overflows for this evaluation won't provide useful
@@ -357,7 +357,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
 (expr_text, true)
 } else {
-(format!("(..)"), false)
+("(..)".to_string(), false)
 };

 let adjusted_text = if let Some(probe::AutorefOrPtrAdjustment::ToConstPtr) =
@@ -791,7 +791,7 @@ fn fn_sig_suggestion<'tcx>(
 })
 })
 .chain(std::iter::once(if sig.c_variadic { Some("...".to_string()) } else { None }))
-.filter_map(|arg| arg)
+.flatten()
 .collect::<Vec<String>>()
 .join(", ");
 let output = sig.output();
@@ -488,7 +488,7 @@ crate fn href(did: DefId, cx: &Context<'_>) -> Result<(String, ItemType, Vec<Str
 let cache = &cx.cache();
 let relative_to = &cx.current;
 fn to_module_fqp(shortty: ItemType, fqp: &[String]) -> &[String] {
-if shortty == ItemType::Module { &fqp[..] } else { &fqp[..fqp.len() - 1] }
+if shortty == ItemType::Module { fqp } else { &fqp[..fqp.len() - 1] }
 }

 if !did.is_local() && !cache.access_levels.is_public(did) && !cache.document_private {
@@ -509,7 +509,7 @@ crate fn href(did: DefId, cx: &Context<'_>) -> Result<(String, ItemType, Vec<Str
 match cache.extern_locations[&did.krate] {
 ExternalLocation::Remote(ref s) => {
 let s = s.trim_end_matches('/');
-let mut s = vec![&s[..]];
+let mut s = vec![s];
 s.extend(module_fqp[..].iter().map(String::as_str));
 s
 }