Auto merge of #109852 - Nilstrieb:rollup-g3mgxxw, r=Nilstrieb
Rollup of 4 pull requests

Successful merges:

- #109839 (Improve grammar of Iterator.partition_in_place)
- #109840 (Fix typo in std/src/os/fd/owned.rs)
- #109844 (a couple clippy::complexity fixes)
- #109846 (more clippy::complexity fixes (iter_kv_map, map_flatten, nonminimal_bool))

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
commit 11cd4ff34d
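Note: the clippy::complexity lints named in #109844/#109846 all rewrite needlessly indirect expressions. The sketch below is illustrative only (the names `demo`, `map`, and `opt` are invented, not code from this commit); it shows the three patterns that recur in the hunks that follow.

```rust
use std::collections::HashMap;

fn demo(map: HashMap<String, u32>, opt: Option<u32>) -> bool {
    // iter_kv_map: iterate keys/values directly instead of mapping over entries.
    let _keys: Vec<&String> = map.keys().collect(); // instead of map.iter().map(|(k, _)| k)
    let _vals: Vec<&u32> = map.values().collect();  // instead of map.iter().map(|(_, v)| v)

    // map_flatten: `.map(..).flatten()` on an Option collapses to `.and_then(..)`.
    let _doubled = opt.and_then(|n| n.checked_mul(2)); // instead of opt.map(|n| n.checked_mul(2)).flatten()

    // nonminimal_bool: `!x.is_some()` simplifies to `x.is_none()`.
    opt.is_none() // instead of !opt.is_some()
}

fn main() {
    let mut m = HashMap::new();
    m.insert("a".to_string(), 1);
    assert!(!demo(m, Some(3)));
}
```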
@@ -1190,8 +1190,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         // Set KCFI operand bundle
         let is_indirect_call = unsafe { llvm::LLVMIsAFunction(llfn).is_none() };
         let kcfi_bundle =
-            if self.tcx.sess.is_sanitizer_kcfi_enabled() && fn_abi.is_some() && is_indirect_call {
-                let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi.unwrap());
+            if let Some(fn_abi) = fn_abi && self.tcx.sess.is_sanitizer_kcfi_enabled() && is_indirect_call {
+                let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi);
                 Some(llvm::OperandBundleDef::new("kcfi", &[self.const_u32(kcfi_typeid)]))
             } else {
                 None
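Note: the rewritten condition above uses an `if let ... && ...` chain. As of this commit, let chains are a nightly-only feature (`let_chains`); rustc itself builds with nightly features. A hypothetical stand-alone sketch of the same style (names invented for illustration):

```rust
#![feature(let_chains)]

fn describe(fn_abi: Option<&str>, enabled: bool) -> Option<String> {
    // `let Some(..) = ..` and the boolean test share one condition, no nested `if`.
    if let Some(abi) = fn_abi && enabled {
        Some(format!("kcfi for {abi}"))
    } else {
        None
    }
}

fn main() {
    assert_eq!(describe(Some("C"), true).as_deref(), Some("kcfi for C"));
    assert_eq!(describe(None, true), None);
}
```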
@@ -378,8 +378,7 @@ pub(crate) fn get_dllimport<'tcx>(
     name: &str,
 ) -> Option<&'tcx DllImport> {
     tcx.native_library(id)
-        .map(|lib| lib.dll_imports.iter().find(|di| di.name.as_str() == name))
-        .flatten()
+        .and_then(|lib| lib.dll_imports.iter().find(|di| di.name.as_str() == name))
 }
 
 pub(crate) fn is_mingw_gnu_toolchain(target: &Target) -> bool {
@@ -677,8 +677,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
             );
         }
         if let Rvalue::CopyForDeref(place) = rvalue {
-            if !place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_some()
-            {
+            if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
                 self.fail(
                     location,
                     "`CopyForDeref` should only be used for dereferenceable types",
@@ -2235,7 +2235,7 @@ impl EmitterWriter {
                 }
             } else if is_multiline {
                 buffer.puts(*row_num, 0, &self.maybe_anonymized(line_num), Style::LineNumber);
-                match &highlight_parts[..] {
+                match &highlight_parts {
                     [SubstitutionHighlight { start: 0, end }] if *end == line_to_add.len() => {
                         buffer.puts(*row_num, max_line_num_len + 1, "+ ", Style::Addition);
                     }
@@ -483,8 +483,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             [segment] if segment.args.is_none() => {
                 trait_bound_spans = vec![segment.ident.span];
                 associated_types = associated_types
-                    .into_iter()
-                    .map(|(_, items)| (segment.ident.span, items))
+                    .into_values()
+                    .map(|items| (segment.ident.span, items))
                     .collect();
             }
             _ => {}
@@ -559,7 +559,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
                 // struct; however, when EUV is run during typeck, it
                 // may not. This will generate an error earlier in typeck,
                 // so we can just ignore it.
-                if !self.tcx().sess.has_errors().is_some() {
+                if self.tcx().sess.has_errors().is_none() {
                     span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
                 }
             }
@@ -978,7 +978,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
             let (_, sig, reg) = ty::print::FmtPrinter::new(self.tcx, Namespace::TypeNS)
                 .name_all_regions(sig)
                 .unwrap();
-            let lts: Vec<String> = reg.into_iter().map(|(_, kind)| kind.to_string()).collect();
+            let lts: Vec<String> = reg.into_values().map(|kind| kind.to_string()).collect();
             (if lts.is_empty() { String::new() } else { format!("for<{}> ", lts.join(", ")) }, sig)
         };
 
@@ -2399,10 +2399,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 let suggestion =
                     if has_lifetimes { format!(" + {}", sub) } else { format!(": {}", sub) };
                 let mut suggestions = vec![(sp, suggestion)];
-                for add_lt_sugg in add_lt_suggs {
-                    if let Some(add_lt_sugg) = add_lt_sugg {
-                        suggestions.push(add_lt_sugg);
-                    }
+                for add_lt_sugg in add_lt_suggs.into_iter().flatten() {
+                    suggestions.push(add_lt_sugg);
                 }
                 err.multipart_suggestion_verbose(
                     format!("{msg}..."),
@@ -2426,11 +2424,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 };
                 let mut sugg =
                     vec![(sp, suggestion), (span.shrink_to_hi(), format!(" + {}", new_lt))];
-                for add_lt_sugg in add_lt_suggs.clone() {
-                    if let Some(lt) = add_lt_sugg {
-                        sugg.push(lt);
-                        sugg.rotate_right(1);
-                    }
+                for lt in add_lt_suggs.clone().into_iter().flatten() {
+                    sugg.push(lt);
+                    sugg.rotate_right(1);
                 }
                 // `MaybeIncorrect` due to issue #41966.
                 err.multipart_suggestion(msg, sugg, Applicability::MaybeIncorrect);
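Note: both loops above replace an inner `if let Some(..)` with `Iterator::flatten()`, which simply skips `None` entries when iterating a collection of `Option`s. A minimal sketch with invented data:

```rust
fn main() {
    let add_lt_suggs: Vec<Option<(usize, String)>> =
        vec![Some((1, "'a".to_string())), None, Some((2, "'b".to_string()))];

    // `.flatten()` yields only the `Some` payloads, so the loop body needs no `if let`.
    let mut suggestions = Vec::new();
    for sugg in add_lt_suggs.into_iter().flatten() {
        suggestions.push(sugg);
    }
    assert_eq!(suggestions.len(), 2);
}
```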
@@ -19,7 +19,7 @@ pub mod lib_features {
             .stable
             .iter()
             .map(|(f, (s, _))| (*f, Some(*s)))
-            .chain(self.unstable.iter().map(|(f, _)| (*f, None)))
+            .chain(self.unstable.keys().map(|f| (*f, None)))
             .collect();
         all_features.sort_unstable_by(|a, b| a.0.as_str().partial_cmp(b.0.as_str()).unwrap());
         all_features
@@ -79,7 +79,7 @@ impl<'tcx> ValTree<'tcx> {
     }
 
     pub fn try_to_target_usize(self, tcx: TyCtxt<'tcx>) -> Option<u64> {
-        self.try_to_scalar_int().map(|s| s.try_to_target_usize(tcx).ok()).flatten()
+        self.try_to_scalar_int().and_then(|s| s.try_to_target_usize(tcx).ok())
     }
 
     /// Get the values inside the ValTree as a slice of bytes. This only works for
@@ -62,21 +62,21 @@ pub fn as_constant_inner<'tcx>(
             Constant { span, user_ty: None, literal }
         }
         ExprKind::NonHirLiteral { lit, ref user_ty } => {
-            let user_ty = user_ty.as_ref().map(push_cuta).flatten();
+            let user_ty = user_ty.as_ref().and_then(push_cuta);
 
             let literal = ConstantKind::Val(ConstValue::Scalar(Scalar::Int(lit)), ty);
 
             Constant { span, user_ty, literal }
         }
         ExprKind::ZstLiteral { ref user_ty } => {
-            let user_ty = user_ty.as_ref().map(push_cuta).flatten();
+            let user_ty = user_ty.as_ref().and_then(push_cuta);
 
             let literal = ConstantKind::Val(ConstValue::ZeroSized, ty);
 
             Constant { span, user_ty, literal }
         }
         ExprKind::NamedConst { def_id, substs, ref user_ty } => {
-            let user_ty = user_ty.as_ref().map(push_cuta).flatten();
+            let user_ty = user_ty.as_ref().and_then(push_cuta);
 
             let uneval = mir::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
             let literal = ConstantKind::Unevaluated(uneval, ty);
@@ -651,8 +651,8 @@ fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
     let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance);
     let span = tcx.def_span(instance.def_id());
     let mut path = PathBuf::new();
-    let was_written = if written_to_path.is_some() {
-        path = written_to_path.unwrap();
+    let was_written = if let Some(path2) = written_to_path {
+        path = path2;
         Some(())
     } else {
         None
@@ -89,10 +89,7 @@ impl<'tcx> Partitioner<'tcx> for DefaultPartitioning {
         }
 
         PreInliningPartitioning {
-            codegen_units: codegen_units
-                .into_iter()
-                .map(|(_, codegen_unit)| codegen_unit)
-                .collect(),
+            codegen_units: codegen_units.into_values().map(|codegen_unit| codegen_unit).collect(),
             roots,
             internalization_candidates,
         }
@@ -206,7 +206,7 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_>) {
     // The file may be empty, which leads to the diagnostic machinery not emitting this
     // note. This is a relatively simple way to detect that case and emit a span-less
     // note instead.
-    let file_empty = !tcx.sess.source_map().lookup_line(sp.hi()).is_ok();
+    let file_empty = tcx.sess.source_map().lookup_line(sp.hi()).is_err();
 
     tcx.sess.emit_err(NoMainErr {
         sp,
@@ -1669,8 +1669,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
     ) -> Option<Symbol> {
         let mut candidates = self
             .extern_prelude
-            .iter()
-            .map(|(ident, _)| ident.name)
+            .keys()
+            .map(|ident| ident.name)
             .chain(
                 self.module_map
                     .iter()
@@ -2007,7 +2007,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
         // 1) some consistent ordering for emitted diagnostics, and
         // 2) `std` suggestions before `core` suggestions.
         let mut extern_crate_names =
-            self.extern_prelude.iter().map(|(ident, _)| ident.name).collect::<Vec<_>>();
+            self.extern_prelude.keys().map(|ident| ident.name).collect::<Vec<_>>();
         extern_crate_names.sort_by(|a, b| b.as_str().partial_cmp(a.as_str()).unwrap());
 
         for name in extern_crate_names.into_iter() {
@@ -389,7 +389,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
             }
         }
 
-        assert!(force || !finalize.is_some()); // `finalize` implies `force`
+        assert!(force || finalize.is_none()); // `finalize` implies `force`
 
         // Make sure `self`, `super` etc produce an error when passed to here.
         if orig_ident.is_path_segment_keyword() {
@@ -2421,8 +2421,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
                 .iter()
                 .rfind(|r| matches!(r.kind, ItemRibKind(_)))
                 .expect("associated item outside of an item");
-            seen_bindings
-                .extend(parent_rib.bindings.iter().map(|(ident, _)| (*ident, ident.span)));
+            seen_bindings.extend(parent_rib.bindings.keys().map(|ident| (*ident, ident.span)));
         };
         add_bindings_for_ns(ValueNS);
         add_bindings_for_ns(TypeNS);
@@ -911,7 +911,7 @@ mod parse {
         let mut seen_instruction_threshold = false;
         let mut seen_skip_entry = false;
         let mut seen_skip_exit = false;
-        for option in v.into_iter().map(|v| v.split(',')).flatten() {
+        for option in v.into_iter().flat_map(|v| v.split(',')) {
             match option {
                 "always" if !seen_always && !seen_never => {
                     options.always = true;
@@ -109,7 +109,7 @@ fn assert_default_hashing_controls<CTX: HashStableContext>(ctx: &CTX, msg: &str)
         // This is the case for instance when building a hash for name mangling.
         // Such configuration must not be used for metadata.
         HashingControls { hash_spans }
-            if hash_spans == !ctx.unstable_opts_incremental_ignore_spans() => {}
+            if hash_spans != ctx.unstable_opts_incremental_ignore_spans() => {}
         other => panic!("Attempted hashing of {msg} with non-default HashingControls: {other:?}"),
     }
 }
@@ -3888,8 +3888,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
                 if let Some(slice_ty) = candidate_impls
                     .iter()
                     .map(|trait_ref| trait_ref.trait_ref.self_ty())
-                    .filter(|t| is_slice(*t))
-                    .next()
+                    .find(|t| is_slice(*t))
                 {
                     let msg = &format!("convert the array to a `{}` slice instead", slice_ty);
 
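Note: `Iterator::find` is the single-call equivalent of the `.filter(..).next()` chain removed above. A tiny sketch with invented data:

```rust
fn main() {
    let candidates = ["u8", "[u8]", "String"];

    // `.find(..)` stops at the first match, like `.filter(..).next()` in the hunk above.
    let first_slice = candidates.iter().find(|t| t.starts_with('['));
    assert_eq!(first_slice, Some(&"[u8]"));
}
```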
@@ -3936,7 +3935,7 @@ fn hint_missing_borrow<'tcx>(
     // This could be a variant constructor, for example.
     let Some(fn_decl) = found_node.fn_decl() else { return; };
 
-    let args = fn_decl.inputs.iter().map(|ty| ty);
+    let args = fn_decl.inputs.iter();
 
     fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, Vec<hir::Mutability>) {
         let mut refs = vec![];
@@ -110,8 +110,6 @@ impl<'a, 'tcx: 'a> InferCtxtExt<'a, 'tcx> for InferCtxt<'tcx> {
         body_id: LocalDefId,
         tys: FxIndexSet<Ty<'tcx>>,
     ) -> Bounds<'a, 'tcx> {
-        tys.into_iter()
-            .map(move |ty| self.implied_outlives_bounds(param_env, body_id, ty))
-            .flatten()
+        tys.into_iter().flat_map(move |ty| self.implied_outlives_bounds(param_env, body_id, ty))
     }
 }
@@ -909,8 +909,7 @@ pub(crate) fn collect_bound_vars<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>(
             .or_else(|| bug!("Skipped bound var index: parameters={:?}", parameters));
     });
 
-    let binders =
-        chalk_ir::VariableKinds::from_iter(interner, parameters.into_iter().map(|(_, v)| v));
+    let binders = chalk_ir::VariableKinds::from_iter(interner, parameters.into_values());
 
     (new_ty, binders, named_parameters)
 }
@@ -2120,8 +2120,8 @@ pub trait Iterator {
     ///
     /// # Current implementation
     ///
-    /// Current algorithms tries finding the first element for which the predicate evaluates
-    /// to false, and the last element for which it evaluates to true and repeatedly swaps them.
+    /// The current algorithm tries to find the first element for which the predicate evaluates
+    /// to false and the last element for which it evaluates to true, and repeatedly swaps them.
     ///
     /// Time complexity: *O*(*n*)
     ///
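Note: the reworded doc comment belongs to `Iterator::partition_in_place`, which at the time of this commit is still unstable behind the `iter_partition_in_place` feature, so the usage sketch below only compiles on nightly:

```rust
#![feature(iter_partition_in_place)]

fn main() {
    let mut data = [1, 2, 3, 4, 5, 6, 7];

    // Move elements matching the predicate to the front and return the split index.
    let split = data.iter_mut().partition_in_place(|&n| n % 2 == 0);

    assert_eq!(split, 3);
    assert!(data[..split].iter().all(|n| n % 2 == 0)); // evens first
    assert!(data[split..].iter().all(|n| n % 2 == 1)); // odds after
}
```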
@@ -268,7 +268,7 @@ impl AsFd for OwnedFd {
     #[inline]
     fn as_fd(&self) -> BorrowedFd<'_> {
         // Safety: `OwnedFd` and `BorrowedFd` have the same validity
-        // invariants, and the `BorrowdFd` is bounded by the lifetime
+        // invariants, and the `BorrowedFd` is bounded by the lifetime
         // of `&self`.
         unsafe { BorrowedFd::borrow_raw(self.as_raw_fd()) }
     }
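Note: the comment being fixed describes the `AsFd` contract: `as_fd` hands out a `BorrowedFd` whose lifetime is tied to `&self`, so the descriptor stays open while the borrow lives. A small Unix-only sketch (the path is arbitrary, chosen only for illustration):

```rust
use std::fs::File;
use std::os::fd::{AsFd, AsRawFd, BorrowedFd};

fn fd_is_valid(fd: BorrowedFd<'_>) -> bool {
    // A BorrowedFd always wraps a descriptor that is open for its lifetime.
    fd.as_raw_fd() >= 0
}

fn main() -> std::io::Result<()> {
    let file = File::open("/dev/null")?;
    // The BorrowedFd cannot outlive `file`, mirroring the safety comment above.
    assert!(fd_is_valid(file.as_fd()));
    Ok(())
}
```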
@@ -31,18 +31,13 @@ use crate::passes::{self, Condition};
 use crate::scrape_examples::{AllCallLocations, ScrapeExamplesOptions};
 use crate::theme;
 
-#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
 pub(crate) enum OutputFormat {
     Json,
+    #[default]
     Html,
 }
 
-impl Default for OutputFormat {
-    fn default() -> OutputFormat {
-        OutputFormat::Html
-    }
-}
-
 impl OutputFormat {
     pub(crate) fn is_json(&self) -> bool {
         matches!(self, OutputFormat::Json)
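Note: the rustdoc change above swaps a hand-written `impl Default` for `#[derive(Default)]` with the `#[default]` variant attribute, which has been stable since Rust 1.62. A standalone sketch of the same pattern with an invented enum:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
enum OutputKind {
    Json,
    #[default]
    Html,
}

fn main() {
    // The derive picks the variant marked #[default].
    assert_eq!(OutputKind::default(), OutputKind::Html);
}
```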
@@ -177,8 +177,8 @@ impl<'a, 'tcx, F: Write> TokenHandler<'a, 'tcx, F> {
         } else {
             // We only want to "open" the tag ourselves if we have more than one pending and if the
             // current parent tag is not the same as our pending content.
-            let close_tag = if self.pending_elems.len() > 1 && current_class.is_some() {
-                Some(enter_span(self.out, current_class.unwrap(), &self.href_context))
+            let close_tag = if self.pending_elems.len() > 1 && let Some(current_class) = current_class {
+                Some(enter_span(self.out, current_class, &self.href_context))
             } else {
                 None
             };
@@ -113,11 +113,8 @@ pub(super) fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buf
     } else {
         ("", "")
     };
-    let version = if it.is_crate() {
-        cx.cache().crate_version.as_ref().map(String::as_str).unwrap_or_default()
-    } else {
-        ""
-    };
+    let version =
+        if it.is_crate() { cx.cache().crate_version.as_deref().unwrap_or_default() } else { "" };
     let path: String = if !it.is_mod() {
         cx.current.iter().map(|s| s.as_str()).intersperse("::").collect()
     } else {
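Note: the rustdoc sidebar change relies on `Option::as_deref`, which turns an `Option<String>` into an `Option<&str>` in one step. A minimal sketch with a placeholder version value:

```rust
fn main() {
    let crate_version: Option<String> = Some("1.0.0".to_string());

    // `as_deref()` replaces `.as_ref().map(String::as_str)`.
    let version: &str = crate_version.as_deref().unwrap_or_default();
    assert_eq!(version, "1.0.0");

    let missing: Option<String> = None;
    assert_eq!(missing.as_deref().unwrap_or_default(), "");
}
```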
@@ -810,7 +810,7 @@ fn trait_impls_for<'a>(
 ///
 /// These are common and we should just resolve to the trait in that case.
 fn is_derive_trait_collision<T>(ns: &PerNS<Result<Vec<(Res, T)>, ResolutionFailure<'_>>>) -> bool {
-    if let (&Ok(ref type_ns), &Ok(ref macro_ns)) = (&ns.type_ns, &ns.macro_ns) {
+    if let (Ok(type_ns), Ok(macro_ns)) = (&ns.type_ns, &ns.macro_ns) {
         type_ns.iter().any(|(res, _)| matches!(res, Res::Def(DefKind::Trait, _)))
             && macro_ns
                 .iter()