Rollup merge of #119004 - matthiaskrgr:conv, r=compiler-errors

NFC: don't convert types to identical types
Authored by Jubilee on 2023-12-15 21:33:00 -08:00; committed by GitHub.
commit c5a3d98cc6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
24 changed files with 35 additions and 50 deletions

View File

@ -1260,9 +1260,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
); );
// `a = lhs1; b = lhs2;`. // `a = lhs1; b = lhs2;`.
let stmts = self let stmts = self.arena.alloc_from_iter(std::iter::once(destructure_let).chain(assignments));
.arena
.alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
// Wrap everything in a block. // Wrap everything in a block.
hir::ExprKind::Block(self.block_all(whole_span, stmts, None), None) hir::ExprKind::Block(self.block_all(whole_span, stmts, None), None)

View File

@ -607,7 +607,7 @@ pub fn file_metadata<'ll>(cx: &CodegenCx<'ll, '_>, source_file: &SourceFile) ->
if let Ok(rel_path) = abs_path.strip_prefix(working_directory) { if let Ok(rel_path) = abs_path.strip_prefix(working_directory) {
( (
working_directory.to_string_lossy().into(), working_directory.to_string_lossy(),
rel_path.to_string_lossy().into_owned(), rel_path.to_string_lossy().into_owned(),
) )
} else { } else {

View File

@ -396,7 +396,7 @@ pub fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>(
}) })
.collect(); .collect();
state_specific_fields.into_iter().chain(common_fields.into_iter()).collect() state_specific_fields.into_iter().chain(common_fields).collect()
}, },
|cx| build_generic_type_param_di_nodes(cx, coroutine_type_and_layout.ty), |cx| build_generic_type_param_di_nodes(cx, coroutine_type_and_layout.ty),
) )

View File

@ -198,7 +198,7 @@ impl<K: Ord, V> SortedMap<K, V> {
if index == self.data.len() || elements.last().unwrap().0 < self.data[index].0 { if index == self.data.len() || elements.last().unwrap().0 < self.data[index].0 {
// We can copy the whole range without having to mix with // We can copy the whole range without having to mix with
// existing elements. // existing elements.
self.data.splice(index..index, elements.into_iter()); self.data.splice(index..index, elements);
return; return;
} }

View File

@ -1557,7 +1557,7 @@ impl Expr<'_> {
ExprKind::Call(..) => ExprPrecedence::Call, ExprKind::Call(..) => ExprPrecedence::Call,
ExprKind::MethodCall(..) => ExprPrecedence::MethodCall, ExprKind::MethodCall(..) => ExprPrecedence::MethodCall,
ExprKind::Tup(_) => ExprPrecedence::Tup, ExprKind::Tup(_) => ExprPrecedence::Tup,
ExprKind::Binary(op, ..) => ExprPrecedence::Binary(op.node.into()), ExprKind::Binary(op, ..) => ExprPrecedence::Binary(op.node),
ExprKind::Unary(..) => ExprPrecedence::Unary, ExprKind::Unary(..) => ExprPrecedence::Unary,
ExprKind::Lit(_) => ExprPrecedence::Lit, ExprKind::Lit(_) => ExprPrecedence::Lit,
ExprKind::Type(..) | ExprKind::Cast(..) => ExprPrecedence::Cast, ExprKind::Type(..) | ExprKind::Cast(..) => ExprPrecedence::Cast,
@ -1697,11 +1697,9 @@ impl Expr<'_> {
// them being used only for its side-effects. // them being used only for its side-effects.
base.can_have_side_effects() base.can_have_side_effects()
} }
ExprKind::Struct(_, fields, init) => fields ExprKind::Struct(_, fields, init) => {
.iter() fields.iter().map(|field| field.expr).chain(init).any(|e| e.can_have_side_effects())
.map(|field| field.expr) }
.chain(init.into_iter())
.any(|e| e.can_have_side_effects()),
ExprKind::Array(args) ExprKind::Array(args)
| ExprKind::Tup(args) | ExprKind::Tup(args)

View File

@ -350,7 +350,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
// Nested poly trait refs have the binders concatenated // Nested poly trait refs have the binders concatenated
let mut full_binders = let mut full_binders =
self.map.late_bound_vars.entry(*hir_id).or_default().clone(); self.map.late_bound_vars.entry(*hir_id).or_default().clone();
full_binders.extend(supertrait_bound_vars.into_iter()); full_binders.extend(supertrait_bound_vars);
break (full_binders, BinderScopeType::Concatenating); break (full_binders, BinderScopeType::Concatenating);
} }
} }

View File

@ -573,13 +573,13 @@ impl rustc_errors::AddToDiagnostic for CastUnknownPointerSub {
{ {
match self { match self {
CastUnknownPointerSub::To(span) => { CastUnknownPointerSub::To(span) => {
let msg = f(diag, crate::fluent_generated::hir_typeck_label_to.into()); let msg = f(diag, crate::fluent_generated::hir_typeck_label_to);
diag.span_label(span, msg); diag.span_label(span, msg);
let msg = f(diag, crate::fluent_generated::hir_typeck_note.into()); let msg = f(diag, crate::fluent_generated::hir_typeck_note);
diag.note(msg); diag.note(msg);
} }
CastUnknownPointerSub::From(span) => { CastUnknownPointerSub::From(span) => {
let msg = f(diag, crate::fluent_generated::hir_typeck_label_from.into()); let msg = f(diag, crate::fluent_generated::hir_typeck_label_from);
diag.span_label(span, msg); diag.span_label(span, msg);
} }
} }

View File

@ -1546,9 +1546,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
); );
let candidate_obligations = impl_obligations let candidate_obligations = impl_obligations
.chain(norm_obligations.into_iter()) .chain(norm_obligations)
.chain(ref_obligations.iter().cloned()) .chain(ref_obligations.iter().cloned())
.chain(normalization_obligations.into_iter()); .chain(normalization_obligations);
// Evaluate those obligations to see if they might possibly hold. // Evaluate those obligations to see if they might possibly hold.
for o in candidate_obligations { for o in candidate_obligations {

View File

@ -130,12 +130,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
// see the extensive comment in projection_must_outlive // see the extensive comment in projection_must_outlive
let recursive_bound = { let recursive_bound = {
let mut components = smallvec![]; let mut components = smallvec![];
compute_alias_components_recursive( compute_alias_components_recursive(self.tcx, alias_ty_as_ty, &mut components, visited);
self.tcx,
alias_ty_as_ty.into(),
&mut components,
visited,
);
self.bound_from_components(&components, visited) self.bound_from_components(&components, visited)
}; };

View File

@ -552,7 +552,7 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
} }
pub fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { pub fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) {
self.obligations.extend(obligations.into_iter()); self.obligations.extend(obligations);
} }
pub fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ToPredicate<'tcx>>) { pub fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ToPredicate<'tcx>>) {

View File

@ -956,11 +956,7 @@ impl Map {
// The local is not tracked at all, so it does not alias anything. // The local is not tracked at all, so it does not alias anything.
return; return;
}; };
let elems = place let elems = place.projection.iter().map(|&elem| elem.try_into()).chain(tail_elem.map(Ok));
.projection
.iter()
.map(|&elem| elem.try_into())
.chain(tail_elem.map(Ok).into_iter());
for elem in elems { for elem in elems {
// A field aliases the parent place. // A field aliases the parent place.
if let Some(vi) = self.places[index].value_index { if let Some(vi) = self.places[index].value_index {

View File

@ -496,7 +496,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
FlatSet::Elem(scalar) => { FlatSet::Elem(scalar) => {
let ty = op.ty(self.local_decls, self.tcx); let ty = op.ty(self.local_decls, self.tcx);
self.tcx.layout_of(self.param_env.and(ty)).map_or(FlatSet::Top, |layout| { self.tcx.layout_of(self.param_env.and(ty)).map_or(FlatSet::Top, |layout| {
FlatSet::Elem(ImmTy::from_scalar(scalar.into(), layout)) FlatSet::Elem(ImmTy::from_scalar(scalar, layout))
}) })
} }
FlatSet::Bottom => FlatSet::Bottom, FlatSet::Bottom => FlatSet::Bottom,

View File

@ -923,7 +923,7 @@ impl<'a> Parser<'a> {
); );
let where_predicates_split = before_where_clause.predicates.len(); let where_predicates_split = before_where_clause.predicates.len();
let mut predicates = before_where_clause.predicates; let mut predicates = before_where_clause.predicates;
predicates.extend(after_where_clause.predicates.into_iter()); predicates.extend(after_where_clause.predicates);
let where_clause = WhereClause { let where_clause = WhereClause {
has_where_token: before_where_clause.has_where_token has_where_token: before_where_clause.has_where_token
|| after_where_clause.has_where_token, || after_where_clause.has_where_token,

View File

@ -215,7 +215,7 @@ fn emit_malformed_attribute(
} else { } else {
"the following are the possible correct uses" "the following are the possible correct uses"
}, },
suggestions.into_iter(), suggestions,
Applicability::HasPlaceholders, Applicability::HasPlaceholders,
) )
.emit(); .emit();

View File

@ -407,8 +407,7 @@ fn parse_links<'md>(doc: &'md str) -> Vec<Box<str>> {
doc, doc,
main_body_opts(), main_body_opts(),
Some(&mut broken_link_callback), Some(&mut broken_link_callback),
) );
.into_iter();
let mut links = Vec::new(); let mut links = Vec::new();
while let Some(event) = event_iter.next() { while let Some(event) = event_iter.next() {

View File

@ -1579,7 +1579,7 @@ impl CheckCfg {
pub fn build_configuration(sess: &Session, mut user_cfg: Cfg) -> Cfg { pub fn build_configuration(sess: &Session, mut user_cfg: Cfg) -> Cfg {
// Combine the configuration requested by the session (command line) with // Combine the configuration requested by the session (command line) with
// some default and generated configuration items. // some default and generated configuration items.
user_cfg.extend(default_configuration(sess).into_iter()); user_cfg.extend(default_configuration(sess));
user_cfg user_cfg
} }

View File

@ -158,7 +158,6 @@ impl<'tcx> Context for TablesWrapper<'tcx> {
let crate_name = tables.tcx.crate_name(*crate_num).to_string(); let crate_name = tables.tcx.crate_name(*crate_num).to_string();
(name == crate_name).then(|| smir_crate(tables.tcx, *crate_num)) (name == crate_name).then(|| smir_crate(tables.tcx, *crate_num))
}) })
.into_iter()
.flatten() .flatten()
.collect(); .collect();
crates crates

View File

@ -344,7 +344,7 @@ fn generics_require_sized_self(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
// Search for a predicate like `Self : Sized` amongst the trait bounds. // Search for a predicate like `Self : Sized` amongst the trait bounds.
let predicates = tcx.predicates_of(def_id); let predicates = tcx.predicates_of(def_id);
let predicates = predicates.instantiate_identity(tcx).predicates; let predicates = predicates.instantiate_identity(tcx).predicates;
elaborate(tcx, predicates.into_iter()).any(|pred| match pred.kind().skip_binder() { elaborate(tcx, predicates).any(|pred| match pred.kind().skip_binder() {
ty::ClauseKind::Trait(ref trait_pred) => { ty::ClauseKind::Trait(ref trait_pred) => {
trait_pred.def_id() == sized_def_id && trait_pred.self_ty().is_param(0) trait_pred.def_id() == sized_def_id && trait_pred.self_ty().is_param(0)
} }

View File

@ -220,9 +220,8 @@ pub fn impl_subject_and_oblig<'a, 'tcx>(
selcx.infcx.at(&ObligationCause::dummy(), param_env).normalize(predicates); selcx.infcx.at(&ObligationCause::dummy(), param_env).normalize(predicates);
let impl_obligations = super::predicates_for_generics(cause, param_env, predicates); let impl_obligations = super::predicates_for_generics(cause, param_env, predicates);
let impl_obligations = impl_obligations let impl_obligations =
.chain(normalization_obligations1.into_iter()) impl_obligations.chain(normalization_obligations1).chain(normalization_obligations2);
.chain(normalization_obligations2.into_iter());
(subject, impl_obligations) (subject, impl_obligations)
} }

View File

@ -727,7 +727,7 @@ pub(crate) fn clean_generics<'tcx>(
.into_iter() .into_iter()
.map(|(lifetime, bounds)| WherePredicate::RegionPredicate { lifetime, bounds }), .map(|(lifetime, bounds)| WherePredicate::RegionPredicate { lifetime, bounds }),
) )
.chain(eq_predicates.into_iter()) .chain(eq_predicates)
.collect(), .collect(),
} }
} }

View File

@ -1381,7 +1381,7 @@ impl LangString {
}; };
if custom_code_classes_in_docs { if custom_code_classes_in_docs {
call(&mut TagIterator::new(string, extra).into_iter()) call(&mut TagIterator::new(string, extra))
} else { } else {
call(&mut tokens(string)) call(&mut tokens(string))
} }

View File

@ -33,7 +33,7 @@ where
let lints = || { let lints = || {
lint::builtin::HardwiredLints::get_lints() lint::builtin::HardwiredLints::get_lints()
.into_iter() .into_iter()
.chain(rustc_lint::SoftLints::get_lints().into_iter()) .chain(rustc_lint::SoftLints::get_lints())
}; };
let lint_opts = lints() let lint_opts = lints()
@ -46,7 +46,7 @@ where
filter_call(lint) filter_call(lint)
} }
}) })
.chain(lint_opts.into_iter()) .chain(lint_opts)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let lint_caps = lints() let lint_caps = lints()

View File

@ -48,7 +48,7 @@ struct TestsWithCustomClasses {
impl crate::doctest::Tester for TestsWithCustomClasses { impl crate::doctest::Tester for TestsWithCustomClasses {
fn add_test(&mut self, _: String, config: LangString, _: usize) { fn add_test(&mut self, _: String, config: LangString, _: usize) {
self.custom_classes_found.extend(config.added_classes.into_iter()); self.custom_classes_found.extend(config.added_classes);
} }
} }

View File

@ -168,10 +168,11 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
}) })
.chain( .chain([
[Cfg::Cfg(sym::test, None), Cfg::Cfg(sym::doc, None), Cfg::Cfg(sym::doctest, None)] Cfg::Cfg(sym::test, None),
.into_iter(), Cfg::Cfg(sym::doc, None),
) Cfg::Cfg(sym::doctest, None),
])
.collect(); .collect();
self.cx.cache.exact_paths = self.exact_paths; self.cx.cache.exact_paths = self.exact_paths;