Merge #10610
10610: minor: Use array `IntoIter` r=lnicola a=lnicola

bors r+

Co-authored-by: Laurențiu Nicola <lnicola@dend.ro>
commit bbbb0e5f9a
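The whole change is one mechanical pattern: since Rust 1.53 arrays implement `IntoIterator` by value, and under the 2021 edition a plain `array.into_iter()` method call resolves to that impl, so `std::array::IntoIter::new([...])`, `[...].iter().cloned()` and `[...].iter().copied()` can all be written as `[...].into_iter()`. The sketch below is not rust-analyzer code; it just contrasts the old and new spellings, assuming an edition-2021 crate.

```rust
// Contrast of the spellings this PR replaces (illustrative sketch, edition 2021 assumed).
fn main() {
    let names = ["check", "test"];

    // Old: borrow the array and copy each `&str` back out of the reference ...
    let a: Vec<String> = names.iter().copied().map(String::from).collect();

    // ... or go through the explicit constructor (deprecated since Rust 1.59).
    #[allow(deprecated)]
    let b: Vec<String> = std::array::IntoIter::new(names).map(String::from).collect();

    // New: consume the array directly; `into_iter()` yields each `&str` by value here.
    let c: Vec<String> = names.into_iter().map(String::from).collect();

    assert_eq!(a, b);
    assert_eq!(b, c);
}
```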
@@ -283,7 +283,7 @@ pub fn mirror(input: TokenStream) -> TokenStream {
 input
 }
 "#;
-let proc_macros = std::array::IntoIter::new([
+let proc_macros = [
 ProcMacro {
 name: "identity".into(),
 kind: crate::ProcMacroKind::Attr,
@@ -304,7 +304,8 @@ pub fn mirror(input: TokenStream) -> TokenStream {
 kind: crate::ProcMacroKind::FuncLike,
 expander: Arc::new(MirrorProcMacroExpander),
 },
-])
+]
+.into_iter()
 .filter(|pm| proc_macros.iter().any(|name| name == pm.name))
 .collect();
 (proc_macros, source.into())
@@ -2392,7 +2392,7 @@ pub fn normalize_trait_assoc_type(
 }
 .cast(&Interner),
 ),
-[TyVariableKind::General].iter().copied(),
+[TyVariableKind::General].into_iter(),
 );

 match db.trait_solve(self.krate, goal)? {
@@ -507,12 +507,12 @@ fn inject_prelude(&mut self, crate_attrs: &Attrs) {
 };
 let path = ModPath::from_segments(
 path_kind.clone(),
-[krate.clone(), name![prelude], edition].iter().cloned(),
+[krate.clone(), name![prelude], edition].into_iter(),
 );
 // Fall back to the older `std::prelude::v1` for compatibility with Rust <1.52.0
 // FIXME remove this fallback
 let fallback_path =
-ModPath::from_segments(path_kind, [krate, name![prelude], name![v1]].iter().cloned());
+ModPath::from_segments(path_kind, [krate, name![prelude], name![v1]].into_iter());

 for path in &[path, fallback_path] {
 let (per_ns, _) = self.def_map.resolve_path(
@@ -1,7 +1,7 @@
 //! Helper functions for working with def, which don't need to be a separate
 //! query, but can't be computed directly from `*Data` (ie, which need a `db`).

-use std::{array, iter};
+use std::iter;

 use base_db::CrateId;
 use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
@@ -25,12 +25,14 @@
 };

 pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
-let fn_traits = [
+[
 db.lang_item(krate, "fn".into()),
 db.lang_item(krate, "fn_mut".into()),
 db.lang_item(krate, "fn_once".into()),
-];
-array::IntoIter::new(fn_traits).into_iter().flatten().flat_map(|it| it.as_trait())
+]
+.into_iter()
+.flatten()
+.flat_map(|it| it.as_trait())
 }

 fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
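The `fn_traits` hunk above is the same idea applied to an array of `Option`s: iterate the array by value and `flatten()` away the misses. A self-contained sketch follows; `TraitId` and `lang_item` are made-up stand-ins, not rust-analyzer's real `DefDatabase` items.

```rust
// Sketch of the fn_traits rewrite: an array of Options, iterated by value, then flattened.
#[derive(Debug, Clone, Copy, PartialEq)]
struct TraitId(u32);

fn lang_item(name: &str) -> Option<TraitId> {
    match name {
        "fn" => Some(TraitId(0)),
        "fn_mut" => Some(TraitId(1)),
        _ => None, // pretend "fn_once" is not defined in this toy crate
    }
}

fn fn_traits() -> impl Iterator<Item = TraitId> {
    // No temporary binding and no std::array::IntoIter::new: the array
    // literal is already an IntoIterator of Option<TraitId> by value.
    [lang_item("fn"), lang_item("fn_mut"), lang_item("fn_once")]
        .into_iter()
        .flatten()
}

fn main() {
    assert_eq!(fn_traits().collect::<Vec<_>>(), vec![TraitId(0), TraitId(1)]);
}
```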
@@ -76,8 +76,7 @@ pub(crate) fn replace_try_expr_with_match(acc: &mut Assists, ctx: &AssistContext
 );
 let sad_arm = make::match_arm(iter::once(sad_pat), None, sad_expr);

-let match_arms = [happy_arm, sad_arm];
-let match_arm_list = make::match_arm_list(std::array::IntoIter::new(match_arms));
+let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);

 let expr_match = make::expr_match(expr, match_arm_list)
 .indent(IndentLevel::from_node(qm_kw_parent.syntax()));
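The `replace_try_expr_with_match` hunk works because the receiving helper presumably accepts any `impl IntoIterator`, and an array literal satisfies that by value, so the temporary `match_arms` binding and the explicit `std::array::IntoIter::new` call both disappear. A toy stand-in (not the real `syntax::ast::make` API) shows the shape:

```rust
// Toy version of an API taking `impl IntoIterator`, so callers can pass an array literal directly.
fn match_arm_list(arms: impl IntoIterator<Item = String>) -> String {
    arms.into_iter().collect::<Vec<_>>().join(",\n")
}

fn main() {
    let happy_arm = String::from("Ok(it) => it");
    let sad_arm = String::from("Err(err) => return Err(err)");

    // The array is consumed by value; no iterator adapter is needed at the call site.
    let arms = match_arm_list([happy_arm, sad_arm]);
    assert_eq!(arms, "Ok(it) => it,\nErr(err) => return Err(err)");
}
```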
@@ -268,7 +268,7 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
 }

 pub(crate) fn next_prev() -> impl Iterator<Item = Direction> {
-[Direction::Next, Direction::Prev].iter().copied()
+[Direction::Next, Direction::Prev].into_iter()
 }

 pub(crate) fn does_pat_match_variant(pat: &ast::Pat, var: &ast::Pat) -> bool {
@@ -177,7 +177,7 @@ macro_rules! attrs {
 #[rustfmt::skip]
 static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| {
 use SyntaxKind::*;
-std::array::IntoIter::new([
+[
 (
 SOURCE_FILE,
 attrs!(
@@ -229,7 +229,8 @@ macro_rules! attrs {
 (MATCH_ARM, attrs!()),
 (IDENT_PAT, attrs!()),
 (RECORD_PAT_FIELD, attrs!()),
-])
+]
+.into_iter()
 .collect()
 });
 const EXPR_ATTRIBUTES: &[&str] = attrs!();
@@ -10,9 +10,9 @@
 };

 pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext) {
-let add_completion = |item: &&str| {
+let add_completion = |item: &str| {
 let mut completion =
-CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), *item);
+CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), item);
 completion.insert_text(format!(r#""{}""#, item));
 completion.kind(CompletionItemKind::Attribute);
 acc.add(completion.build());
@@ -26,11 +26,11 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext) {
 .find(|t| matches!(t.kind(), SyntaxKind::IDENT));

 match previous.as_ref().map(|p| p.text()) {
-Some("target_arch") => KNOWN_ARCH.iter().for_each(add_completion),
-Some("target_env") => KNOWN_ENV.iter().for_each(add_completion),
-Some("target_os") => KNOWN_OS.iter().for_each(add_completion),
-Some("target_vendor") => KNOWN_VENDOR.iter().for_each(add_completion),
-Some("target_endian") => ["little", "big"].iter().for_each(add_completion),
+Some("target_arch") => KNOWN_ARCH.iter().copied().for_each(add_completion),
+Some("target_env") => KNOWN_ENV.iter().copied().for_each(add_completion),
+Some("target_os") => KNOWN_OS.iter().copied().for_each(add_completion),
+Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion),
+Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion),
 Some(name) => {
 if let Some(krate) = ctx.krate {
 krate.potential_cfg(ctx.db).get_cfg_values(&name).iter().for_each(|s| {
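The subtle part of the `complete_cfg` hunks is the closure's argument type: `KNOWN_ARCH.iter()` on a `&[&str]` yields `&&str`, which is why the old closure took `&&str` and dereferenced with `*item`. Adding `.copied()` (and using array `into_iter()` for the inline list) makes every arm yield plain `&str`, so the closure can take `&str` directly. A standalone sketch, with a stand-in constant rather than rust-analyzer's real tables:

```rust
// Why the closure changes from `&&str` to `&str`: slice iteration yields a
// double reference, while `.copied()` and array `into_iter()` yield `&str`.
const KNOWN_ARCH: &[&str] = &["x86_64", "aarch64"];

fn main() {
    // Old shape: items are `&&str`, so the body has to dereference.
    let old: Vec<String> =
        KNOWN_ARCH.iter().map(|item: &&str| format!(r#""{}""#, *item)).collect();

    // New shape: one `&str`-taking closure serves both the slice constants
    // (via `.copied()`) and the inline array (via `into_iter()`).
    let add_completion = |item: &str| format!(r#""{}""#, item);
    let from_slice: Vec<String> = KNOWN_ARCH.iter().copied().map(add_completion).collect();
    let from_array: Vec<String> = ["little", "big"].into_iter().map(add_completion).collect();

    assert_eq!(old, from_slice);
    assert_eq!(from_array, vec![r#""little""#, r#""big""#]);
}
```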
@@ -19,11 +19,10 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
 }
 });

-std::array::IntoIter::new(["self::", "super::", "crate::"])
-.for_each(|kw| acc.add_keyword(ctx, kw));
+["self::", "super::", "crate::"].into_iter().for_each(|kw| acc.add_keyword(ctx, kw));
 return;
 }
-std::array::IntoIter::new(["self", "super", "crate"]).for_each(|kw| acc.add_keyword(ctx, kw));
+["self", "super", "crate"].into_iter().for_each(|kw| acc.add_keyword(ctx, kw));

 match &ctx.completion_location {
 Some(ImmediateLocation::Visibility(_)) => return,
@@ -28,8 +28,7 @@ macro_rules! quote_ts {
 [
 TokenTree::from(Punct::new(':', Spacing::Joint)),
 TokenTree::from(Punct::new(':', Spacing::Alone)),
-].iter()
-.cloned()
+].into_iter()
 .map(|mut x| {
 x.set_span(Span::def_site());
 x
@@ -52,7 +51,7 @@ macro_rules! quote {
 ($($t:tt)*) => {
 [
 $(TokenStream::from(quote_ts!($t)),)*
-].iter().cloned().collect::<TokenStream>()
+].into_iter().collect::<TokenStream>()
 };
 }

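In the `quote_ts!`/`quote!` hunks above, the array elements are owned `TokenTree`/`TokenStream` values, so the old `.iter().cloned()` cloned every element just to collect them; iterating the array by value hands the elements over without the clone. A sketch with `String` standing in for the proc-macro types:

```rust
// Collecting an array of owned values: the old shape clones, the new shape moves.
fn main() {
    let parts = [String::from("fn "), String::from("main"), String::from("() {}")];

    // Old: borrow the array and clone each element before collecting.
    let cloned: String = parts.iter().cloned().collect();

    // New: consume the array; the elements are moved into the collection.
    let moved: String = parts.into_iter().collect();

    assert_eq!(cloned, "fn main() {}");
    assert_eq!(cloned, moved);
}
```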
@@ -12,8 +12,8 @@ pub(crate) fn get(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Ve

 // Some nightly-only cfgs, which are required for stdlib
 res.push(CfgFlag::Atom("target_thread_local".into()));
-for &ty in ["8", "16", "32", "64", "cas", "ptr"].iter() {
-for &key in ["target_has_atomic", "target_has_atomic_load_store"].iter() {
+for ty in ["8", "16", "32", "64", "cas", "ptr"] {
+for key in ["target_has_atomic", "target_has_atomic_load_store"] {
 res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() });
 }
 }
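The remaining hunks (here and in the two that follow) are the loop form of the same change: `for &x in [...].iter()` borrows the array and uses a `&`-pattern to copy each element back out, while `for x in [...]` consumes the array and binds the elements by value, which reads more directly. A minimal sketch, unrelated to the actual rust-analyzer config code:

```rust
// Looping over an array literal: by reference with a `&`-pattern vs. by value.
fn main() {
    let mut old = Vec::new();
    // Old: `.iter()` yields `&&str`; the `&ty` pattern copies the `&str` back out.
    for &ty in ["8", "16", "32", "64"].iter() {
        old.push(format!("target_has_atomic=\"{}\"", ty));
    }

    let mut new = Vec::new();
    // New: `for` desugars through IntoIterator, so the array is consumed and
    // each element is already a `&str`; no pattern gymnastics needed.
    for ty in ["8", "16", "32", "64"] {
        new.push(format!("target_has_atomic=\"{}\"", ty));
    }

    assert_eq!(old, new);
}
```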
@@ -706,7 +706,7 @@ pub(crate) fn handle_runnables(
 let config = snap.config.runnables();
 match cargo_spec {
 Some(spec) => {
-for &cmd in ["check", "test"].iter() {
+for cmd in ["check", "test"] {
 res.push(lsp_ext::Runnable {
 label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
 location: None,
@@ -56,12 +56,12 @@ fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> {

 fn is_valid_feature_name(feature: &str) -> Result<(), String> {
 'word: for word in feature.split_whitespace() {
-for &short in ["to", "and"].iter() {
+for short in ["to", "and"] {
 if word == short {
 continue 'word;
 }
 }
-for &short in ["To", "And"].iter() {
+for short in ["To", "And"] {
 if word == short {
 return Err(format!("Don't capitalize {:?}", word));
 }