Auto merge of #12981 - kadiwa4:remove_some_imports, r=Veykril

Remove imports that are also in edition 2021's prelude

small cleanup
bors 2022-08-09 07:03:35 +00:00
commit e1e93c4438
18 changed files with 15 additions and 27 deletions
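
For context: the Rust 2021 edition prelude added `std::convert::{TryFrom, TryInto}` and `std::iter::FromIterator` to the items that are in scope by default, so crates on edition 2021 no longer need to import them explicitly; that is all the diffs below do. A minimal sketch of the effect (not from this commit; assumes a crate with `edition = "2021"` in its Cargo.toml):

// No `use std::convert::TryInto;` or `use std::iter::FromIterator;` needed.
fn main() {
    // `TryInto` is in the 2021 prelude, so `.try_into()` resolves.
    let byte: u8 = 300usize.try_into().unwrap_or(u8::MAX);

    // `FromIterator` is in the 2021 prelude, so `Vec::from_iter` resolves too.
    let values = Vec::from_iter(0..3);

    println!("{byte} {values:?}");
}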

@@ -6,7 +6,7 @@
 //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
 //! actual IO is done and lowered to input.

-use std::{fmt, iter::FromIterator, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
+use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};

 use cfg::CfgOptions;
 use rustc_hash::{FxHashMap, FxHashSet};

@@ -2,7 +2,6 @@
 use std::{
     collections::HashMap,
-    convert::TryInto,
     fmt::{Display, Write},
 };

@@ -4,7 +4,7 @@
 //! get a super-set of matches. Then, we confirm each match using precise
 //! name resolution.

-use std::{convert::TryInto, mem, sync::Arc};
+use std::{mem, sync::Arc};

 use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
 use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};

@@ -1,4 +1,4 @@
-use std::{convert::TryInto, mem::discriminant};
+use std::mem::discriminant;

 use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
 use hir::{AsAssocItem, AssocItem, Semantics};

@@ -1,4 +1,4 @@
-use std::{fmt, iter::FromIterator, sync::Arc};
+use std::{fmt, sync::Arc};

 use hir::{ExpandResult, MacroFile};
 use ide_db::base_db::{

@@ -35,10 +35,7 @@
 //! as we don't have bincode in Cargo.toml yet, let's stick with serde_json for
 //! the time being.

-use std::{
-    collections::{HashMap, VecDeque},
-    convert::TryInto,
-};
+use std::collections::{HashMap, VecDeque};

 use serde::{Deserialize, Serialize};
 use tt::TokenId;

@@ -157,7 +157,7 @@ impl From<TokenTree> for TokenStream {
 }

 /// Collects a number of token trees into a single stream.
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         trees.into_iter().map(TokenStream::from).collect()
     }
@@ -165,7 +165,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {

 /// A "flattening" operation on token streams, collects token trees
 /// from multiple token streams into a single stream.
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let mut builder = bridge::client::TokenStreamBuilder::new();
         streams.into_iter().for_each(|stream| builder.push(stream.0));
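
The `impl` headers in the hunks above drop the `iter::` path for the same reason: on edition 2021 the `FromIterator` trait name itself resolves through the prelude, not just its methods. A minimal sketch of the pattern, using a made-up `Bag` type rather than the real `TokenStream` (illustration only):

struct Bag(Vec<i32>);

// Compiles without `use std::iter::FromIterator;` on edition 2021.
impl FromIterator<i32> for Bag {
    fn from_iter<I: IntoIterator<Item = i32>>(iter: I) -> Self {
        Bag(iter.into_iter().collect())
    }
}

fn collect_bag() -> Bag {
    // `collect` dispatches to the `FromIterator` impl above.
    (1..4).collect()
}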

@@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server};
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::iter::FromIterator;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};

@@ -207,7 +207,7 @@ impl ConcatStreamsHelper {
 }

 /// Collects a number of token trees into a single stream.
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         trees.into_iter().map(TokenStream::from).collect()
     }
@@ -215,7 +215,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {

 /// A "flattening" operation on token streams, collects token trees
 /// from multiple token streams into a single stream.
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let iter = streams.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);

@@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server};
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::iter::FromIterator;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};

@@ -207,7 +207,7 @@ impl ConcatStreamsHelper {
 }

 /// Collects a number of token trees into a single stream.
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         trees.into_iter().map(TokenStream::from).collect()
     }
@@ -215,7 +215,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {

 /// A "flattening" operation on token streams, collects token trees
 /// from multiple token streams into a single stream.
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let iter = streams.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);

@@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server};
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::iter::FromIterator;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};

@@ -20,7 +20,7 @@ use token_stream::TokenStreamBuilder;
 mod symbol;
 pub use symbol::*;

-use std::{iter::FromIterator, ops::Bound};
+use std::ops::Bound;

 type Group = tt::Subtree;
 type TokenTree = tt::TokenTree;

@@ -1,7 +1,6 @@
 //! Handles dynamic library loading for proc macro

 use std::{
-    convert::TryInto,
     fmt,
     fs::File,
     io,

@@ -512,7 +512,7 @@ fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription>
 #[cfg(test)]
 #[cfg(not(windows))]
 mod tests {
-    use std::{convert::TryInto, path::Path};
+    use std::path::Path;

     use crate::{config::Config, global_state::GlobalState};

@@ -1386,7 +1386,7 @@ fn main() {
     #[test]
     #[cfg(target_os = "windows")]
     fn test_lowercase_drive_letter() {
-        use std::{convert::TryInto, path::Path};
+        use std::path::Path;

         let url = url_from_abs_path(Path::new("C:\\Test").try_into().unwrap());
         assert_eq!(url.to_string(), "file:///c:/Test");

@@ -2,10 +2,7 @@
 //!
 //! We don't normally run fuzzing, so this is hopelessly bitrotten :(

-use std::{
-    convert::TryInto,
-    str::{self, FromStr},
-};
+use std::str::{self, FromStr};

 use text_edit::Indel;

@@ -6,7 +6,6 @@
 use std::{
     fmt,
     hash::{Hash, Hasher},
-    iter::FromIterator,
     marker::PhantomData,
     ops::{Index, IndexMut, Range, RangeInclusive},
 };