Revert "Auto merge of #89709 - clemenswasser:apply_clippy_suggestions_2, r=petrochenkov"

The PR had some unforeseen perf regressions that are not easy to track down.
Revert the PR for now.

This reverts commit 6ae8912a3e, reversing
changes made to 86d6d2b738.
Matthias Krüger 2021-10-15 11:24:20 +02:00
parent 72d66064e7
commit 4457014398
22 changed files with 72 additions and 56 deletions

@ -389,7 +389,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let _: Loss = sig::shift_right(&mut sig, &mut exp, trailing_zeros as usize);
// Change the exponent from 2^e to 10^e.
#[allow(clippy::comparison_chain)]
if exp == 0 {
// Nothing to do.
} else if exp > 0 {
@ -2527,7 +2526,6 @@ pub(super) fn add_or_sub(
if *a_sign ^ b_sign {
let (reverse, loss);
#[allow(clippy::comparison_chain)]
if bits == 0 {
reverse = cmp(a_sig, b_sig) == Ordering::Less;
loss = Loss::ExactlyZero;
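
The two hunks above add or drop an #[allow(clippy::comparison_chain)] attribute on if/else-if chains that compare the same operands. As a standalone sketch (an illustrative function, not taken from the apfloat code), this is the shape the lint flags and the match-on-Ordering form it suggests instead:

    use std::cmp::Ordering;

    // If/else-if chain comparing the same operands; clippy::comparison_chain flags this.
    fn describe_chain(exp: i32) -> &'static str {
        if exp == 0 {
            "zero"
        } else if exp > 0 {
            "positive"
        } else {
            "negative"
        }
    }

    // The alternative the lint suggests: one three-way match on Ordering.
    fn describe_match(exp: i32) -> &'static str {
        match exp.cmp(&0) {
            Ordering::Equal => "zero",
            Ordering::Greater => "positive",
            Ordering::Less => "negative",
        }
    }

    fn main() {
        for &exp in &[-3, 0, 7] {
            assert_eq!(describe_chain(exp), describe_match(exp));
        }
    }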

@ -14,7 +14,7 @@
#[inline]
pub fn push_str(mut n: u128, base: usize, output: &mut String) {
debug_assert!((2..=MAX_BASE).contains(&base));
debug_assert!(base >= 2 && base <= MAX_BASE);
let mut s = [0u8; 128];
let mut index = 0;
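
This hunk swaps between the explicit comparison and RangeInclusive::contains; the two debug_assert!s accept exactly the same values, and clippy (manual_range_contains) prefers the contains form. A minimal sketch, with an illustrative MAX_BASE rather than the real constant:

    const MAX_BASE: usize = 64; // illustrative value, not necessarily the rustc constant

    fn check_base(base: usize) {
        // These two assertions are equivalent.
        debug_assert!((2..=MAX_BASE).contains(&base));
        debug_assert!(base >= 2 && base <= MAX_BASE);
    }

    fn main() {
        check_base(16);
    }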

@ -206,11 +206,17 @@ pub fn adjacent_edges(
AdjacentEdges { graph: self, direction, next: first_edge }
}
pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
pub fn successor_nodes<'a>(
&'a self,
source: NodeIndex,
) -> impl Iterator<Item = NodeIndex> + 'a {
self.outgoing_edges(source).targets()
}
pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
pub fn predecessor_nodes<'a>(
&'a self,
target: NodeIndex,
) -> impl Iterator<Item = NodeIndex> + 'a {
self.incoming_edges(target).sources()
}
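
The signature change here is purely about lifetime spelling: `+ '_` ties the returned iterator to an anonymous borrow of &self, while the reverted form names the lifetime explicitly. Both mean the same thing, as this self-contained sketch (a toy Graph type, not the rustc one) shows:

    struct Graph {
        nodes: Vec<u32>,
    }

    impl Graph {
        // Elided form: '_ ties the iterator's borrow to &self.
        fn values(&self) -> impl Iterator<Item = u32> + '_ {
            self.nodes.iter().copied()
        }

        // Explicit form restored by the revert; semantically identical.
        fn values_explicit<'a>(&'a self) -> impl Iterator<Item = u32> + 'a {
            self.nodes.iter().copied()
        }
    }

    fn main() {
        let g = Graph { nodes: vec![1, 2, 3] };
        assert_eq!(g.values().sum::<u32>(), g.values_explicit().sum::<u32>());
    }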

@ -48,7 +48,7 @@ struct PostOrderFrame<Node, Iter> {
let node = frame.node;
visited[node] = true;
for successor in frame.iter.by_ref() {
while let Some(successor) = frame.iter.next() {
if !visited[successor] {
stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) });
continue 'recurse;
@ -112,7 +112,7 @@ pub fn push_start_node(&mut self, start_node: G::Node) {
/// This is equivalent to just invoke `next` repeatedly until
/// you get a `None` result.
pub fn complete_search(&mut self) {
for _ in self {}
while let Some(_) = self.next() {}
}
/// Returns true if node has been visited thus far.
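
Both hunks in this file trade a `for` loop over an iterator for an explicit `while let Some(..) = it.next()` loop (the pattern clippy::while_let_on_iterator targets). The two loop forms drain the iterator identically; a sketch with a plain Vec iterator instead of the graph types used here:

    fn main() {
        let mut iter = vec![1, 2, 3, 4].into_iter();

        // Explicit form restored by the revert.
        let mut it_a = iter.clone();
        let mut sum_a = 0;
        while let Some(x) = it_a.next() {
            sum_a += x;
        }

        // `for` over `by_ref()` drains the same iterator without consuming the binding.
        let mut sum_b = 0;
        for x in iter.by_ref() {
            sum_b += x;
        }

        assert_eq!(sum_a, sum_b);
        assert_eq!(iter.next(), None); // fully drained in both cases
    }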

@ -390,7 +390,7 @@ pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> {
.map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) })
.collect();
self.compress(|_| unreachable!());
self.compress(|_| assert!(false));
errors
}
@ -612,7 +612,7 @@ fn find_cycles_from_node<P>(&self, stack: &mut Vec<usize>, processor: &mut P, in
fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
let orig_nodes_len = self.nodes.len();
let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec);
assert!(node_rewrites.is_empty());
debug_assert!(node_rewrites.is_empty());
node_rewrites.extend(0..orig_nodes_len);
let mut dead_nodes = 0;
@ -623,13 +623,13 @@ fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
// self.nodes[0..index - dead_nodes] are the first remaining nodes
// self.nodes[index - dead_nodes..index] are all dead
// self.nodes[index..] are unchanged
for (index, node_rewrite) in node_rewrites.iter_mut().enumerate() {
for index in 0..orig_nodes_len {
let node = &self.nodes[index];
match node.state.get() {
NodeState::Pending | NodeState::Waiting => {
if dead_nodes > 0 {
self.nodes.swap(index, index - dead_nodes);
*node_rewrite -= dead_nodes;
node_rewrites[index] -= dead_nodes;
}
}
NodeState::Done => {
@ -646,7 +646,7 @@ fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
}
// Extract the success stories.
outcome_cb(&node.obligation);
*node_rewrite = orig_nodes_len;
node_rewrites[index] = orig_nodes_len;
dead_nodes += 1;
}
NodeState::Error => {
@ -655,7 +655,7 @@ fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
// check against.
self.active_cache.remove(&node.obligation.as_cache_key());
self.insert_into_error_cache(index);
*node_rewrite = orig_nodes_len;
node_rewrites[index] = orig_nodes_len;
dead_nodes += 1;
}
NodeState::Success => unreachable!(),
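
Besides swapping unreachable!() for assert!(false) and assert! for debug_assert!, the compress() hunks restore an index-based loop where the clippy version iterated with iter_mut().enumerate() (the clippy::needless_range_loop pattern). The two loop shapes do the same work; a reduced sketch with a plain Vec<usize> standing in for node_rewrites:

    fn main() {
        let rewrites = vec![10usize, 20, 30, 40];
        let dead_nodes = 1;

        // Index-based form, as restored by the revert.
        let mut a = rewrites.clone();
        for index in 0..a.len() {
            a[index] -= dead_nodes;
        }

        // Iterator form the clippy change used; avoids repeated bounds-checked indexing.
        let mut b = rewrites.clone();
        for (_index, rewrite) in b.iter_mut().enumerate() {
            *rewrite -= dead_nodes;
        }

        assert_eq!(a, b);
    }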

@ -205,10 +205,10 @@ fn range_slice_indices<R>(&self, range: R) -> (usize, usize)
R: RangeBounds<K>,
{
let start = match range.start_bound() {
Bound::Included(k) => match self.lookup_index_for(k) {
Bound::Included(ref k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index,
},
Bound::Excluded(k) => match self.lookup_index_for(k) {
Bound::Excluded(ref k) => match self.lookup_index_for(k) {
Ok(index) => index + 1,
Err(index) => index,
},
@ -216,11 +216,11 @@ fn range_slice_indices<R>(&self, range: R) -> (usize, usize)
};
let end = match range.end_bound() {
Bound::Included(k) => match self.lookup_index_for(k) {
Bound::Included(ref k) => match self.lookup_index_for(k) {
Ok(index) => index + 1,
Err(index) => index,
},
Bound::Excluded(k) => match self.lookup_index_for(k) {
Bound::Excluded(ref k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index,
},
Bound::Unbounded => self.data.len(),

@ -75,7 +75,7 @@ pub fn get(&self, idx: I) -> Option<&(K, V)> {
///
/// If there are multiple items that are equivalent to `key`, they will be yielded in
/// insertion order.
pub fn get_by_key(&self, key: K) -> impl Iterator<Item = &V> {
pub fn get_by_key(&'a self, key: K) -> impl 'a + Iterator<Item = &'a V> {
self.get_by_key_enumerated(key).map(|(_, v)| v)
}
@ -84,7 +84,7 @@ pub fn get_by_key(&self, key: K) -> impl Iterator<Item = &V> {
///
/// If there are multiple items that are equivalent to `key`, they will be yielded in
/// insertion order.
pub fn get_by_key_enumerated(&self, key: K) -> impl Iterator<Item = (I, &V)> {
pub fn get_by_key_enumerated(&'a self, key: K) -> impl '_ + Iterator<Item = (I, &V)> {
let lower_bound = self.idx_sorted_by_item_key.partition_point(|&i| self.items[i].0 < key);
self.idx_sorted_by_item_key[lower_bound..].iter().map_while(move |&i| {
let (k, v) = &self.items[i];

@ -257,7 +257,11 @@ pub fn insert(&mut self, key: K, value: V) -> Option<V> {
pub fn remove(&mut self, key: &K) -> Option<V> {
match self {
SsoHashMap::Array(array) => {
array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index).1)
if let Some(index) = array.iter().position(|(k, _v)| k == key) {
Some(array.swap_remove(index).1)
} else {
None
}
}
SsoHashMap::Map(map) => map.remove(key),
}
@ -268,7 +272,11 @@ pub fn remove(&mut self, key: &K) -> Option<V> {
pub fn remove_entry(&mut self, key: &K) -> Option<(K, V)> {
match self {
SsoHashMap::Array(array) => {
array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index))
if let Some(index) = array.iter().position(|(k, _v)| k == key) {
Some(array.swap_remove(index))
} else {
None
}
}
SsoHashMap::Map(map) => map.remove_entry(key),
}
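
The remove and remove_entry hunks above replace a position(..).map(..) chain with an explicit if let Some(index) / else None. Both return the removed value (or None) and both call swap_remove exactly once; a standalone sketch with concrete key/value types instead of the generic SsoHashMap ones:

    fn remove_map(array: &mut Vec<(u32, String)>, key: u32) -> Option<String> {
        // Combinator form used by the clippy change.
        array
            .iter()
            .position(|(k, _v)| *k == key)
            .map(|index| array.swap_remove(index).1)
    }

    fn remove_if_let(array: &mut Vec<(u32, String)>, key: u32) -> Option<String> {
        // Explicit form restored by the revert.
        if let Some(index) = array.iter().position(|(k, _v)| *k == key) {
            Some(array.swap_remove(index).1)
        } else {
            None
        }
    }

    fn main() {
        let mut a = vec![(1, "one".to_string()), (2, "two".to_string())];
        let mut b = a.clone();
        assert_eq!(remove_map(&mut a, 2), remove_if_let(&mut b, 2));
        assert_eq!(a, b);
    }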
@ -415,14 +423,14 @@ fn into_iter(self) -> Self::IntoIter {
/// adapts Item of array reference iterator to Item of hashmap reference iterator.
#[inline(always)]
fn adapt_array_ref_it<K, V>(pair: &(K, V)) -> (&K, &V) {
fn adapt_array_ref_it<K, V>(pair: &'a (K, V)) -> (&'a K, &'a V) {
let (a, b) = pair;
(a, b)
}
/// adapts Item of array mut reference iterator to Item of hashmap mut reference iterator.
#[inline(always)]
fn adapt_array_mut_it<K, V>(pair: &mut (K, V)) -> (&K, &mut V) {
fn adapt_array_mut_it<K, V>(pair: &'a mut (K, V)) -> (&'a K, &'a mut V) {
let (a, b) = pair;
(a, b)
}

@ -75,7 +75,7 @@ pub fn is_empty(&self) -> bool {
/// An iterator visiting all elements in arbitrary order.
/// The iterator element type is `&'a T`.
#[inline]
pub fn iter(&self) -> impl Iterator<Item = &T> {
pub fn iter(&'a self) -> impl Iterator<Item = &'a T> {
self.into_iter()
}

@ -229,14 +229,14 @@ fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
impl<CTX> HashStable<CTX> for f32 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
let val: u32 = self.to_bits();
let val: u32 = unsafe { ::std::mem::transmute(*self) };
val.hash_stable(ctx, hasher);
}
}
impl<CTX> HashStable<CTX> for f64 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
let val: u64 = self.to_bits();
let val: u64 = unsafe { ::std::mem::transmute(*self) };
val.hash_stable(ctx, hasher);
}
}
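
f32::to_bits / f64::to_bits and a transmute to the same-width integer produce exactly the same bit pattern; to_bits is simply the safe, named API for it. A quick standalone check:

    fn main() {
        let x: f32 = 1.5;

        let via_to_bits: u32 = x.to_bits();
        let via_transmute: u32 = unsafe { std::mem::transmute(x) };

        assert_eq!(via_to_bits, via_transmute);
        assert_eq!(via_to_bits, 0x3FC0_0000); // IEEE-754 encoding of 1.5f32
    }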

@ -5,7 +5,6 @@
// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
// on. This flag has performance relevant characteristics. Don't set it too high.
#[allow(clippy::identity_op)]
const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations

@ -34,7 +34,7 @@ pub fn new(value: T) -> Self {
#[track_caller]
pub fn borrow(&self) -> MappedReadGuard<'_, T> {
let borrow = self.value.borrow();
if borrow.is_none() {
if let None = &*borrow {
panic!("attempted to read from stolen value: {}", std::any::type_name::<T>());
}
ReadGuard::map(borrow, |opt| opt.as_ref().unwrap())
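
This hunk trades Option::is_none for an `if let None = ..` test (the shape clippy::redundant_pattern_matching flags). The two conditions are interchangeable; a minimal sketch without the Steal/ReadGuard machinery:

    fn main() {
        let stolen: Option<u32> = None;

        // Pattern-matching form restored by the revert.
        if let None = &stolen {
            println!("value was stolen (if let)");
        }

        // Method form the clippy change used.
        if stolen.is_none() {
            println!("value was stolen (is_none)");
        }
    }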

@ -48,7 +48,7 @@ pub fn remove(&mut self, data: &T) -> bool {
#[inline]
pub fn contains(&self, data: &T) -> bool {
let mut elem = self.head.as_ref();
while let Some(e) = elem {
while let Some(ref e) = elem {
if &e.data == data {
return true;
}

@ -2,8 +2,8 @@
pub fn iter<Ls>(
first: Option<Ls::LinkIndex>,
links: &Ls,
) -> impl Iterator<Item = Ls::LinkIndex> + '_
links: &'a Ls,
) -> impl Iterator<Item = Ls::LinkIndex> + 'a
where
Ls: Links,
{

@ -512,7 +512,7 @@ fn escape_str(s: &str) -> String {
pub fn to_dot_string(&self) -> String {
match *self {
LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(s)),
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
HtmlStr(ref s) => format!("<{}>", s),
}
}

@ -990,8 +990,9 @@ pub fn union_row_with(&mut self, with: &BitSet<C>, write: R) -> bool {
pub fn insert_all_into_row(&mut self, row: R) {
assert!(row.index() < self.num_rows);
let (start, end) = self.range(row);
for word in self.words[start..end].iter_mut() {
*word = !0;
let words = &mut self.words[..];
for index in start..end {
words[index] = !0;
}
self.clear_excess_bits(row);
}
@ -1143,7 +1144,7 @@ pub fn rows(&self) -> impl Iterator<Item = R> {
/// Iterates through all the columns set to true in a given row of
/// the matrix.
pub fn iter(&self, row: R) -> impl Iterator<Item = C> + '_ {
pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a {
self.row(row).into_iter().flat_map(|r| r.iter())
}

@ -634,15 +634,18 @@ pub fn iter_enumerated_mut(
}
#[inline]
pub fn drain<R: RangeBounds<usize>>(&mut self, range: R) -> impl Iterator<Item = T> + '_ {
pub fn drain<'a, R: RangeBounds<usize>>(
&'a mut self,
range: R,
) -> impl Iterator<Item = T> + 'a {
self.raw.drain(range)
}
#[inline]
pub fn drain_enumerated<R: RangeBounds<usize>>(
&mut self,
pub fn drain_enumerated<'a, R: RangeBounds<usize>>(
&'a mut self,
range: R,
) -> impl Iterator<Item = (I, T)> + '_ {
) -> impl Iterator<Item = (I, T)> + 'a {
self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t))
}

@ -68,10 +68,11 @@ pub enum EscapeError {
impl EscapeError {
/// Returns true for actual errors, as opposed to warnings.
pub fn is_fatal(&self) -> bool {
!matches!(
self,
EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning
)
match self {
EscapeError::UnskippedWhitespaceWarning => false,
EscapeError::MultipleSkippedLinesWarning => false,
_ => true,
}
}
}
@ -329,7 +330,7 @@ fn skip_ascii_whitespace<F>(chars: &mut Chars<'_>, start: usize, callback: &mut
callback(start..end, Err(EscapeError::MultipleSkippedLinesWarning));
}
let tail = &tail[first_non_space..];
if let Some(c) = tail.chars().next() {
if let Some(c) = tail.chars().nth(0) {
// For error reporting, we would like the span to contain the character that was not
// skipped. The +1 is necessary to account for the leading \ that started the escape.
let end = start + first_non_space + c.len_utf8() + 1;
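
Two independent changes in this file: is_fatal goes from a negated matches! back to an explicit match, and the whitespace-skipping code goes from .next() back to .nth(0); each pair is behaviourally identical. A trimmed stand-in enum with just three variants (not the full rustc_lexer EscapeError) to show the equivalence:

    enum EscapeError {
        UnskippedWhitespaceWarning,
        MultipleSkippedLinesWarning,
        LoneSlash,
    }

    impl EscapeError {
        // matches!-based form used by the clippy change.
        fn is_fatal_matches(&self) -> bool {
            !matches!(
                self,
                EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning
            )
        }

        // Explicit match restored by the revert; same truth table.
        fn is_fatal_match(&self) -> bool {
            match self {
                EscapeError::UnskippedWhitespaceWarning => false,
                EscapeError::MultipleSkippedLinesWarning => false,
                _ => true,
            }
        }
    }

    fn main() {
        assert!(EscapeError::LoneSlash.is_fatal_matches());
        assert!(EscapeError::LoneSlash.is_fatal_match());
        assert!(!EscapeError::UnskippedWhitespaceWarning.is_fatal_matches());
        assert!(!EscapeError::MultipleSkippedLinesWarning.is_fatal_match());

        // The other hunk: `.next()` and `.nth(0)` yield the same first element.
        let tail = "abc";
        assert_eq!(tail.chars().next(), tail.chars().nth(0));
    }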

@ -24,7 +24,8 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
}
if meta.path().is_ident("project") {
if let Meta::List(list) = meta {
if let Some(NestedMeta::Meta(meta)) = list.nested.iter().next() {
if let Some(nested) = list.nested.iter().next() {
if let NestedMeta::Meta(meta) = nested {
attrs.project = meta.path().get_ident().cloned();
any_attr = true;
}
@ -33,6 +34,7 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
}
}
}
}
if !any_attr {
panic!("error parsing stable_hasher");
}

@ -349,14 +349,14 @@ fn generate_field_code(
) -> Result<proc_macro2::TokenStream, SessionDiagnosticDeriveError> {
let field_binding = &info.binding.binding;
let option_ty = option_inner_ty(info.ty);
let option_ty = option_inner_ty(&info.ty);
let generated_code = self.generate_non_option_field_code(
attr,
FieldInfo {
vis: info.vis,
binding: info.binding,
ty: option_ty.unwrap_or(info.ty),
ty: option_ty.unwrap_or(&info.ty),
span: info.span,
},
)?;
@ -388,7 +388,7 @@ fn generate_non_option_field_code(
let formatted_str = self.build_format(&s.value(), attr.span());
match name {
"message" => {
if type_matches_path(info.ty, &["rustc_span", "Span"]) {
if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
quote! {
#diag.set_span(*#field_binding);
#diag.set_primary_message(#formatted_str);
@ -401,7 +401,7 @@ fn generate_non_option_field_code(
}
}
"label" => {
if type_matches_path(info.ty, &["rustc_span", "Span"]) {
if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
quote! {
#diag.span_label(*#field_binding, #formatted_str);
}

@ -500,8 +500,8 @@ fn encode(&self, s: &mut S) -> Result<(), S::Error> {
d.read_seq(|d, len| {
assert!(len == N);
let mut v = [0u8; N];
for x in &mut v {
*x = d.read_seq_elt(|d| Decodable::decode(d))?;
for i in 0..len {
v[i] = d.read_seq_elt(|d| Decodable::decode(d))?;
}
Ok(v)
})

@ -31,10 +31,8 @@ pub fn len(&self) -> usize {
} else {
3
}
} else if v < 10_000 {
4
} else {
5
if v < 10_000 { 4 } else { 5 }
}
}
Part::Copy(buf) => buf.len(),
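
This last hunk toggles between a collapsed `else if` chain and an `if` nested inside the `else` branch (the shape clippy::collapsible_else_if is about); the computed length is the same either way. An illustrative helper (not the real Part::len logic) comparing the two shapes:

    // Buckets v by decimal length; purely illustrative, not the rustc function.
    fn len_nested(v: u32) -> usize {
        if v < 1_000 {
            3
        } else {
            // Nested form: an `if` as the sole statement of the `else` block.
            if v < 10_000 { 4 } else { 5 }
        }
    }

    fn len_collapsed(v: u32) -> usize {
        if v < 1_000 {
            3
        } else if v < 10_000 {
            4
        } else {
            5
        }
    }

    fn main() {
        for &v in &[999, 1_000, 9_999, 10_000, 99_999] {
            assert_eq!(len_nested(v), len_collapsed(v));
        }
    }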