Auto merge of #89905 - matthiaskrgr:rev_89709_entirely, r=michaelwoerister

Revert "Auto merge of #89709 - clemenswasser:apply_clippy_suggestions…

…_2, r=petrochenkov"

The PR had some unforeseen perf regressions that are not easy to track down.
Revert the PR for now.

This reverts commit 6ae8912a3e, reversing
changes made to 86d6d2b738.
bors 2021-10-19 02:03:21 +00:00
commit cd8b56f528
22 changed files with 72 additions and 56 deletions

View File

@@ -389,7 +389,6 @@ impl<S: Semantics> fmt::Display for IeeeFloat<S> {
let _: Loss = sig::shift_right(&mut sig, &mut exp, trailing_zeros as usize);
// Change the exponent from 2^e to 10^e.
#[allow(clippy::comparison_chain)]
if exp == 0 {
// Nothing to do.
} else if exp > 0 {
@@ -2527,7 +2526,6 @@ mod sig {
if *a_sign ^ b_sign {
let (reverse, loss);
#[allow(clippy::comparison_chain)]
if bits == 0 {
reverse = cmp(a_sig, b_sig) == Ordering::Less;
loss = Loss::ExactlyZero;
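
For context, `clippy::comparison_chain` is the lint named in the attribute above: it fires on `if`/`else if`/`else` chains that compare the same two values and suggests matching on `std::cmp::Ordering` instead. A minimal standalone sketch of both forms (illustrative only, not code from this diff):

```rust
use std::cmp::Ordering;

// The if/else-if chain that clippy::comparison_chain warns about.
fn describe(exp: i32) -> &'static str {
    if exp == 0 {
        "zero"
    } else if exp > 0 {
        "positive"
    } else {
        "negative"
    }
}

// The match-on-Ordering form the lint suggests instead.
fn describe_with_match(exp: i32) -> &'static str {
    match exp.cmp(&0) {
        Ordering::Equal => "zero",
        Ordering::Greater => "positive",
        Ordering::Less => "negative",
    }
}

fn main() {
    for exp in [-7, 0, 3] {
        assert_eq!(describe(exp), describe_with_match(exp));
    }
}
```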

View File

@@ -14,7 +14,7 @@ const BASE_64: &[u8; MAX_BASE as usize] =
#[inline]
pub fn push_str(mut n: u128, base: usize, output: &mut String) {
debug_assert!((2..=MAX_BASE).contains(&base));
debug_assert!(base >= 2 && base <= MAX_BASE);
let mut s = [0u8; 128];
let mut index = 0;

View File

@@ -206,11 +206,17 @@ impl<N: Debug, E: Debug> Graph<N, E> {
AdjacentEdges { graph: self, direction, next: first_edge }
}
pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
pub fn successor_nodes<'a>(
&'a self,
source: NodeIndex,
) -> impl Iterator<Item = NodeIndex> + 'a {
self.outgoing_edges(source).targets()
}
pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
pub fn predecessor_nodes<'a>(
&'a self,
target: NodeIndex,
) -> impl Iterator<Item = NodeIndex> + 'a {
self.incoming_edges(target).sources()
}

View File

@@ -48,7 +48,7 @@ fn post_order_walk<G: DirectedGraph + WithSuccessors + WithNumNodes>(
let node = frame.node;
visited[node] = true;
for successor in frame.iter.by_ref() {
while let Some(successor) = frame.iter.next() {
if !visited[successor] {
stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) });
continue 'recurse;
@@ -112,7 +112,7 @@ where
/// This is equivalent to just invoke `next` repeatedly until
/// you get a `None` result.
pub fn complete_search(&mut self) {
for _ in self {}
while let Some(_) = self.next() {}
}
/// Returns true if node has been visited thus far.
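
Both spellings of `complete_search` above drain the iterator to exhaustion, as the doc comment says: a `for` loop over an iterator desugars into repeated `next()` calls until `None`. A minimal sketch of that equivalence on a plain iterator (illustrative, not code from this diff):

```rust
// Drain an iterator with a `for` loop: it desugars into repeated `next()` calls.
fn drain_with_for<I: Iterator>(it: I) -> usize {
    let mut consumed = 0;
    for _ in it {
        consumed += 1;
    }
    consumed
}

// Drain it with the explicit `while let` form restored by the revert.
fn drain_with_while_let<I: Iterator>(mut it: I) -> usize {
    let mut consumed = 0;
    while let Some(_) = it.next() {
        consumed += 1;
    }
    consumed
}

fn main() {
    assert_eq!(drain_with_for(0..5), 5);
    assert_eq!(drain_with_while_let(0..5), 5);
}
```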

View File

@@ -390,7 +390,7 @@ impl<O: ForestObligation> ObligationForest<O> {
.map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) })
.collect();
self.compress(|_| unreachable!());
self.compress(|_| assert!(false));
errors
}
@@ -612,7 +612,7 @@ impl<O: ForestObligation> ObligationForest<O> {
fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
let orig_nodes_len = self.nodes.len();
let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec);
assert!(node_rewrites.is_empty());
debug_assert!(node_rewrites.is_empty());
node_rewrites.extend(0..orig_nodes_len);
let mut dead_nodes = 0;
@@ -623,13 +623,13 @@ impl<O: ForestObligation> ObligationForest<O> {
// self.nodes[0..index - dead_nodes] are the first remaining nodes
// self.nodes[index - dead_nodes..index] are all dead
// self.nodes[index..] are unchanged
for (index, node_rewrite) in node_rewrites.iter_mut().enumerate() {
for index in 0..orig_nodes_len {
let node = &self.nodes[index];
match node.state.get() {
NodeState::Pending | NodeState::Waiting => {
if dead_nodes > 0 {
self.nodes.swap(index, index - dead_nodes);
*node_rewrite -= dead_nodes;
node_rewrites[index] -= dead_nodes;
}
}
NodeState::Done => {
@@ -646,7 +646,7 @@ impl<O: ForestObligation> ObligationForest<O> {
}
// Extract the success stories.
outcome_cb(&node.obligation);
*node_rewrite = orig_nodes_len;
node_rewrites[index] = orig_nodes_len;
dead_nodes += 1;
}
NodeState::Error => {
@@ -655,7 +655,7 @@ impl<O: ForestObligation> ObligationForest<O> {
// check against.
self.active_cache.remove(&node.obligation.as_cache_key());
self.insert_into_error_cache(index);
*node_rewrite = orig_nodes_len;
node_rewrites[index] = orig_nodes_len;
dead_nodes += 1;
}
NodeState::Success => unreachable!(),
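
One of the swaps above is between `assert!` and `debug_assert!` on `node_rewrites.is_empty()`. The practical difference: `assert!` is checked in every build, while `debug_assert!` is compiled out unless debug assertions are enabled (the default in debug builds), so it costs nothing in a typical release build. A minimal sketch (illustrative, not code from this diff):

```rust
fn take_scratch_buffer(scratch: &mut Vec<u32>) -> Vec<u32> {
    let buf = std::mem::take(scratch);
    // Always checked, whatever the build profile.
    assert!(scratch.is_empty(), "scratch buffer should be empty after take");
    // Checked only when debug assertions are enabled; a no-op otherwise.
    debug_assert!(scratch.is_empty(), "scratch buffer should be empty after take");
    buf
}

fn main() {
    let mut scratch = vec![1, 2, 3];
    let buf = take_scratch_buffer(&mut scratch);
    println!("took {} elements, debug assertions: {}", buf.len(), cfg!(debug_assertions));
}
```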

View File

@@ -205,10 +205,10 @@ impl<K: Ord, V> SortedMap<K, V> {
R: RangeBounds<K>,
{
let start = match range.start_bound() {
Bound::Included(k) => match self.lookup_index_for(k) {
Bound::Included(ref k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index,
},
Bound::Excluded(k) => match self.lookup_index_for(k) {
Bound::Excluded(ref k) => match self.lookup_index_for(k) {
Ok(index) => index + 1,
Err(index) => index,
},
@@ -216,11 +216,11 @@ impl<K: Ord, V> SortedMap<K, V> {
};
let end = match range.end_bound() {
Bound::Included(k) => match self.lookup_index_for(k) {
Bound::Included(ref k) => match self.lookup_index_for(k) {
Ok(index) => index + 1,
Err(index) => index,
},
Bound::Excluded(k) => match self.lookup_index_for(k) {
Bound::Excluded(ref k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index,
},
Bound::Unbounded => self.data.len(),

View File

@@ -75,7 +75,7 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
///
/// If there are multiple items that are equivalent to `key`, they will be yielded in
/// insertion order.
pub fn get_by_key(&self, key: K) -> impl Iterator<Item = &V> {
pub fn get_by_key(&'a self, key: K) -> impl 'a + Iterator<Item = &'a V> {
self.get_by_key_enumerated(key).map(|(_, v)| v)
}
@@ -84,7 +84,7 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
///
/// If there are multiple items that are equivalent to `key`, they will be yielded in
/// insertion order.
pub fn get_by_key_enumerated(&self, key: K) -> impl Iterator<Item = (I, &V)> {
pub fn get_by_key_enumerated(&'a self, key: K) -> impl '_ + Iterator<Item = (I, &V)> {
let lower_bound = self.idx_sorted_by_item_key.partition_point(|&i| self.items[i].0 < key);
self.idx_sorted_by_item_key[lower_bound..].iter().map_while(move |&i| {
let (k, v) = &self.items[i];

View File

@@ -257,7 +257,11 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
pub fn remove(&mut self, key: &K) -> Option<V> {
match self {
SsoHashMap::Array(array) => {
array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index).1)
if let Some(index) = array.iter().position(|(k, _v)| k == key) {
Some(array.swap_remove(index).1)
} else {
None
}
}
SsoHashMap::Map(map) => map.remove(key),
}
@@ -268,7 +272,11 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
pub fn remove_entry(&mut self, key: &K) -> Option<(K, V)> {
match self {
SsoHashMap::Array(array) => {
array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index))
if let Some(index) = array.iter().position(|(k, _v)| k == key) {
Some(array.swap_remove(index))
} else {
None
}
}
SsoHashMap::Map(map) => map.remove_entry(key),
}
@@ -415,14 +423,14 @@ impl<K, V> IntoIterator for SsoHashMap<K, V> {
/// adapts Item of array reference iterator to Item of hashmap reference iterator.
#[inline(always)]
fn adapt_array_ref_it<K, V>(pair: &(K, V)) -> (&K, &V) {
fn adapt_array_ref_it<K, V>(pair: &'a (K, V)) -> (&'a K, &'a V) {
let (a, b) = pair;
(a, b)
}
/// adapts Item of array mut reference iterator to Item of hashmap mut reference iterator.
#[inline(always)]
fn adapt_array_mut_it<K, V>(pair: &mut (K, V)) -> (&K, &mut V) {
fn adapt_array_mut_it<K, V>(pair: &'a mut (K, V)) -> (&'a K, &'a mut V) {
let (a, b) = pair;
(a, b)
}

View File

@@ -75,7 +75,7 @@ impl<T> SsoHashSet<T> {
/// An iterator visiting all elements in arbitrary order.
/// The iterator element type is `&'a T`.
#[inline]
pub fn iter(&self) -> impl Iterator<Item = &T> {
pub fn iter(&'a self) -> impl Iterator<Item = &'a T> {
self.into_iter()
}

View File

@@ -229,14 +229,14 @@ impl<CTX> HashStable<CTX> for ::std::num::NonZeroUsize {
impl<CTX> HashStable<CTX> for f32 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
let val: u32 = self.to_bits();
let val: u32 = unsafe { ::std::mem::transmute(*self) };
val.hash_stable(ctx, hasher);
}
}
impl<CTX> HashStable<CTX> for f64 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
let val: u64 = self.to_bits();
let val: u64 = unsafe { ::std::mem::transmute(*self) };
val.hash_stable(ctx, hasher);
}
}
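
The `HashStable` impls above trade `f32::to_bits`/`f64::to_bits` for a raw `transmute` of the float. Both spellings yield the same raw IEEE-754 bit pattern; `to_bits` is documented as equivalent to the transmute. A minimal sketch of the equivalence (illustrative, not code from this diff):

```rust
fn main() {
    let x: f32 = -1.5;
    let y: f64 = 2.75;

    // Safe accessors for the raw bit patterns ...
    let bits32 = x.to_bits();
    let bits64 = y.to_bits();

    // ... and the transmute spelling on the other side of the hunk.
    let raw32: u32 = unsafe { std::mem::transmute(x) };
    let raw64: u64 = unsafe { std::mem::transmute(y) };

    assert_eq!(bits32, raw32);
    assert_eq!(bits64, raw64);
    println!("{:#010x} {:#018x}", bits32, bits64);
}
```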

View File

@@ -5,7 +5,6 @@ const RED_ZONE: usize = 100 * 1024; // 100k
// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
// on. This flag has performance relevant characteristics. Don't set it too high.
#[allow(clippy::identity_op)]
const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations

View File

@@ -34,7 +34,7 @@ impl<T> Steal<T> {
#[track_caller]
pub fn borrow(&self) -> MappedReadGuard<'_, T> {
let borrow = self.value.borrow();
if borrow.is_none() {
if let None = &*borrow {
panic!("attempted to read from stolen value: {}", std::any::type_name::<T>());
}
ReadGuard::map(borrow, |opt| opt.as_ref().unwrap())

View File

@@ -48,7 +48,7 @@ impl<T: PartialEq> TinyList<T> {
#[inline]
pub fn contains(&self, data: &T) -> bool {
let mut elem = self.head.as_ref();
while let Some(e) = elem {
while let Some(ref e) = elem {
if &e.data == data {
return true;
}

View File

@@ -2,8 +2,8 @@ use rustc_index::vec::{Idx, IndexVec};
pub fn iter<Ls>(
first: Option<Ls::LinkIndex>,
links: &Ls,
) -> impl Iterator<Item = Ls::LinkIndex> + '_
links: &'a Ls,
) -> impl Iterator<Item = Ls::LinkIndex> + 'a
where
Ls: Links,
{

View File

@@ -512,7 +512,7 @@ impl<'a> LabelText<'a> {
pub fn to_dot_string(&self) -> String {
match *self {
LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(s)),
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
HtmlStr(ref s) => format!("<{}>", s),
}
}

View File

@@ -990,8 +990,9 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
pub fn insert_all_into_row(&mut self, row: R) {
assert!(row.index() < self.num_rows);
let (start, end) = self.range(row);
for word in self.words[start..end].iter_mut() {
*word = !0;
let words = &mut self.words[..];
for index in start..end {
words[index] = !0;
}
self.clear_excess_bits(row);
}
@@ -1143,7 +1144,7 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
/// Iterates through all the columns set to true in a given row of
/// the matrix.
pub fn iter(&self, row: R) -> impl Iterator<Item = C> + '_ {
pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a {
self.row(row).into_iter().flat_map(|r| r.iter())
}

View File

@@ -634,15 +634,18 @@ impl<I: Idx, T> IndexVec<I, T> {
}
#[inline]
pub fn drain<R: RangeBounds<usize>>(&mut self, range: R) -> impl Iterator<Item = T> + '_ {
pub fn drain<'a, R: RangeBounds<usize>>(
&'a mut self,
range: R,
) -> impl Iterator<Item = T> + 'a {
self.raw.drain(range)
}
#[inline]
pub fn drain_enumerated<R: RangeBounds<usize>>(
&mut self,
pub fn drain_enumerated<'a, R: RangeBounds<usize>>(
&'a mut self,
range: R,
) -> impl Iterator<Item = (I, T)> + '_ {
) -> impl Iterator<Item = (I, T)> + 'a {
self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t))
}

View File

@@ -68,10 +68,11 @@ pub enum EscapeError {
impl EscapeError {
/// Returns true for actual errors, as opposed to warnings.
pub fn is_fatal(&self) -> bool {
!matches!(
self,
EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning
)
match self {
EscapeError::UnskippedWhitespaceWarning => false,
EscapeError::MultipleSkippedLinesWarning => false,
_ => true,
}
}
}
@@ -329,7 +330,7 @@ where
callback(start..end, Err(EscapeError::MultipleSkippedLinesWarning));
}
let tail = &tail[first_non_space..];
if let Some(c) = tail.chars().next() {
if let Some(c) = tail.chars().nth(0) {
// For error reporting, we would like the span to contain the character that was not
// skipped. The +1 is necessary to account for the leading \ that started the escape.
let end = start + first_non_space + c.len_utf8() + 1;

View File

@@ -24,9 +24,11 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
}
if meta.path().is_ident("project") {
if let Meta::List(list) = meta {
if let Some(NestedMeta::Meta(meta)) = list.nested.iter().next() {
attrs.project = meta.path().get_ident().cloned();
any_attr = true;
if let Some(nested) = list.nested.iter().next() {
if let NestedMeta::Meta(meta) = nested {
attrs.project = meta.path().get_ident().cloned();
any_attr = true;
}
}
}
}

View File

@@ -349,14 +349,14 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
) -> Result<proc_macro2::TokenStream, SessionDiagnosticDeriveError> {
let field_binding = &info.binding.binding;
let option_ty = option_inner_ty(info.ty);
let option_ty = option_inner_ty(&info.ty);
let generated_code = self.generate_non_option_field_code(
attr,
FieldInfo {
vis: info.vis,
binding: info.binding,
ty: option_ty.unwrap_or(info.ty),
ty: option_ty.unwrap_or(&info.ty),
span: info.span,
},
)?;
@@ -388,7 +388,7 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
let formatted_str = self.build_format(&s.value(), attr.span());
match name {
"message" => {
if type_matches_path(info.ty, &["rustc_span", "Span"]) {
if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
quote! {
#diag.set_span(*#field_binding);
#diag.set_primary_message(#formatted_str);
@@ -401,7 +401,7 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
}
}
"label" => {
if type_matches_path(info.ty, &["rustc_span", "Span"]) {
if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
quote! {
#diag.span_label(*#field_binding, #formatted_str);
}

View File

@@ -500,8 +500,8 @@ impl<D: Decoder, const N: usize> Decodable<D> for [u8; N] {
d.read_seq(|d, len| {
assert!(len == N);
let mut v = [0u8; N];
for x in &mut v {
*x = d.read_seq_elt(|d| Decodable::decode(d))?;
for i in 0..len {
v[i] = d.read_seq_elt(|d| Decodable::decode(d))?;
}
Ok(v)
})

View File

@@ -31,10 +31,8 @@ impl<'a> Part<'a> {
} else {
3
}
} else if v < 10_000 {
4
} else {
5
if v < 10_000 { 4 } else { 5 }
}
}
Part::Copy(buf) => buf.len(),