Rollup merge of #53223 - ljedrz:cleanup_data_structures, r=oli-obk
A few cleanups for rustc_data_structures (a couple of these patterns are illustrated in the short sketch below):

- remove a redundant `clone()`
- make some calls to `.iter()` implicit
- collapse/simplify a few operations
- remove some explicit `return`s
- make `SnapshotMap::{commit, rollback_to}` take references
- remove unnecessary struct field names
- change `transmute()`s in `IdxSet::{from_slice, from_slice_mut}` to casts
- remove some unnecessary lifetime annotations
- split 2 long literals
This commit is contained in: commit f1ae95f7e2
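As a quick orientation before the diff, here is a small self-contained sketch of three of the mechanical patterns listed above: struct field init shorthand, dropping a trailing explicit `return`, and compound assignment with `|=`. It is loosely modelled on the `bit_lookup` hunk further down; the surrounding `main` and the constant `word_bits` are made up for illustration.

```rust
// Hypothetical standalone example, not code from this commit.
struct BitLookup {
    word: usize,
    bit_in_word: usize,
    bit_mask: usize,
}

fn bit_lookup(bit: usize) -> BitLookup {
    let word_bits = 64;
    let word = bit / word_bits;
    let bit_in_word = bit % word_bits;
    let bit_mask = 1 << bit_in_word;
    // Field init shorthand instead of `word: word, ...`, and no explicit `return`.
    BitLookup { word, bit_in_word, bit_mask }
}

fn main() {
    let lookup = bit_lookup(70);
    let mut changed = false;
    // `|=` instead of `changed = changed | (...)`.
    changed |= lookup.word == 1;
    assert!(changed && lookup.bit_in_word == 6 && lookup.bit_mask == 0b100_0000);
}
```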
@@ -709,7 +709,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
         self.projection_cache
             .borrow_mut()
-            .commit(projection_cache_snapshot);
+            .commit(&projection_cache_snapshot);
         self.type_variables
             .borrow_mut()
             .commit(type_snapshot);
@@ -1668,15 +1668,15 @@ impl<'tcx> ProjectionCache<'tcx> {
     }

     pub fn rollback_to(&mut self, snapshot: ProjectionCacheSnapshot) {
-        self.map.rollback_to(snapshot.snapshot);
+        self.map.rollback_to(&snapshot.snapshot);
     }

     pub fn rollback_skolemized(&mut self, snapshot: &ProjectionCacheSnapshot) {
         self.map.partial_rollback(&snapshot.snapshot, &|k| k.ty.has_re_skol());
     }

-    pub fn commit(&mut self, snapshot: ProjectionCacheSnapshot) {
-        self.map.commit(snapshot.snapshot);
+    pub fn commit(&mut self, snapshot: &ProjectionCacheSnapshot) {
+        self.map.commit(&snapshot.snapshot);
     }

     /// Try to start normalize `key`; returns an error if
@@ -17,7 +17,7 @@ pub const MAX_BASE: usize = 64;
 pub const ALPHANUMERIC_ONLY: usize = 62;
 pub const CASE_INSENSITIVE: usize = 36;

-const BASE_64: &'static [u8; MAX_BASE as usize] =
+const BASE_64: &[u8; MAX_BASE as usize] =
     b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@$";

 #[inline]
@@ -37,7 +37,8 @@ pub fn push_str(mut n: u128, base: usize, output: &mut String) {
             break;
         }
     }
-    &mut s[0..index].reverse();
+    s[0..index].reverse();
+
     output.push_str(str::from_utf8(&s[0..index]).unwrap());
 }
@@ -75,7 +75,7 @@ fn bit_lookup(bit: usize) -> BitLookup {
     let word = bit / word_bits;
     let bit_in_word = bit % word_bits;
     let bit_mask = 1 << bit_in_word;
-    BitLookup { word: word, bit_in_word: bit_in_word, bit_mask: bit_mask }
+    BitLookup { word, bit_in_word, bit_mask }
 }

 pub fn bits_to_string(words: &[Word], bits: usize) -> String {
@@ -105,7 +105,8 @@ pub fn bits_to_string(words: &[Word], bits: usize) -> String {
         sep = '|';
     }
     result.push(']');
-    return result
+
+    result
 }

 #[inline]
@@ -196,7 +196,8 @@ impl<'a, C: Idx> Iterator for BitIter<'a, C> {
         self.current >>= offset;
         self.current >>= 1; // shift otherwise overflows for 0b1000_0000_…_0000
         self.idx += offset + 1;
-        return Some(C::new(self.idx - 1));
+
+        Some(C::new(self.idx - 1))
     }

     fn size_hint(&self) -> (usize, Option<usize>) {
@@ -299,7 +300,7 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
             let v1 = vector[write_index];
             let v2 = v1 | vector[read_index];
             vector[write_index] = v2;
-            changed = changed | (v1 != v2);
+            changed |= v1 != v2;
         }
         changed
     }
@@ -254,8 +254,8 @@ mod imp {
     type ULONG_PTR = usize;

     type LPOVERLAPPED = *mut OVERLAPPED;
-    const LOCKFILE_EXCLUSIVE_LOCK: DWORD = 0x00000002;
-    const LOCKFILE_FAIL_IMMEDIATELY: DWORD = 0x00000001;
+    const LOCKFILE_EXCLUSIVE_LOCK: DWORD = 0x0000_0002;
+    const LOCKFILE_FAIL_IMMEDIATELY: DWORD = 0x0000_0001;

     const FILE_SHARE_DELETE: DWORD = 0x4;
     const FILE_SHARE_READ: DWORD = 0x1;
@@ -107,7 +107,8 @@ fn intersect<Node: Idx>(
             node2 = immediate_dominators[node2].unwrap();
         }
     }
-    return node1;
+
+    node1
 }

 #[derive(Clone, Debug)]
@@ -90,7 +90,7 @@ pub const INCOMING: Direction = Direction { repr: 1 };

 impl NodeIndex {
     /// Returns unique id (unique with respect to the graph holding associated node).
-    pub fn node_id(&self) -> usize {
+    pub fn node_id(self) -> usize {
         self.0
     }
 }
@@ -187,7 +187,7 @@ impl<N: Debug, E: Debug> Graph<N, E> {
         self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
         self.nodes[target.0].first_edge[INCOMING.repr] = idx;

-        return idx;
+        idx
     }

     pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
@@ -261,8 +261,8 @@ impl<N: Debug, E: Debug> Graph<N, E> {
         DepthFirstTraversal::with_start_node(self, start, direction)
     }

-    pub fn nodes_in_postorder<'a>(
-        &'a self,
+    pub fn nodes_in_postorder(
+        &self,
         direction: Direction,
         entry_node: NodeIndex,
     ) -> Vec<NodeIndex> {
@@ -59,16 +59,13 @@ impl<T: Idx> rustc_serialize::Decodable for IdxSetBuf<T> {

 // pnkfelix wants to have this be `IdxSet<T>([Word]) and then pass
 // around `&mut IdxSet<T>` or `&IdxSet<T>`.
-//
-// WARNING: Mapping a `&IdxSetBuf<T>` to `&IdxSet<T>` (at least today)
-// requires a transmute relying on representation guarantees that may
-// not hold in the future.

 /// Represents a set (or packed family of sets), of some element type
 /// E, where each E is identified by some unique index type `T`.
 ///
 /// In other words, `T` is the type used to index into the bitslice
 /// this type uses to represent the set of object it holds.
+#[repr(transparent)]
 pub struct IdxSet<T: Idx> {
     _pd: PhantomData<fn(&T)>,
     bits: [Word],
@@ -134,11 +131,11 @@ impl<T: Idx> IdxSetBuf<T> {
 }

 impl<T: Idx> IdxSet<T> {
     unsafe fn from_slice(s: &[Word]) -> &Self {
-        mem::transmute(s) // (see above WARNING)
+        &*(s as *const [Word] as *const Self)
     }

     unsafe fn from_slice_mut(s: &mut [Word]) -> &mut Self {
-        mem::transmute(s) // (see above WARNING)
+        &mut *(s as *mut [Word] as *mut Self)
     }
 }
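The hunk above swaps `mem::transmute` for raw-pointer casts and adds `#[repr(transparent)]` so the layout guarantee the old WARNING comment worried about becomes explicit. A minimal sketch of that pattern, using a simplified stand-in type `Bits` rather than the real `IdxSet<T>`:

```rust
// Simplified illustration of the cast-instead-of-transmute pattern.
#[repr(transparent)]
struct Bits([u64]);

impl Bits {
    fn from_slice(s: &[u64]) -> &Self {
        // Sound because `#[repr(transparent)]` guarantees `Bits` has the same
        // layout as its single field `[u64]`; the fat-pointer metadata (the
        // slice length) is carried through the cast unchanged.
        unsafe { &*(s as *const [u64] as *const Self) }
    }
}

fn main() {
    let words = [0b1010_u64, 0];
    let bits = Bits::from_slice(&words);
    assert_eq!(bits.0.len(), 2);
}
```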
@@ -573,7 +573,7 @@ impl<O: ForestObligation> ObligationForest<O> {
         }

         let mut kill_list = vec![];
-        for (predicate, index) in self.waiting_cache.iter_mut() {
+        for (predicate, index) in &mut self.waiting_cache {
             let new_index = node_rewrites[index.get()];
             if new_index >= nodes_len {
                 kill_list.push(predicate.clone());
@@ -92,7 +92,7 @@ impl<K, V> SnapshotMap<K, V>
     pub fn snapshot(&mut self) -> Snapshot {
         self.undo_log.push(UndoLog::OpenSnapshot);
         let len = self.undo_log.len() - 1;
-        Snapshot { len: len }
+        Snapshot { len }
     }

     fn assert_open_snapshot(&self, snapshot: &Snapshot) {
@@ -103,8 +103,8 @@ impl<K, V> SnapshotMap<K, V>
         });
     }

-    pub fn commit(&mut self, snapshot: Snapshot) {
-        self.assert_open_snapshot(&snapshot);
+    pub fn commit(&mut self, snapshot: &Snapshot) {
+        self.assert_open_snapshot(snapshot);
         if snapshot.len == 0 {
             // The root snapshot.
             self.undo_log.truncate(0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rollback_to(&mut self, snapshot: Snapshot) {
|
||||
self.assert_open_snapshot(&snapshot);
|
||||
pub fn rollback_to(&mut self, snapshot: &Snapshot) {
|
||||
self.assert_open_snapshot(snapshot);
|
||||
while self.undo_log.len() > snapshot.len + 1 {
|
||||
let entry = self.undo_log.pop().unwrap();
|
||||
self.reverse(entry);
|
||||
|
@@ -20,7 +20,7 @@ fn basic() {
     map.insert(44, "fourty-four");
     assert_eq!(map[&44], "fourty-four");
     assert_eq!(map.get(&33), None);
-    map.rollback_to(snapshot);
+    map.rollback_to(&snapshot);
     assert_eq!(map[&22], "twenty-two");
     assert_eq!(map.get(&33), None);
     assert_eq!(map.get(&44), None);
@@ -33,7 +33,7 @@ fn out_of_order() {
     map.insert(22, "twenty-two");
     let snapshot1 = map.snapshot();
     let _snapshot2 = map.snapshot();
-    map.rollback_to(snapshot1);
+    map.rollback_to(&snapshot1);
 }

 #[test]
@@ -43,8 +43,8 @@ fn nested_commit_then_rollback() {
     let snapshot1 = map.snapshot();
     let snapshot2 = map.snapshot();
     map.insert(22, "thirty-three");
-    map.commit(snapshot2);
+    map.commit(&snapshot2);
     assert_eq!(map[&22], "thirty-three");
-    map.rollback_to(snapshot1);
+    map.rollback_to(&snapshot1);
     assert_eq!(map[&22], "twenty-two");
 }
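The `SnapshotMap` hunks above change `commit` and `rollback_to` to borrow the snapshot instead of consuming it; both only need to read `snapshot.len`, so a reference is enough. A rough sketch of the underlying undo-log idea, using a hypothetical `SnapMap` that is much simpler than the real rustc type (no open/committed snapshot markers, and only insertions are recorded):

```rust
use std::collections::HashMap;
use std::hash::Hash;

struct Snapshot {
    len: usize,
}

enum Undo<K> {
    Inserted(K),
}

struct SnapMap<K: Hash + Eq + Clone, V> {
    map: HashMap<K, V>,
    undo_log: Vec<Undo<K>>,
}

impl<K: Hash + Eq + Clone, V> SnapMap<K, V> {
    fn new() -> Self {
        SnapMap { map: HashMap::new(), undo_log: Vec::new() }
    }

    fn insert(&mut self, k: K, v: V) {
        // Record only brand-new keys; handling overwrites is omitted for brevity.
        if self.map.insert(k.clone(), v).is_none() {
            self.undo_log.push(Undo::Inserted(k));
        }
    }

    fn snapshot(&self) -> Snapshot {
        Snapshot { len: self.undo_log.len() }
    }

    // Borrowing the snapshot is enough to know how far to unwind the log.
    fn rollback_to(&mut self, snapshot: &Snapshot) {
        while self.undo_log.len() > snapshot.len {
            match self.undo_log.pop().unwrap() {
                Undo::Inserted(k) => {
                    self.map.remove(&k);
                }
            }
        }
    }

    fn commit(&mut self, snapshot: &Snapshot) {
        // Committing the outermost snapshot makes every change permanent;
        // committing a nested snapshot is a no-op in this simplified sketch.
        if snapshot.len == 0 {
            self.undo_log.clear();
        }
    }
}

fn main() {
    let mut map = SnapMap::new();
    let root = map.snapshot(); // len == 0
    map.insert(22, "twenty-two");
    let snapshot = map.snapshot(); // len == 1
    map.insert(33, "thirty-three");
    map.rollback_to(&snapshot); // undoes the insert of 33
    assert!(map.map.get(&33).is_none());
    assert_eq!(map.map[&22], "twenty-two");
    map.commit(&root); // root commit: the undo log is cleared
    assert!(map.undo_log.is_empty());
}
```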
@@ -107,7 +107,8 @@ impl<T: PartialEq> Element<T> {
         };

         self.next = new_next;
-        return true
+
+        true
     }

     fn len(&self) -> usize {
@@ -77,7 +77,7 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
             ..
         } = self;

-        map.entry(a.clone())
+        *map.entry(a.clone())
             .or_insert_with(|| {
                 elements.push(a);

@@ -86,7 +86,6 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {

                 Index(elements.len() - 1)
             })
-            .clone()
     }

     /// Applies the (partial) function to each edge and returns a new
@@ -98,14 +97,12 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
     {
         let mut result = TransitiveRelation::new();
         for edge in &self.edges {
-            let r = f(&self.elements[edge.source.0]).and_then(|source| {
+            f(&self.elements[edge.source.0]).and_then(|source| {
                 f(&self.elements[edge.target.0]).and_then(|target| {
-                    Some(result.add(source, target))
+                    result.add(source, target);
+                    Some(())
                 })
-            });
-            if r.is_none() {
-                return None;
-            }
+            })?;
         }
         Some(result)
     }
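The hunk above replaces the `let r = ...; if r.is_none() { return None; }` dance with the `?` operator applied to an `Option`. A tiny standalone example of that pattern (hypothetical function, unrelated to the real `TransitiveRelation` API):

```rust
// `opt?` returns `None` from the enclosing function as soon as a value is
// missing, so no explicit `is_none()` check or early `return None;` is needed.
fn double_all(values: &[&str]) -> Option<Vec<i32>> {
    let mut result = Vec::new();
    for v in values {
        // `.ok()` turns the parse Result into an Option; `?` bails out on None.
        let n: i32 = v.parse().ok()?;
        result.push(n * 2);
    }
    Some(result)
}

fn main() {
    assert_eq!(double_all(&["1", "2"]), Some(vec![2, 4]));
    assert_eq!(double_all(&["1", "x"]), None);
}
```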
@@ -372,7 +369,7 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
         let mut changed = true;
         while changed {
             changed = false;
-            for edge in self.edges.iter() {
+            for edge in &self.edges {
                 // add an edge from S -> T
                 changed |= matrix.add(edge.source.0, edge.target.0);