Derive src pointers in sort drop guards from &T
The src pointers in CopyOnDrop and InsertionHole used to be *mut T, and were derived via automatic conversion from &mut T. According to Stacked Borrows 2.1, this means that those pointers become invalidated by interior mutation in the comparison function. But there's no need for mutability in this code path. Thus, we can change the drop guards to use *const T and derive those pointers from &T instead.
This commit is contained in:
parent
daf2204aa4
commit
a5a91c8e07
@ -892,7 +892,7 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
|
||||
// performance than with the 2nd method.
|
||||
//
|
||||
// All methods were benchmarked, and the 3rd showed best results. So we chose that one.
|
||||
let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
|
||||
let tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
|
||||
|
||||
// Intermediate state of the insertion process is always tracked by `hole`, which
|
||||
// serves two purposes:
|
||||
@ -904,7 +904,7 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
|
||||
// If `is_less` panics at any point during the process, `hole` will get dropped and
|
||||
// fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
|
||||
// initially held exactly once.
|
||||
let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] };
|
||||
let mut hole = InsertionHole { src: &*tmp, dest: &mut v[1] };
|
||||
ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
|
||||
|
||||
for i in 2..v.len() {
|
||||
@ -920,7 +920,7 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
|
||||
|
||||
// When dropped, copies from `src` into `dest`.
|
||||
struct InsertionHole<T> {
|
||||
src: *mut T,
|
||||
src: *const T,
|
||||
dest: *mut T,
|
||||
}
|
||||
|
||||
|
@ -12,7 +12,7 @@
|
||||
|
||||
/// When dropped, copies from `src` into `dest`.
|
||||
struct CopyOnDrop<T> {
|
||||
src: *mut T,
|
||||
src: *const T,
|
||||
dest: *mut T,
|
||||
}
|
||||
|
||||
@ -54,9 +54,9 @@ fn shift_head<T, F>(v: &mut [T], is_less: &mut F)
|
||||
// Read the first element into a stack-allocated variable. If a following comparison
|
||||
// operation panics, `hole` will get dropped and automatically write the element back
|
||||
// into the slice.
|
||||
let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0)));
|
||||
let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0)));
|
||||
let v = v.as_mut_ptr();
|
||||
let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.add(1) };
|
||||
let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(1) };
|
||||
ptr::copy_nonoverlapping(v.add(1), v.add(0), 1);
|
||||
|
||||
for i in 2..len {
|
||||
@ -100,9 +100,9 @@ fn shift_tail<T, F>(v: &mut [T], is_less: &mut F)
|
||||
// Read the last element into a stack-allocated variable. If a following comparison
|
||||
// operation panics, `hole` will get dropped and automatically write the element back
|
||||
// into the slice.
|
||||
let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1)));
|
||||
let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1)));
|
||||
let v = v.as_mut_ptr();
|
||||
let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.add(len - 2) };
|
||||
let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(len - 2) };
|
||||
ptr::copy_nonoverlapping(v.add(len - 2), v.add(len - 1), 1);
|
||||
|
||||
for i in (0..len - 2).rev() {
|
||||
@ -498,8 +498,8 @@ fn partition<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool)
|
||||
// operation panics, the pivot will be automatically written back into the slice.
|
||||
|
||||
// SAFETY: `pivot` is a reference to the first element of `v`, so `ptr::read` is safe.
|
||||
let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
|
||||
let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot };
|
||||
let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
|
||||
let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot };
|
||||
let pivot = &*tmp;
|
||||
|
||||
// Find the first pair of out-of-order elements.
|
||||
@ -551,8 +551,8 @@ fn partition_equal<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> usize
|
||||
// Read the pivot into a stack-allocated variable for efficiency. If a following comparison
|
||||
// operation panics, the pivot will be automatically written back into the slice.
|
||||
// SAFETY: The pointer here is valid because it is obtained from a reference to a slice.
|
||||
let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
|
||||
let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot };
|
||||
let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
|
||||
let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot };
|
||||
let pivot = &*tmp;
|
||||
|
||||
// Now partition the slice.
|
||||
|
Loading…
Reference in New Issue
Block a user