Rollup merge of #120952 - saethlin:vec-into-iter, r=the8472

Don't use mem::zeroed in vec::IntoIter

`mem::zeroed` is not a trivial function. Maybe it was once, but now it involves multiple locals, copies, and an intrinsic that gets monomorphized into a call to `panic_nounwind` for iterators of types like `Vec<&T>`. Of course all that complexity is trivially optimized out, but generating a bunch of IR we don't need, just so it can be optimized away later, is silly.
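
As background for why `mem::zeroed` is non-trivial, a minimal sketch (not part of this change; the `&u8` line is illustrative only): `mem::zeroed` expands to the `assert_zero_valid` intrinsic followed by a zeroed `MaybeUninit` read, and for types whose validity invariant excludes the all-zero bit pattern, the monomorphized check lowers to a `panic_nounwind` abort:

use std::mem;

fn main() {
    // Fine: a ZST has no bytes, so zero-initializing it is a no-op.
    let _unit: () = unsafe { mem::zeroed() };

    // Would abort at runtime: zero is not a valid `&u8`, so the
    // monomorphized `assert_zero_valid` check calls `panic_nounwind`
    // with a message along the lines of "attempted to zero-initialize
    // type `&u8`, which is invalid".
    // let _r: &u8 = unsafe { mem::zeroed() };
}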
commit 59972868e6
Matthias Krüger, 2024-02-17 18:47:40 +01:00, committed by GitHub
2 changed files with 44 additions and 36 deletions

@@ -11,7 +11,7 @@
     TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
+use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::num::NonZero;
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
@@ -200,27 +200,23 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     #[inline]
     fn next(&mut self) -> Option<T> {
-        if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut _ {
-                None
-            } else {
-                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-                // reducing the `end`.
-                self.end = self.end.wrapping_byte_sub(1);
-
-                // Make up a value of this ZST.
-                Some(unsafe { mem::zeroed() })
-            }
+        let ptr = if T::IS_ZST {
+            if self.ptr.as_ptr() == self.end as *mut T {
+                return None;
+            }
+            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+            // reducing the `end`.
+            self.end = self.end.wrapping_byte_sub(1);
+            self.ptr
         } else {
             if self.ptr == non_null!(self.end, T) {
-                None
-            } else {
-                let old = self.ptr;
-                self.ptr = unsafe { old.add(1) };
-
-                Some(unsafe { ptr::read(old.as_ptr()) })
-            }
-        }
+                return None;
+            }
+            let old = self.ptr;
+            self.ptr = unsafe { old.add(1) };
+            old
+        };
+        Some(unsafe { ptr.read() })
     }

     #[inline]
@@ -305,7 +301,7 @@ unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
+        unsafe { self.ptr.add(i).read() }
     }
 }
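
Both hunks above replace `mem::zeroed()` with a plain read through the iterator's pointer. A standalone sketch (not the stdlib code) of the property this relies on: for a zero-sized type, a read through any aligned, non-null pointer is valid and touches no memory, so there is no need to conjure a value out of thin air:

use std::ptr::NonNull;

fn main() {
    // A dangling-but-aligned pointer is valid for zero-sized reads.
    let ptr: NonNull<()> = NonNull::dangling();
    // SAFETY: `()` is a ZST, and `ptr` is non-null and aligned.
    let _value: () = unsafe { ptr.as_ptr().read() };
}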
@@ -314,23 +310,22 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
         if T::IS_ZST {
-            if self.end as *mut _ == self.ptr.as_ptr() {
-                None
-            } else {
-                // See above for why 'ptr.offset' isn't used
-                self.end = self.end.wrapping_byte_sub(1);
-
-                // Make up a value of this ZST.
-                Some(unsafe { mem::zeroed() })
-            }
-        } else {
-            if non_null!(self.end, T) == self.ptr {
-                None
-            } else {
-                let new_end = unsafe { non_null!(self.end, T).sub(1) };
-                *non_null!(mut self.end, T) = new_end;
-
-                Some(unsafe { ptr::read(new_end.as_ptr()) })
-            }
-        }
+            if self.ptr.as_ptr() == self.end as *mut _ {
+                return None;
+            }
+            // See above for why 'ptr.offset' isn't used
+            self.end = self.end.wrapping_byte_sub(1);
+            // Note that even though this is next_back() we're reading from `self.ptr`, not
+            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
+            // so the end pointer may not be suitably aligned for T.
+            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
+        } else {
+            if self.ptr == non_null!(self.end, T) {
+                return None;
+            }
+            unsafe {
+                self.end = self.end.sub(1);
+                Some(ptr::read(self.end))
+            }
+        }
     }
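
The new comments in next_back() describe the length-tracking trick for ZST iterators. A self-contained sketch of the idea (simplified; `ZstIter` is made up for illustration, not the stdlib type): `ptr` stays fixed and aligned, the remaining length is encoded as the byte distance from `ptr` to `end`, and popping an element shrinks `end` by one byte. Since `end` may therefore be unaligned, every read goes through `ptr`:

use std::ptr::NonNull;

struct ZstIter {
    ptr: *const (),
    end: *const (),
}

impl ZstIter {
    fn new(len: usize) -> Self {
        // An aligned, non-null, dangling pointer: fine for ZST reads.
        let ptr = NonNull::<()>::dangling().as_ptr().cast_const();
        // Encode the length as a byte offset from `ptr`.
        ZstIter { ptr, end: ptr.wrapping_byte_add(len) }
    }
}

impl Iterator for ZstIter {
    type Item = ();

    fn next(&mut self) -> Option<()> {
        if self.ptr == self.end {
            return None;
        }
        // Shrink the byte distance by one; `end` may now be unaligned.
        self.end = self.end.wrapping_byte_sub(1);
        // SAFETY: zero-sized read through the aligned, non-null `ptr`.
        Some(unsafe { self.ptr.read() })
    }
}

fn main() {
    assert_eq!(ZstIter::new(3).count(), 3);
}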

@@ -32,9 +32,9 @@ pub fn vec_iter_is_empty_nonnull(it: &vec::IntoIter<u8>) -> bool {
     it.is_empty()
 }

-// CHECK-LABEL: @vec_iter_next
+// CHECK-LABEL: @vec_iter_next_nonnull
 #[no_mangle]
-pub fn vec_iter_next(it: &mut vec::IntoIter<u8>) -> Option<u8> {
+pub fn vec_iter_next_nonnull(it: &mut vec::IntoIter<u8>) -> Option<u8> {
     // CHECK: load ptr
     // CHECK-SAME: !nonnull
     // CHECK-SAME: !noundef
@@ -44,3 +44,16 @@ pub fn vec_iter_next_nonnull(it: &mut vec::IntoIter<u8>) -> Option<u8> {
     // CHECK: ret
     it.next()
 }
+
+// CHECK-LABEL: @vec_iter_next_back_nonnull
+#[no_mangle]
+pub fn vec_iter_next_back_nonnull(it: &mut vec::IntoIter<u8>) -> Option<u8> {
+    // CHECK: load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: ret
+    it.next_back()
+}
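
The added test mirrors vec_iter_next_nonnull for the back half of the iterator, asserting that both pointer loads in next_back() carry !nonnull and !noundef metadata. As a hypothetical standalone illustration of where such metadata comes from (`Wrapper` and `load_field` are made up, not from this PR): rustc attaches it to loads of types like `NonNull` whose validity invariant rules out null and uninitialized bits:

use std::ptr::NonNull;

pub struct Wrapper {
    field: NonNull<u8>,
}

// Compiled with optimizations, reading the `NonNull` field lowers to a
// `load ptr` annotated with `!nonnull` and `!noundef` metadata, the
// same metadata the CHECK-SAME lines above match on the iterator's
// pointer loads.
#[no_mangle]
pub fn load_field(w: &Wrapper) -> NonNull<u8> {
    w.field
}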