Rollup merge of #72709 - LeSeulArtichaut:unsafe-liballoc, r=nikomatsakis

`#[deny(unsafe_op_in_unsafe_fn)]` in liballoc

This PR makes use of the new `unsafe_op_in_unsafe_fn` lint: the body of an unsafe function is no longer treated as one implicit unsafe block, so every unsafe operation inside it must be wrapped in an explicit `unsafe` block.
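
As a minimal illustration of the pattern this lint enforces (a hypothetical sketch, not code from this PR; at the time it also required the nightly `unsafe_block_in_unsafe_fn` feature gate, tracking issue #71668):

```rust
// Hypothetical example (not part of this diff) showing the lint's effect.
#![feature(unsafe_block_in_unsafe_fn)] // nightly-only at the time of this PR
#![deny(unsafe_op_in_unsafe_fn)]

unsafe fn read_byte(ptr: *const u8) -> u8 {
    // *ptr               // now rejected: raw-pointer deref needs an `unsafe` block
    unsafe { *ptr }       // OK: the unsafe operation is scoped explicitly
}

fn main() {
    let x = 42u8;
    // Calling an `unsafe fn` still needs an `unsafe` block at the call site.
    assert_eq!(unsafe { read_byte(&x) }, 42);
}
```

The diff below applies exactly this mechanical change throughout liballoc: each unsafe operation inside an `unsafe fn` is wrapped in an explicit `unsafe { ... }` block.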

This was first (partly) suggested by @Mark-Simulacrum in https://github.com/rust-lang/rust/pull/69245#issuecomment-587817065.

Tracking issue for the feature: #71668.
~~Blocked on #71862.~~
r? @Mark-Simulacrum
cc @nikomatsakis: can you confirm that these changes are desirable? Should I restrict this to only the BTree code for the moment?
Commit 55479de299 by Manish Goregaokar, 2020-06-19 09:14:58 -07:00, committed by GitHub
19 changed files with 389 additions and 259 deletions

View File

@ -77,7 +77,7 @@ pub struct Global;
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
__rust_alloc(layout.size(), layout.align())
unsafe { __rust_alloc(layout.size(), layout.align()) }
}
/// Deallocate memory with the global allocator.
@ -99,7 +99,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
__rust_dealloc(ptr, layout.size(), layout.align())
unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
/// Reallocate memory with the global allocator.
@ -121,7 +121,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
__rust_realloc(ptr, layout.size(), layout.align(), new_size)
unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
/// Allocate zero-initialized memory with the global allocator.
@ -158,7 +158,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
__rust_alloc_zeroed(layout.size(), layout.align())
unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}
#[unstable(feature = "allocator_api", issue = "32838")]
@ -183,7 +183,7 @@ unsafe impl AllocRef for Global {
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
dealloc(ptr.as_ptr(), layout)
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}
@ -209,16 +209,21 @@ unsafe impl AllocRef for Global {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if layout.size() == 0 => {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_layout =
unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
self.alloc(new_layout, init)
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size > size` or something similar.
intrinsics::assume(new_size > size);
let ptr = realloc(ptr.as_ptr(), layout, new_size);
let ptr = unsafe {
intrinsics::assume(new_size > size);
realloc(ptr.as_ptr(), layout, new_size)
};
let memory =
MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
init.init_offset(memory, size);
unsafe {
init.init_offset(memory, size);
}
Ok(memory)
}
}
@ -245,13 +250,17 @@ unsafe impl AllocRef for Global {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if new_size == 0 => {
self.dealloc(ptr, layout);
unsafe {
self.dealloc(ptr, layout);
}
Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size < size` or something similar.
intrinsics::assume(new_size < size);
let ptr = realloc(ptr.as_ptr(), layout, new_size);
let ptr = unsafe {
intrinsics::assume(new_size < size);
realloc(ptr.as_ptr(), layout, new_size)
};
Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
}
}
@ -264,7 +273,7 @@ unsafe impl AllocRef for Global {
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
let layout = Layout::from_size_align_unchecked(size, align);
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
match Global.alloc(layout, AllocInit::Uninitialized) {
Ok(memory) => memory.ptr.as_ptr(),
Err(_) => handle_alloc_error(layout),
@ -279,10 +288,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr.cast().into(), layout)
unsafe {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr.cast().into(), layout)
}
}
/// Abort on memory allocation error or failure.

View File

@ -311,7 +311,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<T> {
Box::from_raw(Box::into_raw(self) as *mut T)
unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
}
}
@ -349,7 +349,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<[T]> {
Box::from_raw(Box::into_raw(self) as *mut [T])
unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
}
}
@ -393,7 +393,7 @@ impl<T: ?Sized> Box<T> {
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub unsafe fn from_raw(raw: *mut T) -> Self {
Box(Unique::new_unchecked(raw))
Box(unsafe { Unique::new_unchecked(raw) })
}
/// Consumes the `Box`, returning a wrapped raw pointer.

View File

@ -1003,7 +1003,7 @@ impl<'a, T> Hole<'a, T> {
unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
debug_assert!(pos < data.len());
// SAFE: pos should be inside the slice
let elt = ptr::read(data.get_unchecked(pos));
let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
Hole { data, elt: ManuallyDrop::new(elt), pos }
}
@ -1025,7 +1025,7 @@ impl<'a, T> Hole<'a, T> {
unsafe fn get(&self, index: usize) -> &T {
debug_assert!(index != self.pos);
debug_assert!(index < self.data.len());
self.data.get_unchecked(index)
unsafe { self.data.get_unchecked(index) }
}
/// Move hole to new location
@ -1035,9 +1035,11 @@ impl<'a, T> Hole<'a, T> {
unsafe fn move_to(&mut self, index: usize) {
debug_assert!(index != self.pos);
debug_assert!(index < self.data.len());
let index_ptr: *const _ = self.data.get_unchecked(index);
let hole_ptr = self.data.get_unchecked_mut(self.pos);
ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
unsafe {
let index_ptr: *const _ = self.data.get_unchecked(index);
let hole_ptr = self.data.get_unchecked_mut(self.pos);
ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
}
self.pos = index;
}
}

View File

@ -1725,7 +1725,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
&mut self,
) -> Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>> {
let edge = self.cur_leaf_edge.as_ref()?;
ptr::read(edge).next_kv().ok()
unsafe { ptr::read(edge).next_kv().ok() }
}
/// Implementation of a typical `DrainFilter::next` method, given the predicate.
@ -1808,7 +1808,7 @@ impl<'a, K, V> Range<'a, K, V> {
}
unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
unwrap_unchecked(self.front.as_mut()).next_unchecked()
unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
}
}
@ -1821,7 +1821,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
impl<'a, K, V> Range<'a, K, V> {
unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
}
}
@ -1859,7 +1859,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
}
unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
unwrap_unchecked(self.front.as_mut()).next_unchecked()
unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
}
}
@ -1880,7 +1880,7 @@ impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
impl<'a, K, V> RangeMut<'a, K, V> {
unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
}
}

View File

@ -19,7 +19,9 @@ pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
if cfg!(debug_assertions) {
panic!("'unchecked' unwrap on None in BTreeMap");
} else {
core::intrinsics::unreachable();
unsafe {
core::intrinsics::unreachable();
}
}
})
}

View File

@ -64,8 +64,10 @@ macro_rules! def_next_kv_uncheched_dealloc {
edge = match edge.$adjacent_kv() {
Ok(internal_kv) => return internal_kv,
Err(last_edge) => {
let parent_edge = last_edge.into_node().deallocate_and_ascend();
unwrap_unchecked(parent_edge).forget_node_type()
unsafe {
let parent_edge = last_edge.into_node().deallocate_and_ascend();
unwrap_unchecked(parent_edge).forget_node_type()
}
}
}
}
@ -82,9 +84,11 @@ def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_k
/// Safety: The change closure must not panic.
#[inline]
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
let value = ptr::read(v);
let value = unsafe { ptr::read(v) };
let (new_value, ret) = change(value);
ptr::write(v, new_value);
unsafe {
ptr::write(v, new_value);
}
ret
}
@ -93,22 +97,26 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Ed
/// key and value in between.
/// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_leaf_edge(), kv.into_kv())
})
unsafe {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_leaf_edge(), kv.into_kv())
})
}
}
/// Moves the leaf edge handle to the previous leaf edge and returns references to the
/// key and value in between.
/// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_back_leaf_edge(), kv.into_kv())
})
unsafe {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_back_leaf_edge(), kv.into_kv())
})
}
}
}
@ -119,14 +127,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
/// - The caller must ensure that the leaf edge is not the last one in the tree.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
unsafe {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
}
}
/// Moves the leaf edge handle to the previous leaf and returns references to the
@ -135,14 +145,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
/// - The caller must ensure that the leaf edge is not the first one in the tree.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_back_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
unsafe {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_back_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
}
}
}
@ -159,12 +171,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// if the two preconditions above hold.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
replace(self, |leaf_edge| {
let kv = next_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_leaf_edge(), (k, v))
})
unsafe {
replace(self, |leaf_edge| {
let kv = next_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_leaf_edge(), (k, v))
})
}
}
/// Moves the leaf edge handle to the previous leaf edge and returns the key
@ -179,12 +193,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// if the two preconditions above hold.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
replace(self, |leaf_edge| {
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_back_leaf_edge(), (k, v))
})
unsafe {
replace(self, |leaf_edge| {
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_back_leaf_edge(), (k, v))
})
}
}
}

View File

@ -107,7 +107,7 @@ impl<K, V> InternalNode<K, V> {
/// `len` of 0), there must be one initialized and valid edge. This function does not set up
/// such an edge.
unsafe fn new() -> Self {
InternalNode { data: LeafNode::new(), edges: [MaybeUninit::UNINIT; 2 * B] }
InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] }
}
}
@ -131,7 +131,7 @@ impl<K, V> BoxedNode<K, V> {
}
unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
BoxedNode { ptr: Unique::new_unchecked(ptr.as_ptr()) }
BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
}
fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
@ -392,14 +392,16 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
let height = self.height;
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(
node.cast(),
if height > 0 {
Layout::new::<InternalNode<K, V>>()
} else {
Layout::new::<LeafNode<K, V>>()
},
);
unsafe {
Global.dealloc(
node.cast(),
if height > 0 {
Layout::new::<InternalNode<K, V>>()
} else {
Layout::new::<LeafNode<K, V>>()
},
);
}
ret
}
}
@ -565,7 +567,7 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
debug_assert!(first <= self.len());
debug_assert!(after_last <= self.len() + 1);
for i in first..after_last {
Handle::new_edge(self.reborrow_mut(), i).correct_parent_link();
unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
}
}
@ -789,7 +791,7 @@ impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeT
&mut self,
) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
// We can't use Handle::new_kv or Handle::new_edge because we don't know our type
Handle { node: self.node.reborrow_mut(), idx: self.idx, _marker: PhantomData }
Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
}
}
@ -885,7 +887,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
unsafe fn cast_unchecked<NewType>(
&mut self,
) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
Handle::new_edge(self.node.cast_unchecked(), self.idx)
unsafe { Handle::new_edge(self.node.cast_unchecked(), self.idx) }
}
/// Inserts a new key/value pair and an edge that will go to the right of that new pair
@ -1330,8 +1332,10 @@ unsafe fn move_kv<K, V>(
dest_offset: usize,
count: usize,
) {
ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
unsafe {
ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
}
}
// Source and destination must have the same height.
@ -1344,8 +1348,10 @@ unsafe fn move_edges<K, V>(
) {
let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
unsafe {
ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
}
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
@ -1459,12 +1465,16 @@ pub mod marker {
}
unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
ptr::write(slice.get_unchecked_mut(idx), val);
unsafe {
ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
ptr::write(slice.get_unchecked_mut(idx), val);
}
}
unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
let ret = ptr::read(slice.get_unchecked(idx));
ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
ret
unsafe {
let ret = ptr::read(slice.get_unchecked(idx));
ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
ret
}
}

View File

@ -225,17 +225,17 @@ impl<T> LinkedList<T> {
/// maintain validity of aliasing pointers.
#[inline]
unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
let node = node.as_mut(); // this one is ours now, we can create an &mut.
let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut.
// Not creating new mutable (unique!) references overlapping `element`.
match node.prev {
Some(prev) => (*prev.as_ptr()).next = node.next,
Some(prev) => unsafe { (*prev.as_ptr()).next = node.next },
// this node is the head node
None => self.head = node.next,
};
match node.next {
Some(next) => (*next.as_ptr()).prev = node.prev,
Some(next) => unsafe { (*next.as_ptr()).prev = node.prev },
// this node is the tail node
None => self.tail = node.prev,
};
@ -258,17 +258,23 @@ impl<T> LinkedList<T> {
// This method takes care not to create multiple mutable references to whole nodes at the same time,
// to maintain validity of aliasing pointers into `element`.
if let Some(mut existing_prev) = existing_prev {
existing_prev.as_mut().next = Some(splice_start);
unsafe {
existing_prev.as_mut().next = Some(splice_start);
}
} else {
self.head = Some(splice_start);
}
if let Some(mut existing_next) = existing_next {
existing_next.as_mut().prev = Some(splice_end);
unsafe {
existing_next.as_mut().prev = Some(splice_end);
}
} else {
self.tail = Some(splice_end);
}
splice_start.as_mut().prev = existing_prev;
splice_end.as_mut().next = existing_next;
unsafe {
splice_start.as_mut().prev = existing_prev;
splice_end.as_mut().next = existing_next;
}
self.len += splice_length;
}
@ -297,9 +303,13 @@ impl<T> LinkedList<T> {
if let Some(mut split_node) = split_node {
let first_part_head;
let first_part_tail;
first_part_tail = split_node.as_mut().prev.take();
unsafe {
first_part_tail = split_node.as_mut().prev.take();
}
if let Some(mut tail) = first_part_tail {
tail.as_mut().next = None;
unsafe {
tail.as_mut().next = None;
}
first_part_head = self.head;
} else {
first_part_head = None;
@ -333,9 +343,13 @@ impl<T> LinkedList<T> {
if let Some(mut split_node) = split_node {
let second_part_head;
let second_part_tail;
second_part_head = split_node.as_mut().next.take();
unsafe {
second_part_head = split_node.as_mut().next.take();
}
if let Some(mut head) = second_part_head {
head.as_mut().prev = None;
unsafe {
head.as_mut().prev = None;
}
second_part_tail = self.tail;
} else {
second_part_tail = None;

View File

@ -7,6 +7,8 @@
#![stable(feature = "rust1", since = "1.0.0")]
// ignore-tidy-filelength
use core::array::LengthAtMost32;
use core::cmp::{self, Ordering};
use core::fmt;
@ -201,25 +203,27 @@ impl<T> VecDeque<T> {
/// Turn ptr into a slice
#[inline]
unsafe fn buffer_as_slice(&self) -> &[T] {
slice::from_raw_parts(self.ptr(), self.cap())
unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
}
/// Turn ptr into a mut slice
#[inline]
unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
slice::from_raw_parts_mut(self.ptr(), self.cap())
unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
}
/// Moves an element out of the buffer
#[inline]
unsafe fn buffer_read(&mut self, off: usize) -> T {
ptr::read(self.ptr().add(off))
unsafe { ptr::read(self.ptr().add(off)) }
}
/// Writes an element into the buffer, moving it.
#[inline]
unsafe fn buffer_write(&mut self, off: usize, value: T) {
ptr::write(self.ptr().add(off), value);
unsafe {
ptr::write(self.ptr().add(off), value);
}
}
/// Returns `true` if the buffer is at full capacity.
@ -268,7 +272,9 @@ impl<T> VecDeque<T> {
len,
self.cap()
);
ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
unsafe {
ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
}
}
/// Copies a contiguous block of memory len long from src to dst
@ -290,7 +296,9 @@ impl<T> VecDeque<T> {
len,
self.cap()
);
ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
unsafe {
ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
}
}
/// Copies a potentially wrapping block of memory len long from src to dest.
@ -330,7 +338,9 @@ impl<T> VecDeque<T> {
// 2 [_ _ A A A A B B _]
// D . . .
//
self.copy(dst, src, len);
unsafe {
self.copy(dst, src, len);
}
}
(false, false, true) => {
// dst before src, src doesn't wrap, dst wraps
@ -341,8 +351,10 @@ impl<T> VecDeque<T> {
// 3 [B B B B _ _ _ A A]
// . . D .
//
self.copy(dst, src, dst_pre_wrap_len);
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
unsafe {
self.copy(dst, src, dst_pre_wrap_len);
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
}
}
(true, false, true) => {
// src before dst, src doesn't wrap, dst wraps
@ -353,8 +365,10 @@ impl<T> VecDeque<T> {
// 3 [B B _ _ _ A A A A]
// . . D .
//
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
self.copy(dst, src, dst_pre_wrap_len);
unsafe {
self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
self.copy(dst, src, dst_pre_wrap_len);
}
}
(false, true, false) => {
// dst before src, src wraps, dst doesn't wrap
@ -365,8 +379,10 @@ impl<T> VecDeque<T> {
// 3 [C C _ _ _ B B C C]
// D . . .
//
self.copy(dst, src, src_pre_wrap_len);
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
unsafe {
self.copy(dst, src, src_pre_wrap_len);
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
}
}
(true, true, false) => {
// src before dst, src wraps, dst doesn't wrap
@ -377,8 +393,10 @@ impl<T> VecDeque<T> {
// 3 [C C A A _ _ _ C C]
// D . . .
//
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
self.copy(dst, src, src_pre_wrap_len);
unsafe {
self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
self.copy(dst, src, src_pre_wrap_len);
}
}
(false, true, true) => {
// dst before src, src wraps, dst wraps
@ -392,9 +410,11 @@ impl<T> VecDeque<T> {
//
debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
let delta = dst_pre_wrap_len - src_pre_wrap_len;
self.copy(dst, src, src_pre_wrap_len);
self.copy(dst + src_pre_wrap_len, 0, delta);
self.copy(0, delta, len - dst_pre_wrap_len);
unsafe {
self.copy(dst, src, src_pre_wrap_len);
self.copy(dst + src_pre_wrap_len, 0, delta);
self.copy(0, delta, len - dst_pre_wrap_len);
}
}
(true, true, true) => {
// src before dst, src wraps, dst wraps
@ -408,9 +428,11 @@ impl<T> VecDeque<T> {
//
debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
let delta = src_pre_wrap_len - dst_pre_wrap_len;
self.copy(delta, 0, len - src_pre_wrap_len);
self.copy(0, self.cap() - delta, delta);
self.copy(dst, src, dst_pre_wrap_len);
unsafe {
self.copy(delta, 0, len - src_pre_wrap_len);
self.copy(0, self.cap() - delta, delta);
self.copy(dst, src, dst_pre_wrap_len);
}
}
}
}
@ -440,13 +462,17 @@ impl<T> VecDeque<T> {
// Nop
} else if self.head < old_capacity - self.tail {
// B
self.copy_nonoverlapping(old_capacity, 0, self.head);
unsafe {
self.copy_nonoverlapping(old_capacity, 0, self.head);
}
self.head += old_capacity;
debug_assert!(self.head > self.tail);
} else {
// C
let new_tail = new_capacity - (old_capacity - self.tail);
self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
unsafe {
self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
}
self.tail = new_tail;
debug_assert!(self.head < self.tail);
}
@ -2297,7 +2323,9 @@ impl<T> VecDeque<T> {
unsafe fn rotate_left_inner(&mut self, mid: usize) {
debug_assert!(mid * 2 <= self.len());
self.wrap_copy(self.head, self.tail, mid);
unsafe {
self.wrap_copy(self.head, self.tail, mid);
}
self.head = self.wrap_add(self.head, mid);
self.tail = self.wrap_add(self.tail, mid);
}
@ -2306,7 +2334,9 @@ impl<T> VecDeque<T> {
debug_assert!(k * 2 <= self.len());
self.head = self.wrap_sub(self.head, k);
self.tail = self.wrap_sub(self.tail, k);
self.wrap_copy(self.tail, self.head, k);
unsafe {
self.wrap_copy(self.tail, self.head, k);
}
}
}

View File

@ -72,6 +72,7 @@
#![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings
#![allow(explicit_outlives_requirements)]
#![allow(incomplete_features)]
#![deny(unsafe_op_in_unsafe_fn)]
#![cfg_attr(not(test), feature(generator_trait))]
#![cfg_attr(test, feature(test))]
#![feature(allocator_api)]
@ -118,6 +119,7 @@
#![feature(try_reserve)]
#![feature(unboxed_closures)]
#![feature(unicode_internals)]
#![feature(unsafe_block_in_unsafe_fn)]
#![feature(unsize)]
#![feature(unsized_locals)]
#![feature(allocator_internals)]

View File

@ -108,7 +108,7 @@ impl<T> RawVec<T, Global> {
/// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
#[inline]
pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
Self::from_raw_parts_in(ptr, capacity, Global)
unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
}
/// Converts a `Box<[T]>` into a `RawVec<T>`.
@ -139,8 +139,10 @@ impl<T> RawVec<T, Global> {
);
let me = ManuallyDrop::new(self);
let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
Box::from_raw(slice)
unsafe {
let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
Box::from_raw(slice)
}
}
}
@ -192,7 +194,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
/// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
#[inline]
pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a }
Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc: a }
}
/// Gets a raw pointer to the start of the allocation. Note that this is

View File

@ -35,7 +35,7 @@ fn allocator_param() {
}
}
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
Global.dealloc(ptr, layout)
unsafe { Global.dealloc(ptr, layout) }
}
}

View File

@ -304,7 +304,7 @@ impl<T: ?Sized> Rc<T> {
}
unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
Self::from_inner(NonNull::new_unchecked(ptr))
Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
}
}
@ -544,7 +544,7 @@ impl<T> Rc<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Rc<[T]> {
Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _)
unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
}
}
@ -643,13 +643,13 @@ impl<T: ?Sized> Rc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
let offset = data_offset(ptr);
let offset = unsafe { data_offset(ptr) };
// Reverse the offset to find the original RcBox.
let fake_ptr = ptr as *mut RcBox<T>;
let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) };
Self::from_ptr(rc_ptr)
unsafe { Self::from_ptr(rc_ptr) }
}
/// Consumes the `Rc`, returning the wrapped pointer as `NonNull<T>`.
@ -805,7 +805,7 @@ impl<T: ?Sized> Rc<T> {
#[inline]
#[unstable(feature = "get_mut_unchecked", issue = "63292")]
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
&mut this.ptr.as_mut().value
unsafe { &mut this.ptr.as_mut().value }
}
#[inline]
@ -964,10 +964,12 @@ impl<T: ?Sized> Rc<T> {
// Initialize the RcBox
let inner = mem_to_rcbox(mem.ptr.as_ptr());
debug_assert_eq!(Layout::for_value(&*inner), layout);
unsafe {
debug_assert_eq!(Layout::for_value(&*inner), layout);
ptr::write(&mut (*inner).strong, Cell::new(1));
ptr::write(&mut (*inner).weak, Cell::new(1));
ptr::write(&mut (*inner).strong, Cell::new(1));
ptr::write(&mut (*inner).weak, Cell::new(1));
}
inner
}
@ -975,9 +977,11 @@ impl<T: ?Sized> Rc<T> {
/// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
// Allocate for the `RcBox<T>` using the given value.
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
})
unsafe {
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
})
}
}
fn from_box(v: Box<T>) -> Rc<T> {
@ -1006,9 +1010,11 @@ impl<T: ?Sized> Rc<T> {
impl<T> Rc<[T]> {
/// Allocates an `RcBox<[T]>` with the given length.
unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
})
unsafe {
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
})
}
}
}
@ -1017,7 +1023,9 @@ impl<T> Rc<[T]> {
/// For a slice/trait object, this sets the `data` field and leaves the rest
/// unchanged. For a sized raw pointer, this simply sets the pointer.
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
unsafe {
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
}
ptr
}
@ -1026,11 +1034,11 @@ impl<T> Rc<[T]> {
///
/// Unsafe because the caller must either take ownership or bind `T: Copy`
unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
let ptr = Self::allocate_for_slice(v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
Self::from_ptr(ptr)
unsafe {
let ptr = Self::allocate_for_slice(v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
Self::from_ptr(ptr)
}
}
/// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
@ -1058,25 +1066,27 @@ impl<T> Rc<[T]> {
}
}
let ptr = Self::allocate_for_slice(len);
unsafe {
let ptr = Self::allocate_for_slice(len);
let mem = ptr as *mut _ as *mut u8;
let layout = Layout::for_value(&*ptr);
let mem = ptr as *mut _ as *mut u8;
let layout = Layout::for_value(&*ptr);
// Pointer to first element
let elems = &mut (*ptr).value as *mut [T] as *mut T;
// Pointer to first element
let elems = &mut (*ptr).value as *mut [T] as *mut T;
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item);
guard.n_elems += 1;
for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item);
guard.n_elems += 1;
}
// All clear. Forget the guard so it doesn't free the new RcBox.
forget(guard);
Self::from_ptr(ptr)
}
// All clear. Forget the guard so it doesn't free the new RcBox.
forget(guard);
Self::from_ptr(ptr)
}
}
@ -1786,10 +1796,12 @@ impl<T> Weak<T> {
Self::new()
} else {
// See Rc::from_raw for details
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut RcBox<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
unsafe {
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut RcBox<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
}
}
}
}
@ -2106,7 +2118,7 @@ unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Because it is ?Sized, it will always be the last field in memory.
// Note: This is a detail of the current implementation of the compiler,
// and is not a guaranteed language detail. Do not rely on it outside of std.
data_offset_align(align_of_val(&*ptr))
unsafe { data_offset_align(align_of_val(&*ptr)) }
}
/// Computes the offset of the data field within `RcBox`.

View File

@ -831,8 +831,7 @@ where
{
let len = v.len();
let v = v.as_mut_ptr();
let v_mid = v.add(mid);
let v_end = v.add(len);
let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
// The merge process first copies the shorter run into `buf`. Then it traces the newly copied
// run and the longer run forwards (or backwards), comparing their next unconsumed elements and
@ -855,8 +854,10 @@ where
if mid <= len - mid {
// The left run is shorter.
ptr::copy_nonoverlapping(v, buf, mid);
hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
unsafe {
ptr::copy_nonoverlapping(v, buf, mid);
hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
}
// Initially, these pointers point to the beginnings of their arrays.
let left = &mut hole.start;
@ -866,17 +867,21 @@ where
while *left < hole.end && right < v_end {
// Consume the lesser side.
// If equal, prefer the left run to maintain stability.
let to_copy = if is_less(&*right, &**left) {
get_and_increment(&mut right)
} else {
get_and_increment(left)
};
ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
unsafe {
let to_copy = if is_less(&*right, &**left) {
get_and_increment(&mut right)
} else {
get_and_increment(left)
};
ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
}
}
} else {
// The right run is shorter.
ptr::copy_nonoverlapping(v_mid, buf, len - mid);
hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
unsafe {
ptr::copy_nonoverlapping(v_mid, buf, len - mid);
hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
}
// Initially, these pointers point past the ends of their arrays.
let left = &mut hole.dest;
@ -886,12 +891,14 @@ where
while v < *left && buf < *right {
// Consume the greater side.
// If equal, prefer the right run to maintain stability.
let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
decrement_and_get(left)
} else {
decrement_and_get(right)
};
ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
unsafe {
let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
decrement_and_get(left)
} else {
decrement_and_get(right)
};
ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
}
}
}
// Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
@ -899,12 +906,12 @@ where
unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
let old = *ptr;
*ptr = ptr.offset(1);
*ptr = unsafe { ptr.offset(1) };
old
}
unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
*ptr = ptr.offset(-1);
*ptr = unsafe { ptr.offset(-1) };
*ptr
}

View File

@ -583,5 +583,5 @@ impl str {
#[stable(feature = "str_box_extras", since = "1.20.0")]
#[inline]
pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
Box::from_raw(Box::into_raw(v) as *mut str)
unsafe { Box::from_raw(Box::into_raw(v) as *mut str) }
}

View File

@ -724,7 +724,7 @@ impl String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
String { vec: Vec::from_raw_parts(buf, length, capacity) }
unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
}
/// Converts a vector of bytes to a `String` without checking that the
@ -1329,9 +1329,11 @@ impl String {
let amt = bytes.len();
self.vec.reserve(amt);
ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
self.vec.set_len(len + amt);
unsafe {
ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
self.vec.set_len(len + amt);
}
}
/// Inserts a string slice into this `String` at a byte position.

View File

@ -232,7 +232,7 @@ impl<T: ?Sized> Arc<T> {
}
unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
Self::from_inner(NonNull::new_unchecked(ptr))
unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
}
}
@ -543,7 +543,7 @@ impl<T> Arc<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Arc<[T]> {
Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _)
unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
}
}
@ -642,13 +642,15 @@ impl<T: ?Sized> Arc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
let offset = data_offset(ptr);
unsafe {
let offset = data_offset(ptr);
// Reverse the offset to find the original ArcInner.
let fake_ptr = ptr as *mut ArcInner<T>;
let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
// Reverse the offset to find the original ArcInner.
let fake_ptr = ptr as *mut ArcInner<T>;
let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Self::from_ptr(arc_ptr)
Self::from_ptr(arc_ptr)
}
}
/// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`.
@ -807,7 +809,7 @@ impl<T: ?Sized> Arc<T> {
#[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
pub unsafe fn incr_strong_count(ptr: *const T) {
// Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
let arc = mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr));
let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
// Now increase refcount, but don't drop new refcount either
let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
}
@ -847,7 +849,7 @@ impl<T: ?Sized> Arc<T> {
#[inline]
#[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
pub unsafe fn decr_strong_count(ptr: *const T) {
mem::drop(Arc::from_raw(ptr));
unsafe { mem::drop(Arc::from_raw(ptr)) };
}
#[inline]
@ -865,7 +867,7 @@ impl<T: ?Sized> Arc<T> {
unsafe fn drop_slow(&mut self) {
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
ptr::drop_in_place(Self::get_mut_unchecked(self));
unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
// Drop the weak ref collectively held by all strong references
drop(Weak { ptr: self.ptr });
@ -917,10 +919,12 @@ impl<T: ?Sized> Arc<T> {
// Initialize the ArcInner
let inner = mem_to_arcinner(mem.ptr.as_ptr());
debug_assert_eq!(Layout::for_value(&*inner), layout);
debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
unsafe {
ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
}
inner
}
@ -928,9 +932,11 @@ impl<T: ?Sized> Arc<T> {
/// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
// Allocate for the `ArcInner<T>` using the given value.
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
})
unsafe {
Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
})
}
}
fn from_box(v: Box<T>) -> Arc<T> {
@ -959,9 +965,11 @@ impl<T: ?Sized> Arc<T> {
impl<T> Arc<[T]> {
/// Allocates an `ArcInner<[T]>` with the given length.
unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
})
unsafe {
Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
})
}
}
}
@ -970,7 +978,9 @@ impl<T> Arc<[T]> {
/// For a slice/trait object, this sets the `data` field and leaves the rest
/// unchanged. For a sized raw pointer, this simply sets the pointer.
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
unsafe {
ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
}
ptr
}
@ -979,11 +989,13 @@ impl<T> Arc<[T]> {
///
/// Unsafe because the caller must either take ownership or bind `T: Copy`.
unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
let ptr = Self::allocate_for_slice(v.len());
unsafe {
let ptr = Self::allocate_for_slice(v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
Self::from_ptr(ptr)
Self::from_ptr(ptr)
}
}
/// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
@ -1011,25 +1023,27 @@ impl<T> Arc<[T]> {
}
}
let ptr = Self::allocate_for_slice(len);
unsafe {
let ptr = Self::allocate_for_slice(len);
let mem = ptr as *mut _ as *mut u8;
let layout = Layout::for_value(&*ptr);
let mem = ptr as *mut _ as *mut u8;
let layout = Layout::for_value(&*ptr);
// Pointer to first element
let elems = &mut (*ptr).data as *mut [T] as *mut T;
// Pointer to first element
let elems = &mut (*ptr).data as *mut [T] as *mut T;
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item);
guard.n_elems += 1;
for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item);
guard.n_elems += 1;
}
// All clear. Forget the guard so it doesn't free the new ArcInner.
mem::forget(guard);
Self::from_ptr(ptr)
}
// All clear. Forget the guard so it doesn't free the new ArcInner.
mem::forget(guard);
Self::from_ptr(ptr)
}
}
@ -1274,7 +1288,7 @@ impl<T: ?Sized> Arc<T> {
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
// We are careful to *not* create a reference covering the "count" fields, as
// this would alias with concurrent access to the reference counts (e.g. by `Weak`).
&mut (*this.ptr.as_ptr()).data
unsafe { &mut (*this.ptr.as_ptr()).data }
}
/// Determine whether this is the unique reference (including weak refs) to
@ -1551,10 +1565,12 @@ impl<T> Weak<T> {
Self::new()
} else {
// See Arc::from_raw for details
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut ArcInner<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
unsafe {
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut ArcInner<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
}
}
}
}
@ -2260,7 +2276,7 @@ unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Because it is `?Sized`, it will always be the last field in memory.
// Note: This is a detail of the current implementation of the compiler,
// and is not a guaranteed language detail. Do not rely on it outside of std.
data_offset_align(align_of_val(&*ptr))
unsafe { data_offset_align(align_of_val(&*ptr)) }
}
/// Computes the offset of the data field within `ArcInner`.

View File

@ -60,7 +60,7 @@ impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
// Increment the reference count of the arc to clone it.
unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
Arc::incr_strong_count(waker as *const W);
unsafe { Arc::incr_strong_count(waker as *const W) };
RawWaker::new(
waker as *const (),
&RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
@ -69,19 +69,20 @@ fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
// Wake by value, moving the Arc into the Wake::wake function
unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
let waker: Arc<W> = Arc::from_raw(waker as *const W);
let waker: Arc<W> = unsafe { Arc::from_raw(waker as *const W) };
<W as Wake>::wake(waker);
}
// Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
let waker: ManuallyDrop<Arc<W>> = ManuallyDrop::new(Arc::from_raw(waker as *const W));
let waker: ManuallyDrop<Arc<W>> =
unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
<W as Wake>::wake_by_ref(&waker);
}
// Decrement the reference count of the Arc on drop
unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
Arc::decr_strong_count(waker as *const W);
unsafe { Arc::decr_strong_count(waker as *const W) };
}
RawWaker::new(

View File

@ -465,7 +465,7 @@ impl<T> Vec<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length }
unsafe { Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } }
}
/// Returns the number of elements the vector can hold without
@ -1264,10 +1264,10 @@ impl<T> Vec<T> {
/// Appends elements to `Self` from other buffer.
#[inline]
unsafe fn append_elements(&mut self, other: *const [T]) {
let count = (*other).len();
let count = unsafe { (*other).len() };
self.reserve(count);
let len = self.len();
ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count);
unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
self.len += count;
}
@ -2965,15 +2965,16 @@ impl<T> Drain<'_, T> {
/// Fill that range as much as possible with new elements from the `replace_with` iterator.
/// Returns `true` if we filled the entire range. (`replace_with.next()` didnt return `None`.)
unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
let vec = self.vec.as_mut();
let vec = unsafe { self.vec.as_mut() };
let range_start = vec.len;
let range_end = self.tail_start;
let range_slice =
slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start);
let range_slice = unsafe {
slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
};
for place in range_slice {
if let Some(new_item) = replace_with.next() {
ptr::write(place, new_item);
unsafe { ptr::write(place, new_item) };
vec.len += 1;
} else {
return false;
@ -2984,14 +2985,16 @@ impl<T> Drain<'_, T> {
/// Makes room for inserting more elements before the tail.
unsafe fn move_tail(&mut self, additional: usize) {
let vec = self.vec.as_mut();
let vec = unsafe { self.vec.as_mut() };
let len = self.tail_start + self.tail_len;
vec.buf.reserve(len, additional);
let new_tail_start = self.tail_start + additional;
let src = vec.as_ptr().add(self.tail_start);
let dst = vec.as_mut_ptr().add(new_tail_start);
ptr::copy(src, dst, self.tail_len);
unsafe {
let src = vec.as_ptr().add(self.tail_start);
let dst = vec.as_mut_ptr().add(new_tail_start);
ptr::copy(src, dst, self.tail_len);
}
self.tail_start = new_tail_start;
}
}