From 39e29ce4d08674734e2f2759607b1486db7d0fde Mon Sep 17 00:00:00 2001 From: LeSeulArtichaut Date: Thu, 28 May 2020 23:27:00 +0200 Subject: [PATCH 1/2] `#[deny(unsafe_op_in_unsafe_fn)]` in liballoc --- src/liballoc/alloc.rs | 51 ++++++----- src/liballoc/boxed.rs | 6 +- src/liballoc/collections/binary_heap.rs | 12 +-- src/liballoc/collections/btree/map.rs | 10 +-- src/liballoc/collections/btree/mod.rs | 4 +- src/liballoc/collections/btree/navigate.rs | 100 ++++++++++++--------- src/liballoc/collections/btree/node.rs | 54 ++++++----- src/liballoc/collections/linked_list.rs | 36 +++++--- src/liballoc/collections/vec_deque.rs | 80 +++++++++++------ src/liballoc/lib.rs | 2 + src/liballoc/raw_vec.rs | 10 ++- src/liballoc/raw_vec/tests.rs | 2 +- src/liballoc/rc.rs | 90 +++++++++++-------- src/liballoc/slice.rs | 47 +++++----- src/liballoc/str.rs | 2 +- src/liballoc/string.rs | 10 ++- src/liballoc/sync.rs | 100 ++++++++++++--------- src/liballoc/task.rs | 9 +- src/liballoc/vec.rs | 25 +++--- 19 files changed, 389 insertions(+), 261 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index d31c73cc1bd8d..5bed68151dd9d 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -77,7 +77,7 @@ pub struct Global; #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn alloc(layout: Layout) -> *mut u8 { - __rust_alloc(layout.size(), layout.align()) + unsafe { __rust_alloc(layout.size(), layout.align()) } } /// Deallocate memory with the global allocator. @@ -99,7 +99,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 { #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) { - __rust_dealloc(ptr, layout.size(), layout.align()) + unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) } } /// Reallocate memory with the global allocator. @@ -121,7 +121,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) { #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { - __rust_realloc(ptr, layout.size(), layout.align(), new_size) + unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) } } /// Allocate zero-initialized memory with the global allocator. @@ -158,7 +158,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 #[stable(feature = "global_alloc", since = "1.28.0")] #[inline] pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { - __rust_alloc_zeroed(layout.size(), layout.align()) + unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) } } #[unstable(feature = "allocator_api", issue = "32838")] @@ -183,7 +183,7 @@ unsafe impl AllocRef for Global { #[inline] unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { if layout.size() != 0 { - dealloc(ptr.as_ptr(), layout) + unsafe { dealloc(ptr.as_ptr(), layout) } } } @@ -209,17 +209,20 @@ unsafe impl AllocRef for Global { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if layout.size() == 0 => { - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + let new_layout = + unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) }; self.alloc(new_layout, init) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > size` or something similar. 
- intrinsics::assume(new_size > size); - let ptr = realloc(ptr.as_ptr(), layout, new_size); - let memory = - MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; - init.init_offset(memory, size); - Ok(memory) + unsafe { + intrinsics::assume(new_size > size); + let ptr = realloc(ptr.as_ptr(), layout, new_size); + let memory = + MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + init.init_offset(memory, size); + Ok(memory) + } } } } @@ -245,14 +248,18 @@ unsafe impl AllocRef for Global { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { - self.dealloc(ptr, layout); + unsafe { + self.dealloc(ptr, layout); + } Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < size` or something similar. - intrinsics::assume(new_size < size); - let ptr = realloc(ptr.as_ptr(), layout, new_size); - Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) + unsafe { + intrinsics::assume(new_size < size); + let ptr = realloc(ptr.as_ptr(), layout, new_size); + Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) + } } } } @@ -264,7 +271,7 @@ unsafe impl AllocRef for Global { #[lang = "exchange_malloc"] #[inline] unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { - let layout = Layout::from_size_align_unchecked(size, align); + let layout = unsafe { Layout::from_size_align_unchecked(size, align) }; match Global.alloc(layout, AllocInit::Uninitialized) { Ok(memory) => memory.ptr.as_ptr(), Err(_) => handle_alloc_error(layout), @@ -279,10 +286,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { // For example if `Box` is changed to `struct Box(Unique, A)`, // this function has to be changed to `fn box_free(Unique, A)` as well. pub(crate) unsafe fn box_free(ptr: Unique) { - let size = size_of_val(ptr.as_ref()); - let align = min_align_of_val(ptr.as_ref()); - let layout = Layout::from_size_align_unchecked(size, align); - Global.dealloc(ptr.cast().into(), layout) + unsafe { + let size = size_of_val(ptr.as_ref()); + let align = min_align_of_val(ptr.as_ref()); + let layout = Layout::from_size_align_unchecked(size, align); + Global.dealloc(ptr.cast().into(), layout) + } } /// Abort on memory allocation error or failure. diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 22c344323a2ed..cb8f92b3e3e83 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -311,7 +311,7 @@ impl Box> { #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Box { - Box::from_raw(Box::into_raw(self) as *mut T) + unsafe { Box::from_raw(Box::into_raw(self) as *mut T) } } } @@ -349,7 +349,7 @@ impl Box<[mem::MaybeUninit]> { #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Box<[T]> { - Box::from_raw(Box::into_raw(self) as *mut [T]) + unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) } } } @@ -393,7 +393,7 @@ impl Box { #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub unsafe fn from_raw(raw: *mut T) -> Self { - Box(Unique::new_unchecked(raw)) + Box(unsafe { Unique::new_unchecked(raw) }) } /// Consumes the `Box`, returning a wrapped raw pointer. 
diff --git a/src/liballoc/collections/binary_heap.rs b/src/liballoc/collections/binary_heap.rs index c2fe4691b34c0..15313e333ce73 100644 --- a/src/liballoc/collections/binary_heap.rs +++ b/src/liballoc/collections/binary_heap.rs @@ -1003,7 +1003,7 @@ impl<'a, T> Hole<'a, T> { unsafe fn new(data: &'a mut [T], pos: usize) -> Self { debug_assert!(pos < data.len()); // SAFE: pos should be inside the slice - let elt = ptr::read(data.get_unchecked(pos)); + let elt = unsafe { ptr::read(data.get_unchecked(pos)) }; Hole { data, elt: ManuallyDrop::new(elt), pos } } @@ -1025,7 +1025,7 @@ impl<'a, T> Hole<'a, T> { unsafe fn get(&self, index: usize) -> &T { debug_assert!(index != self.pos); debug_assert!(index < self.data.len()); - self.data.get_unchecked(index) + unsafe { self.data.get_unchecked(index) } } /// Move hole to new location @@ -1035,9 +1035,11 @@ impl<'a, T> Hole<'a, T> { unsafe fn move_to(&mut self, index: usize) { debug_assert!(index != self.pos); debug_assert!(index < self.data.len()); - let index_ptr: *const _ = self.data.get_unchecked(index); - let hole_ptr = self.data.get_unchecked_mut(self.pos); - ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1); + unsafe { + let index_ptr: *const _ = self.data.get_unchecked(index); + let hole_ptr = self.data.get_unchecked_mut(self.pos); + ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1); + } self.pos = index; } } diff --git a/src/liballoc/collections/btree/map.rs b/src/liballoc/collections/btree/map.rs index fa1c09d9ece87..2fcc8cc98737d 100644 --- a/src/liballoc/collections/btree/map.rs +++ b/src/liballoc/collections/btree/map.rs @@ -1725,7 +1725,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> { &mut self, ) -> Option, K, V, marker::LeafOrInternal>, marker::KV>> { let edge = self.cur_leaf_edge.as_ref()?; - ptr::read(edge).next_kv().ok() + unsafe { ptr::read(edge).next_kv().ok() } } /// Implementation of a typical `DrainFilter::next` method, given the predicate. 
@@ -1808,7 +1808,7 @@ impl<'a, K, V> Range<'a, K, V> { } unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { - unwrap_unchecked(self.front.as_mut()).next_unchecked() + unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() } } } @@ -1821,7 +1821,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> { impl<'a, K, V> Range<'a, K, V> { unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { - unwrap_unchecked(self.back.as_mut()).next_back_unchecked() + unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() } } } @@ -1859,7 +1859,7 @@ impl<'a, K, V> RangeMut<'a, K, V> { } unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) { - unwrap_unchecked(self.front.as_mut()).next_unchecked() + unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() } } } @@ -1880,7 +1880,7 @@ impl FusedIterator for RangeMut<'_, K, V> {} impl<'a, K, V> RangeMut<'a, K, V> { unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) { - unwrap_unchecked(self.back.as_mut()).next_back_unchecked() + unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() } } } diff --git a/src/liballoc/collections/btree/mod.rs b/src/liballoc/collections/btree/mod.rs index fb5825ee21a9e..543ff41a4d48d 100644 --- a/src/liballoc/collections/btree/mod.rs +++ b/src/liballoc/collections/btree/mod.rs @@ -19,7 +19,9 @@ pub unsafe fn unwrap_unchecked(val: Option) -> T { if cfg!(debug_assertions) { panic!("'unchecked' unwrap on None in BTreeMap"); } else { - core::intrinsics::unreachable(); + unsafe { + core::intrinsics::unreachable(); + } } }) } diff --git a/src/liballoc/collections/btree/navigate.rs b/src/liballoc/collections/btree/navigate.rs index 5e8dcf247ae59..5478d822438b1 100644 --- a/src/liballoc/collections/btree/navigate.rs +++ b/src/liballoc/collections/btree/navigate.rs @@ -64,8 +64,10 @@ macro_rules! def_next_kv_uncheched_dealloc { edge = match edge.$adjacent_kv() { Ok(internal_kv) => return internal_kv, Err(last_edge) => { - let parent_edge = last_edge.into_node().deallocate_and_ascend(); - unwrap_unchecked(parent_edge).forget_node_type() + unsafe { + let parent_edge = last_edge.into_node().deallocate_and_ascend(); + unwrap_unchecked(parent_edge).forget_node_type() + } } } } @@ -82,9 +84,11 @@ def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_k /// Safety: The change closure must not panic. #[inline] unsafe fn replace(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R { - let value = ptr::read(v); + let value = unsafe { ptr::read(v) }; let (new_value, ret) = change(value); - ptr::write(v, new_value); + unsafe { + ptr::write(v, new_value); + } ret } @@ -93,22 +97,26 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Ed /// key and value in between. /// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree. pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { - replace(self, |leaf_edge| { - let kv = leaf_edge.next_kv(); - let kv = unwrap_unchecked(kv.ok()); - (kv.next_leaf_edge(), kv.into_kv()) - }) + unsafe { + replace(self, |leaf_edge| { + let kv = leaf_edge.next_kv(); + let kv = unwrap_unchecked(kv.ok()); + (kv.next_leaf_edge(), kv.into_kv()) + }) + } } /// Moves the leaf edge handle to the previous leaf edge and returns references to the /// key and value in between. /// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree. 
pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { - replace(self, |leaf_edge| { - let kv = leaf_edge.next_back_kv(); - let kv = unwrap_unchecked(kv.ok()); - (kv.next_back_leaf_edge(), kv.into_kv()) - }) + unsafe { + replace(self, |leaf_edge| { + let kv = leaf_edge.next_back_kv(); + let kv = unwrap_unchecked(kv.ok()); + (kv.next_back_leaf_edge(), kv.into_kv()) + }) + } } } @@ -119,14 +127,16 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge /// - The caller must ensure that the leaf edge is not the last one in the tree. /// - Using the updated handle may well invalidate the returned references. pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) { - let kv = replace(self, |leaf_edge| { - let kv = leaf_edge.next_kv(); - let kv = unwrap_unchecked(kv.ok()); - (ptr::read(&kv).next_leaf_edge(), kv) - }); - // Doing the descend (and perhaps another move) invalidates the references - // returned by `into_kv_mut`, so we have to do this last. - kv.into_kv_mut() + unsafe { + let kv = replace(self, |leaf_edge| { + let kv = leaf_edge.next_kv(); + let kv = unwrap_unchecked(kv.ok()); + (ptr::read(&kv).next_leaf_edge(), kv) + }); + // Doing the descend (and perhaps another move) invalidates the references + // returned by `into_kv_mut`, so we have to do this last. + kv.into_kv_mut() + } } /// Moves the leaf edge handle to the previous leaf and returns references to the @@ -135,14 +145,16 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge /// - The caller must ensure that the leaf edge is not the first one in the tree. /// - Using the updated handle may well invalidate the returned references. pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) { - let kv = replace(self, |leaf_edge| { - let kv = leaf_edge.next_back_kv(); - let kv = unwrap_unchecked(kv.ok()); - (ptr::read(&kv).next_back_leaf_edge(), kv) - }); - // Doing the descend (and perhaps another move) invalidates the references - // returned by `into_kv_mut`, so we have to do this last. - kv.into_kv_mut() + unsafe { + let kv = replace(self, |leaf_edge| { + let kv = leaf_edge.next_back_kv(); + let kv = unwrap_unchecked(kv.ok()); + (ptr::read(&kv).next_back_leaf_edge(), kv) + }); + // Doing the descend (and perhaps another move) invalidates the references + // returned by `into_kv_mut`, so we have to do this last. + kv.into_kv_mut() + } } } @@ -159,12 +171,14 @@ impl Handle, marker::Edge> { /// if the two preconditions above hold. /// - Using the updated handle may well invalidate the returned references. pub unsafe fn next_unchecked(&mut self) -> (K, V) { - replace(self, |leaf_edge| { - let kv = next_kv_unchecked_dealloc(leaf_edge); - let k = ptr::read(kv.reborrow().into_kv().0); - let v = ptr::read(kv.reborrow().into_kv().1); - (kv.next_leaf_edge(), (k, v)) - }) + unsafe { + replace(self, |leaf_edge| { + let kv = next_kv_unchecked_dealloc(leaf_edge); + let k = ptr::read(kv.reborrow().into_kv().0); + let v = ptr::read(kv.reborrow().into_kv().1); + (kv.next_leaf_edge(), (k, v)) + }) + } } /// Moves the leaf edge handle to the previous leaf edge and returns the key @@ -179,12 +193,14 @@ impl Handle, marker::Edge> { /// if the two preconditions above hold. /// - Using the updated handle may well invalidate the returned references. 
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) { - replace(self, |leaf_edge| { - let kv = next_back_kv_unchecked_dealloc(leaf_edge); - let k = ptr::read(kv.reborrow().into_kv().0); - let v = ptr::read(kv.reborrow().into_kv().1); - (kv.next_back_leaf_edge(), (k, v)) - }) + unsafe { + replace(self, |leaf_edge| { + let kv = next_back_kv_unchecked_dealloc(leaf_edge); + let k = ptr::read(kv.reborrow().into_kv().0); + let v = ptr::read(kv.reborrow().into_kv().1); + (kv.next_back_leaf_edge(), (k, v)) + }) + } } } diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs index 5569c293e2f66..a4b6cf12a23bd 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -107,7 +107,7 @@ impl InternalNode { /// `len` of 0), there must be one initialized and valid edge. This function does not set up /// such an edge. unsafe fn new() -> Self { - InternalNode { data: LeafNode::new(), edges: [MaybeUninit::UNINIT; 2 * B] } + InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] } } } @@ -131,7 +131,7 @@ impl BoxedNode { } unsafe fn from_ptr(ptr: NonNull>) -> Self { - BoxedNode { ptr: Unique::new_unchecked(ptr.as_ptr()) } + BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } } } fn as_ptr(&self) -> NonNull> { @@ -392,14 +392,16 @@ impl NodeRef { let height = self.height; let node = self.node; let ret = self.ascend().ok(); - Global.dealloc( - node.cast(), - if height > 0 { - Layout::new::>() - } else { - Layout::new::>() - }, - ); + unsafe { + Global.dealloc( + node.cast(), + if height > 0 { + Layout::new::>() + } else { + Layout::new::>() + }, + ); + } ret } } @@ -565,7 +567,7 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { debug_assert!(first <= self.len()); debug_assert!(after_last <= self.len() + 1); for i in first..after_last { - Handle::new_edge(self.reborrow_mut(), i).correct_parent_link(); + unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link(); } } @@ -789,7 +791,7 @@ impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeT &mut self, ) -> Handle, K, V, NodeType>, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type - Handle { node: self.node.reborrow_mut(), idx: self.idx, _marker: PhantomData } + Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData } } } @@ -885,7 +887,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: unsafe fn cast_unchecked( &mut self, ) -> Handle, K, V, NewType>, marker::Edge> { - Handle::new_edge(self.node.cast_unchecked(), self.idx) + unsafe { Handle::new_edge(self.node.cast_unchecked(), self.idx) } } /// Inserts a new key/value pair and an edge that will go to the right of that new pair @@ -1330,8 +1332,10 @@ unsafe fn move_kv( dest_offset: usize, count: usize, ) { - ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count); - ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count); + unsafe { + ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count); + ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count); + } } // Source and destination must have the same height. 
@@ -1344,8 +1348,10 @@ unsafe fn move_edges( ) { let source_ptr = source.as_internal_mut().edges.as_mut_ptr(); let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr(); - ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count); - dest.correct_childrens_parent_links(dest_offset, dest_offset + count); + unsafe { + ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count); + dest.correct_childrens_parent_links(dest_offset, dest_offset + count); + } } impl Handle, marker::Edge> { @@ -1459,12 +1465,16 @@ pub mod marker { } unsafe fn slice_insert(slice: &mut [T], idx: usize, val: T) { - ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx); - ptr::write(slice.get_unchecked_mut(idx), val); + unsafe { + ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx); + ptr::write(slice.get_unchecked_mut(idx), val); + } } unsafe fn slice_remove(slice: &mut [T], idx: usize) -> T { - let ret = ptr::read(slice.get_unchecked(idx)); - ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1); - ret + unsafe { + let ret = ptr::read(slice.get_unchecked(idx)); + ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1); + ret + } } diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs index 85f2505f756aa..36b5785fdf6c5 100644 --- a/src/liballoc/collections/linked_list.rs +++ b/src/liballoc/collections/linked_list.rs @@ -225,17 +225,17 @@ impl LinkedList { /// maintain validity of aliasing pointers. #[inline] unsafe fn unlink_node(&mut self, mut node: NonNull>) { - let node = node.as_mut(); // this one is ours now, we can create an &mut. + let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut. // Not creating new mutable (unique!) references overlapping `element`. match node.prev { - Some(prev) => (*prev.as_ptr()).next = node.next, + Some(prev) => unsafe { (*prev.as_ptr()).next = node.next }, // this node is the head node None => self.head = node.next, }; match node.next { - Some(next) => (*next.as_ptr()).prev = node.prev, + Some(next) => unsafe { (*next.as_ptr()).prev = node.prev }, // this node is the tail node None => self.tail = node.prev, }; @@ -258,17 +258,23 @@ impl LinkedList { // This method takes care not to create multiple mutable references to whole nodes at the same time, // to maintain validity of aliasing pointers into `element`. 
if let Some(mut existing_prev) = existing_prev { - existing_prev.as_mut().next = Some(splice_start); + unsafe { + existing_prev.as_mut().next = Some(splice_start); + } } else { self.head = Some(splice_start); } if let Some(mut existing_next) = existing_next { - existing_next.as_mut().prev = Some(splice_end); + unsafe { + existing_next.as_mut().prev = Some(splice_end); + } } else { self.tail = Some(splice_end); } - splice_start.as_mut().prev = existing_prev; - splice_end.as_mut().next = existing_next; + unsafe { + splice_start.as_mut().prev = existing_prev; + splice_end.as_mut().next = existing_next; + } self.len += splice_length; } @@ -297,9 +303,13 @@ impl LinkedList { if let Some(mut split_node) = split_node { let first_part_head; let first_part_tail; - first_part_tail = split_node.as_mut().prev.take(); + unsafe { + first_part_tail = split_node.as_mut().prev.take(); + } if let Some(mut tail) = first_part_tail { - tail.as_mut().next = None; + unsafe { + tail.as_mut().next = None; + } first_part_head = self.head; } else { first_part_head = None; @@ -333,9 +343,13 @@ impl LinkedList { if let Some(mut split_node) = split_node { let second_part_head; let second_part_tail; - second_part_head = split_node.as_mut().next.take(); + unsafe { + second_part_head = split_node.as_mut().next.take(); + } if let Some(mut head) = second_part_head { - head.as_mut().prev = None; + unsafe { + head.as_mut().prev = None; + } second_part_tail = self.tail; } else { second_part_tail = None; diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs index ae54d3971baac..15f3a94ca2d6a 100644 --- a/src/liballoc/collections/vec_deque.rs +++ b/src/liballoc/collections/vec_deque.rs @@ -7,6 +7,8 @@ #![stable(feature = "rust1", since = "1.0.0")] +// ignore-tidy-filelength + use core::array::LengthAtMost32; use core::cmp::{self, Ordering}; use core::fmt; @@ -201,25 +203,27 @@ impl VecDeque { /// Turn ptr into a slice #[inline] unsafe fn buffer_as_slice(&self) -> &[T] { - slice::from_raw_parts(self.ptr(), self.cap()) + unsafe { slice::from_raw_parts(self.ptr(), self.cap()) } } /// Turn ptr into a mut slice #[inline] unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] { - slice::from_raw_parts_mut(self.ptr(), self.cap()) + unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) } } /// Moves an element out of the buffer #[inline] unsafe fn buffer_read(&mut self, off: usize) -> T { - ptr::read(self.ptr().add(off)) + unsafe { ptr::read(self.ptr().add(off)) } } /// Writes an element into the buffer, moving it. #[inline] unsafe fn buffer_write(&mut self, off: usize, value: T) { - ptr::write(self.ptr().add(off), value); + unsafe { + ptr::write(self.ptr().add(off), value); + } } /// Returns `true` if the buffer is at full capacity. @@ -268,7 +272,9 @@ impl VecDeque { len, self.cap() ); - ptr::copy(self.ptr().add(src), self.ptr().add(dst), len); + unsafe { + ptr::copy(self.ptr().add(src), self.ptr().add(dst), len); + } } /// Copies a contiguous block of memory len long from src to dst @@ -290,7 +296,9 @@ impl VecDeque { len, self.cap() ); - ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len); + unsafe { + ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len); + } } /// Copies a potentially wrapping block of memory len long from src to dest. @@ -330,7 +338,9 @@ impl VecDeque { // 2 [_ _ A A A A B B _] // D . . . 
// - self.copy(dst, src, len); + unsafe { + self.copy(dst, src, len); + } } (false, false, true) => { // dst before src, src doesn't wrap, dst wraps @@ -341,8 +351,10 @@ impl VecDeque { // 3 [B B B B _ _ _ A A] // . . D . // - self.copy(dst, src, dst_pre_wrap_len); - self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); + unsafe { + self.copy(dst, src, dst_pre_wrap_len); + self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); + } } (true, false, true) => { // src before dst, src doesn't wrap, dst wraps @@ -353,8 +365,10 @@ impl VecDeque { // 3 [B B _ _ _ A A A A] // . . D . // - self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); - self.copy(dst, src, dst_pre_wrap_len); + unsafe { + self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); + self.copy(dst, src, dst_pre_wrap_len); + } } (false, true, false) => { // dst before src, src wraps, dst doesn't wrap @@ -365,8 +379,10 @@ impl VecDeque { // 3 [C C _ _ _ B B C C] // D . . . // - self.copy(dst, src, src_pre_wrap_len); - self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); + unsafe { + self.copy(dst, src, src_pre_wrap_len); + self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); + } } (true, true, false) => { // src before dst, src wraps, dst doesn't wrap @@ -377,8 +393,10 @@ impl VecDeque { // 3 [C C A A _ _ _ C C] // D . . . // - self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); - self.copy(dst, src, src_pre_wrap_len); + unsafe { + self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); + self.copy(dst, src, src_pre_wrap_len); + } } (false, true, true) => { // dst before src, src wraps, dst wraps @@ -392,9 +410,11 @@ impl VecDeque { // debug_assert!(dst_pre_wrap_len > src_pre_wrap_len); let delta = dst_pre_wrap_len - src_pre_wrap_len; - self.copy(dst, src, src_pre_wrap_len); - self.copy(dst + src_pre_wrap_len, 0, delta); - self.copy(0, delta, len - dst_pre_wrap_len); + unsafe { + self.copy(dst, src, src_pre_wrap_len); + self.copy(dst + src_pre_wrap_len, 0, delta); + self.copy(0, delta, len - dst_pre_wrap_len); + } } (true, true, true) => { // src before dst, src wraps, dst wraps @@ -408,9 +428,11 @@ impl VecDeque { // debug_assert!(src_pre_wrap_len > dst_pre_wrap_len); let delta = src_pre_wrap_len - dst_pre_wrap_len; - self.copy(delta, 0, len - src_pre_wrap_len); - self.copy(0, self.cap() - delta, delta); - self.copy(dst, src, dst_pre_wrap_len); + unsafe { + self.copy(delta, 0, len - src_pre_wrap_len); + self.copy(0, self.cap() - delta, delta); + self.copy(dst, src, dst_pre_wrap_len); + } } } } @@ -440,13 +462,17 @@ impl VecDeque { // Nop } else if self.head < old_capacity - self.tail { // B - self.copy_nonoverlapping(old_capacity, 0, self.head); + unsafe { + self.copy_nonoverlapping(old_capacity, 0, self.head); + } self.head += old_capacity; debug_assert!(self.head > self.tail); } else { // C let new_tail = new_capacity - (old_capacity - self.tail); - self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail); + unsafe { + self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail); + } self.tail = new_tail; debug_assert!(self.head < self.tail); } @@ -2297,7 +2323,9 @@ impl VecDeque { unsafe fn rotate_left_inner(&mut self, mid: usize) { debug_assert!(mid * 2 <= self.len()); - self.wrap_copy(self.head, self.tail, mid); + unsafe { + self.wrap_copy(self.head, self.tail, mid); + } self.head = self.wrap_add(self.head, mid); self.tail = self.wrap_add(self.tail, mid); } @@ -2306,7 +2334,9 @@ impl VecDeque { debug_assert!(k * 2 <= self.len()); self.head 
= self.wrap_sub(self.head, k); self.tail = self.wrap_sub(self.tail, k); - self.wrap_copy(self.tail, self.head, k); + unsafe { + self.wrap_copy(self.tail, self.head, k); + } } } diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 9bcfc9457f50e..41c2b221704e6 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -72,6 +72,7 @@ #![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings #![allow(explicit_outlives_requirements)] #![allow(incomplete_features)] +#![deny(unsafe_op_in_unsafe_fn)] #![cfg_attr(not(test), feature(generator_trait))] #![cfg_attr(test, feature(test))] #![feature(allocator_api)] @@ -118,6 +119,7 @@ #![feature(try_reserve)] #![feature(unboxed_closures)] #![feature(unicode_internals)] +#![feature(unsafe_block_in_unsafe_fn)] #![feature(unsize)] #![feature(unsized_locals)] #![feature(allocator_internals)] diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 805dbfe277584..15e81f9288722 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -108,7 +108,7 @@ impl RawVec { /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed. #[inline] pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self { - Self::from_raw_parts_in(ptr, capacity, Global) + unsafe { Self::from_raw_parts_in(ptr, capacity, Global) } } /// Converts a `Box<[T]>` into a `RawVec`. @@ -139,8 +139,10 @@ impl RawVec { ); let me = ManuallyDrop::new(self); - let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit, len); - Box::from_raw(slice) + unsafe { + let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit, len); + Box::from_raw(slice) + } } } @@ -192,7 +194,7 @@ impl RawVec { /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed. #[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self { - Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a } + Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc: a } } /// Gets a raw pointer to the start of the allocation. Note that this is diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs index 17622d72a059c..6418c4a9823f2 100644 --- a/src/liballoc/raw_vec/tests.rs +++ b/src/liballoc/raw_vec/tests.rs @@ -35,7 +35,7 @@ fn allocator_param() { } } unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - Global.dealloc(ptr, layout) + unsafe { Global.dealloc(ptr, layout) } } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 0327a9f9a96e5..4d50ae9efca95 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -304,7 +304,7 @@ impl Rc { } unsafe fn from_ptr(ptr: *mut RcBox) -> Self { - Self::from_inner(NonNull::new_unchecked(ptr)) + Self::from_inner(unsafe { NonNull::new_unchecked(ptr) }) } } @@ -544,7 +544,7 @@ impl Rc<[mem::MaybeUninit]> { #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Rc<[T]> { - Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) + unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } } } @@ -643,13 +643,13 @@ impl Rc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - let offset = data_offset(ptr); + let offset = unsafe { data_offset(ptr) }; // Reverse the offset to find the original RcBox. 
let fake_ptr = ptr as *mut RcBox; - let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); + let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) }; - Self::from_ptr(rc_ptr) + unsafe { Self::from_ptr(rc_ptr) } } /// Consumes the `Rc`, returning the wrapped pointer as `NonNull`. @@ -805,7 +805,7 @@ impl Rc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - &mut this.ptr.as_mut().value + unsafe { &mut this.ptr.as_mut().value } } #[inline] @@ -964,10 +964,12 @@ impl Rc { // Initialize the RcBox let inner = mem_to_rcbox(mem.ptr.as_ptr()); - debug_assert_eq!(Layout::for_value(&*inner), layout); + unsafe { + debug_assert_eq!(Layout::for_value(&*inner), layout); - ptr::write(&mut (*inner).strong, Cell::new(1)); - ptr::write(&mut (*inner).weak, Cell::new(1)); + ptr::write(&mut (*inner).strong, Cell::new(1)); + ptr::write(&mut (*inner).weak, Cell::new(1)); + } inner } @@ -975,9 +977,11 @@ impl Rc { /// Allocates an `RcBox` with sufficient space for an unsized inner value unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { // Allocate for the `RcBox` using the given value. - Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| { - set_data_ptr(ptr as *mut T, mem) as *mut RcBox - }) + unsafe { + Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| { + set_data_ptr(ptr as *mut T, mem) as *mut RcBox + }) + } } fn from_box(v: Box) -> Rc { @@ -1006,9 +1010,11 @@ impl Rc { impl Rc<[T]> { /// Allocates an `RcBox<[T]>` with the given length. unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> { - Self::allocate_for_layout(Layout::array::(len).unwrap(), |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]> - }) + unsafe { + Self::allocate_for_layout(Layout::array::(len).unwrap(), |mem| { + ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]> + }) + } } } @@ -1017,7 +1023,9 @@ impl Rc<[T]> { /// For a slice/trait object, this sets the `data` field and leaves the rest /// unchanged. For a sized raw pointer, this simply sets the pointer. unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { - ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); + unsafe { + ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); + } ptr } @@ -1026,11 +1034,11 @@ impl Rc<[T]> { /// /// Unsafe because the caller must either take ownership or bind `T: Copy` unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { - let ptr = Self::allocate_for_slice(v.len()); - - ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len()); - - Self::from_ptr(ptr) + unsafe { + let ptr = Self::allocate_for_slice(v.len()); + ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len()); + Self::from_ptr(ptr) + } } /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size. 
@@ -1058,25 +1066,27 @@ impl Rc<[T]> { } } - let ptr = Self::allocate_for_slice(len); + unsafe { + let ptr = Self::allocate_for_slice(len); - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value(&*ptr); + let mem = ptr as *mut _ as *mut u8; + let layout = Layout::for_value(&*ptr); - // Pointer to first element - let elems = &mut (*ptr).value as *mut [T] as *mut T; + // Pointer to first element + let elems = &mut (*ptr).value as *mut [T] as *mut T; - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; - for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); - guard.n_elems += 1; - } + for (i, item) in iter.enumerate() { + ptr::write(elems.add(i), item); + guard.n_elems += 1; + } - // All clear. Forget the guard so it doesn't free the new RcBox. - forget(guard); + // All clear. Forget the guard so it doesn't free the new RcBox. + forget(guard); - Self::from_ptr(ptr) + Self::from_ptr(ptr) + } } } @@ -1786,10 +1796,12 @@ impl Weak { Self::new() } else { // See Rc::from_raw for details - let offset = data_offset(ptr); - let fake_ptr = ptr as *mut RcBox; - let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); - Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") } + unsafe { + let offset = data_offset(ptr); + let fake_ptr = ptr as *mut RcBox; + let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); + Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") } + } } } } @@ -2106,7 +2118,7 @@ unsafe fn data_offset(ptr: *const T) -> isize { // Because it is ?Sized, it will always be the last field in memory. // Note: This is a detail of the current implementation of the compiler, // and is not a guaranteed language detail. Do not rely on it outside of std. - data_offset_align(align_of_val(&*ptr)) + unsafe { data_offset_align(align_of_val(&*ptr)) } } /// Computes the offset of the data field within `RcBox`. diff --git a/src/liballoc/slice.rs b/src/liballoc/slice.rs index 53477288b59ee..d7dc2174d665f 100644 --- a/src/liballoc/slice.rs +++ b/src/liballoc/slice.rs @@ -831,8 +831,7 @@ where { let len = v.len(); let v = v.as_mut_ptr(); - let v_mid = v.add(mid); - let v_end = v.add(len); + let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) }; // The merge process first copies the shorter run into `buf`. Then it traces the newly copied // run and the longer run forwards (or backwards), comparing their next unconsumed elements and @@ -855,8 +854,10 @@ where if mid <= len - mid { // The left run is shorter. - ptr::copy_nonoverlapping(v, buf, mid); - hole = MergeHole { start: buf, end: buf.add(mid), dest: v }; + unsafe { + ptr::copy_nonoverlapping(v, buf, mid); + hole = MergeHole { start: buf, end: buf.add(mid), dest: v }; + } // Initially, these pointers point to the beginnings of their arrays. let left = &mut hole.start; @@ -866,17 +867,21 @@ where while *left < hole.end && right < v_end { // Consume the lesser side. // If equal, prefer the left run to maintain stability. 
- let to_copy = if is_less(&*right, &**left) { - get_and_increment(&mut right) - } else { - get_and_increment(left) - }; - ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1); + unsafe { + let to_copy = if is_less(&*right, &**left) { + get_and_increment(&mut right) + } else { + get_and_increment(left) + }; + ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1); + } } } else { // The right run is shorter. - ptr::copy_nonoverlapping(v_mid, buf, len - mid); - hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid }; + unsafe { + ptr::copy_nonoverlapping(v_mid, buf, len - mid); + hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid }; + } // Initially, these pointers point past the ends of their arrays. let left = &mut hole.dest; @@ -886,12 +891,14 @@ where while v < *left && buf < *right { // Consume the greater side. // If equal, prefer the right run to maintain stability. - let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) { - decrement_and_get(left) - } else { - decrement_and_get(right) - }; - ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1); + unsafe { + let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) { + decrement_and_get(left) + } else { + decrement_and_get(right) + }; + ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1); + } } } // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of @@ -899,12 +906,12 @@ where unsafe fn get_and_increment(ptr: &mut *mut T) -> *mut T { let old = *ptr; - *ptr = ptr.offset(1); + *ptr = unsafe { ptr.offset(1) }; old } unsafe fn decrement_and_get(ptr: &mut *mut T) -> *mut T { - *ptr = ptr.offset(-1); + *ptr = unsafe { ptr.offset(-1) }; *ptr } diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index 70860c09a2c31..57927c688479b 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -583,5 +583,5 @@ impl str { #[stable(feature = "str_box_extras", since = "1.20.0")] #[inline] pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box { - Box::from_raw(Box::into_raw(v) as *mut str) + unsafe { Box::from_raw(Box::into_raw(v) as *mut str) } } diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index 0378ff5362a8b..91b6d52679600 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -724,7 +724,7 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String { - String { vec: Vec::from_raw_parts(buf, length, capacity) } + unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } } } /// Converts a vector of bytes to a `String` without checking that the @@ -1329,9 +1329,11 @@ impl String { let amt = bytes.len(); self.vec.reserve(amt); - ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx); - ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); - self.vec.set_len(len + amt); + unsafe { + ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx); + ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); + self.vec.set_len(len + amt); + } } /// Inserts a string slice into this `String` at a byte position. 
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index cd4172d6a2d24..826f0c8fa833f 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -232,7 +232,7 @@ impl Arc { } unsafe fn from_ptr(ptr: *mut ArcInner) -> Self { - Self::from_inner(NonNull::new_unchecked(ptr)) + unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } } } @@ -543,7 +543,7 @@ impl Arc<[mem::MaybeUninit]> { #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub unsafe fn assume_init(self) -> Arc<[T]> { - Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) + unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } } } @@ -642,13 +642,15 @@ impl Arc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - let offset = data_offset(ptr); + unsafe { + let offset = data_offset(ptr); - // Reverse the offset to find the original ArcInner. - let fake_ptr = ptr as *mut ArcInner; - let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); + // Reverse the offset to find the original ArcInner. + let fake_ptr = ptr as *mut ArcInner; + let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); - Self::from_ptr(arc_ptr) + Self::from_ptr(arc_ptr) + } } /// Consumes the `Arc`, returning the wrapped pointer as `NonNull`. @@ -807,7 +809,7 @@ impl Arc { #[unstable(feature = "arc_mutate_strong_count", issue = "71983")] pub unsafe fn incr_strong_count(ptr: *const T) { // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop - let arc = mem::ManuallyDrop::new(Arc::::from_raw(ptr)); + let arc = unsafe { mem::ManuallyDrop::new(Arc::::from_raw(ptr)) }; // Now increase refcount, but don't drop new refcount either let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); } @@ -847,7 +849,7 @@ impl Arc { #[inline] #[unstable(feature = "arc_mutate_strong_count", issue = "71983")] pub unsafe fn decr_strong_count(ptr: *const T) { - mem::drop(Arc::from_raw(ptr)); + unsafe { mem::drop(Arc::from_raw(ptr)) }; } #[inline] @@ -865,7 +867,7 @@ impl Arc { unsafe fn drop_slow(&mut self) { // Destroy the data at this time, even though we may not free the box // allocation itself (there may still be weak pointers lying around). - ptr::drop_in_place(Self::get_mut_unchecked(self)); + unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; // Drop the weak ref collectively held by all strong references drop(Weak { ptr: self.ptr }); @@ -917,10 +919,12 @@ impl Arc { // Initialize the ArcInner let inner = mem_to_arcinner(mem.ptr.as_ptr()); - debug_assert_eq!(Layout::for_value(&*inner), layout); + debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout); - ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); + unsafe { + ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); + ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); + } inner } @@ -928,9 +932,11 @@ impl Arc { /// Allocates an `ArcInner` with sufficient space for an unsized inner value. unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { // Allocate for the `ArcInner` using the given value. 
- Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| { - set_data_ptr(ptr as *mut T, mem) as *mut ArcInner - }) + unsafe { + Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| { + set_data_ptr(ptr as *mut T, mem) as *mut ArcInner + }) + } } fn from_box(v: Box) -> Arc { @@ -959,9 +965,11 @@ impl Arc { impl Arc<[T]> { /// Allocates an `ArcInner<[T]>` with the given length. unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { - Self::allocate_for_layout(Layout::array::(len).unwrap(), |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]> - }) + unsafe { + Self::allocate_for_layout(Layout::array::(len).unwrap(), |mem| { + ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]> + }) + } } } @@ -970,7 +978,9 @@ impl Arc<[T]> { /// For a slice/trait object, this sets the `data` field and leaves the rest /// unchanged. For a sized raw pointer, this simply sets the pointer. unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { - ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); + unsafe { + ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); + } ptr } @@ -979,11 +989,13 @@ impl Arc<[T]> { /// /// Unsafe because the caller must either take ownership or bind `T: Copy`. unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { - let ptr = Self::allocate_for_slice(v.len()); + unsafe { + let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); + ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); - Self::from_ptr(ptr) + Self::from_ptr(ptr) + } } /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size. @@ -1011,25 +1023,27 @@ impl Arc<[T]> { } } - let ptr = Self::allocate_for_slice(len); + unsafe { + let ptr = Self::allocate_for_slice(len); - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value(&*ptr); + let mem = ptr as *mut _ as *mut u8; + let layout = Layout::for_value(&*ptr); - // Pointer to first element - let elems = &mut (*ptr).data as *mut [T] as *mut T; + // Pointer to first element + let elems = &mut (*ptr).data as *mut [T] as *mut T; - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; - for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); - guard.n_elems += 1; - } + for (i, item) in iter.enumerate() { + ptr::write(elems.add(i), item); + guard.n_elems += 1; + } - // All clear. Forget the guard so it doesn't free the new ArcInner. - mem::forget(guard); + // All clear. Forget the guard so it doesn't free the new ArcInner. + mem::forget(guard); - Self::from_ptr(ptr) + Self::from_ptr(ptr) + } } } @@ -1274,7 +1288,7 @@ impl Arc { pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { // We are careful to *not* create a reference covering the "count" fields, as // this would alias with concurrent access to the reference counts (e.g. by `Weak`). 
- &mut (*this.ptr.as_ptr()).data + unsafe { &mut (*this.ptr.as_ptr()).data } } /// Determine whether this is the unique reference (including weak refs) to @@ -1551,10 +1565,12 @@ impl Weak { Self::new() } else { // See Arc::from_raw for details - let offset = data_offset(ptr); - let fake_ptr = ptr as *mut ArcInner; - let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); - Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") } + unsafe { + let offset = data_offset(ptr); + let fake_ptr = ptr as *mut ArcInner; + let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); + Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") } + } } } } @@ -2260,7 +2276,7 @@ unsafe fn data_offset(ptr: *const T) -> isize { // Because it is `?Sized`, it will always be the last field in memory. // Note: This is a detail of the current implementation of the compiler, // and is not a guaranteed language detail. Do not rely on it outside of std. - data_offset_align(align_of_val(&*ptr)) + unsafe { data_offset_align(align_of_val(&*ptr)) } } /// Computes the offset of the data field within `ArcInner`. diff --git a/src/liballoc/task.rs b/src/liballoc/task.rs index 745444a152e1b..0d1cc99df47c5 100644 --- a/src/liballoc/task.rs +++ b/src/liballoc/task.rs @@ -60,7 +60,7 @@ impl From> for RawWaker { fn raw_waker(waker: Arc) -> RawWaker { // Increment the reference count of the arc to clone it. unsafe fn clone_waker(waker: *const ()) -> RawWaker { - Arc::incr_strong_count(waker as *const W); + unsafe { Arc::incr_strong_count(waker as *const W) }; RawWaker::new( waker as *const (), &RawWakerVTable::new(clone_waker::, wake::, wake_by_ref::, drop_waker::), @@ -69,19 +69,20 @@ fn raw_waker(waker: Arc) -> RawWaker { // Wake by value, moving the Arc into the Wake::wake function unsafe fn wake(waker: *const ()) { - let waker: Arc = Arc::from_raw(waker as *const W); + let waker: Arc = unsafe { Arc::from_raw(waker as *const W) }; ::wake(waker); } // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it unsafe fn wake_by_ref(waker: *const ()) { - let waker: ManuallyDrop> = ManuallyDrop::new(Arc::from_raw(waker as *const W)); + let waker: ManuallyDrop> = + unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) }; ::wake_by_ref(&waker); } // Decrement the reference count of the Arc on drop unsafe fn drop_waker(waker: *const ()) { - Arc::decr_strong_count(waker as *const W); + unsafe { Arc::decr_strong_count(waker as *const W) }; } RawWaker::new( diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 06462fd96d9a9..94a19c0db374d 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -465,7 +465,7 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec { - Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } + unsafe { Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } } } /// Returns the number of elements the vector can hold without @@ -1264,10 +1264,10 @@ impl Vec { /// Appends elements to `Self` from other buffer. 
#[inline] unsafe fn append_elements(&mut self, other: *const [T]) { - let count = (*other).len(); + let count = unsafe { (*other).len() }; self.reserve(count); let len = self.len(); - ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count); + unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; self.len += count; } @@ -2965,15 +2965,16 @@ impl Drain<'_, T> { /// Fill that range as much as possible with new elements from the `replace_with` iterator. /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.) unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { - let vec = self.vec.as_mut(); + let vec = unsafe { self.vec.as_mut() }; let range_start = vec.len; let range_end = self.tail_start; - let range_slice = - slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start); + let range_slice = unsafe { + slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start) + }; for place in range_slice { if let Some(new_item) = replace_with.next() { - ptr::write(place, new_item); + unsafe { ptr::write(place, new_item) }; vec.len += 1; } else { return false; @@ -2984,14 +2985,16 @@ impl Drain<'_, T> { /// Makes room for inserting more elements before the tail. unsafe fn move_tail(&mut self, additional: usize) { - let vec = self.vec.as_mut(); + let vec = unsafe { self.vec.as_mut() }; let len = self.tail_start + self.tail_len; vec.buf.reserve(len, additional); let new_tail_start = self.tail_start + additional; - let src = vec.as_ptr().add(self.tail_start); - let dst = vec.as_mut_ptr().add(new_tail_start); - ptr::copy(src, dst, self.tail_len); + unsafe { + let src = vec.as_ptr().add(self.tail_start); + let dst = vec.as_mut_ptr().add(new_tail_start); + ptr::copy(src, dst, self.tail_len); + } self.tail_start = new_tail_start; } } From 7b6398657c2335c053d7733f5bb752e8d2b5d261 Mon Sep 17 00:00:00 2001 From: LeSeulArtichaut Date: Sun, 31 May 2020 15:06:51 +0200 Subject: [PATCH 2/2] Apply suggestions from code review Co-authored-by: nikomatsakis --- src/liballoc/alloc.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 5bed68151dd9d..98c7ac3f2ef17 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -215,14 +215,16 @@ unsafe impl AllocRef for Global { } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > size` or something similar. - unsafe { + let ptr = unsafe { intrinsics::assume(new_size > size); - let ptr = realloc(ptr.as_ptr(), layout, new_size); - let memory = - MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + realloc(ptr.as_ptr(), layout, new_size) + }; + let memory = + MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + unsafe { init.init_offset(memory, size); - Ok(memory) } + Ok(memory) } } } @@ -255,11 +257,11 @@ unsafe impl AllocRef for Global { } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < size` or something similar. - unsafe { + let ptr = unsafe { intrinsics::assume(new_size < size); - let ptr = realloc(ptr.as_ptr(), layout, new_size); - Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) - } + realloc(ptr.as_ptr(), layout, new_size) + }; + Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) } } }
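
The mechanical pattern applied in every hunk above is the one enforced by `unsafe_op_in_unsafe_fn`: with the lint denied, the body of an `unsafe fn` stops being an implicit `unsafe` block, so each individual unsafe operation has to be wrapped in an explicit `unsafe { ... }` block with its own safety justification. Below is a minimal, self-contained sketch of that pattern; `read_first` and its caller are illustrative only and do not appear anywhere in the patch.

    #![deny(unsafe_op_in_unsafe_fn)]

    /// # Safety
    /// `ptr` must be non-null, properly aligned, and point to an initialized `T`
    /// that is valid to read (and, for non-`Copy` types, safe to duplicate).
    unsafe fn read_first<T>(ptr: *const T) -> T {
        // With the lint denied, this raw-pointer read must sit in an explicit
        // `unsafe` block even though we are already inside an `unsafe fn`;
        // this is exactly the change made throughout liballoc above.
        unsafe { core::ptr::read(ptr) }
    }

    fn main() {
        let x = 42u32;
        // Call sites are unaffected: calling an `unsafe fn` still requires
        // an `unsafe` block of its own.
        let y = unsafe { read_first(&x) };
        assert_eq!(y, 42);
    }

At the time of this patch the explicit-block form was still feature-gated, which is why lib.rs also adds `#![feature(unsafe_block_in_unsafe_fn)]` alongside the `deny` attribute.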