Update NonZero and NonNull to not field-project (per MCP#807) #133651

Merged
34 changes: 28 additions & 6 deletions library/core/src/num/nonzero.rs
@@ -37,6 +37,8 @@ pub unsafe trait ZeroablePrimitive: Sized + Copy + private::Sealed {
macro_rules! impl_zeroable_primitive {
    ($($NonZeroInner:ident ( $primitive:ty )),+ $(,)?) => {
        mod private {
+            use super::*;
+
            #[unstable(
                feature = "nonzero_internals",
                reason = "implementation detail which may disappear or be replaced at any time",
@@ -45,7 +47,11 @@ macro_rules! impl_zeroable_primitive {
            pub trait Sealed {}

            $(
-                #[derive(Debug, Clone, Copy, PartialEq)]
+                // This inner type is never shown directly, so intentionally does not have Debug
+                #[expect(missing_debug_implementations)]
+                // Since this struct is non-generic and derives Copy,
+                // the derived Clone is `*self` and thus doesn't field-project.
+                #[derive(Clone, Copy)]
                #[repr(transparent)]
                #[rustc_layout_scalar_valid_range_start(1)]
                #[rustc_nonnull_optimization_guaranteed]
@@ -55,6 +61,16 @@
                    issue = "none"
                )]
                pub struct $NonZeroInner($primitive);
+
+                // This is required to allow matching a constant. We don't get it from a derive
+                // because the derived `PartialEq` would do a field projection, which is banned
+                // by <https://github.com/rust-lang/compiler-team/issues/807>.
+                #[unstable(
+                    feature = "nonzero_internals",
+                    reason = "implementation detail which may disappear or be replaced at any time",
+                    issue = "none"
+                )]
+                impl StructuralPartialEq for $NonZeroInner {}
            )+
        }
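As an illustrative sketch (toy `Wrapper` type, not the real macro output), the banned derive would expand to roughly the following, and the `.0` accesses are exactly the field projections that MCP#807 forbids on `scalar_valid_range` types:

    struct Wrapper(u8);

    // Approximately what `#[derive(PartialEq)]` would generate:
    impl PartialEq for Wrapper {
        fn eq(&self, other: &Self) -> bool {
            // Each `.0` here is a field projection through a reference.
            self.0 == other.0
        }
    }

The hand-written `StructuralPartialEq` impl keeps constants of the type usable in `match` patterns without pulling in that derived body.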

Expand Down Expand Up @@ -172,7 +188,7 @@ where
{
#[inline]
fn clone(&self) -> Self {
Self(self.0)
*self
}
}

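A sketch of why `*self` is the projection-free spelling, assuming a toy `Copy` wrapper rather than the real generic type: for a `Copy` type, `*self` copies the whole value at once, so no individual field is ever read:

    #[derive(Copy)]
    struct Wrapper(u8);

    impl Clone for Wrapper {
        fn clone(&self) -> Self {
            // Whole-value copy; `Self(self.0)` would project into `.0`.
            *self
        }
    }
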
@@ -440,15 +456,21 @@ where
    #[rustc_const_stable(feature = "const_nonzero_get", since = "1.34.0")]
    #[inline]
    pub const fn get(self) -> T {
-        // FIXME: This can be changed to simply `self.0` once LLVM supports `!range` metadata
-        // for function arguments: https://github.com/llvm/llvm-project/issues/76628
-        //
        // Rustc can set range metadata only if it loads `self` from
        // memory somewhere. If the value of `self` was from by-value argument
        // of some not-inlined function, LLVM don't have range metadata
        // to understand that the value cannot be zero.
        //
-        // For now, using the transmute `assume`s the range at runtime.
+        // Using the transmute `assume`s the range at runtime.
+        //
+        // Even once LLVM supports `!range` metadata for function arguments
+        // (see <https://github.com/llvm/llvm-project/issues/76628>), this can't
+        // be `.0` because MCP#807 bans field-projecting into `scalar_valid_range`
+        // types, and it arguably wouldn't want to be anyway because if this is
+        // MIR-inlined, there's no opportunity to put that argument metadata anywhere.
+        //
+        // The good answer here will eventually be pattern types, which will hopefully
+        // allow it to go back to `.0`, maybe with a cast of some sort.
+        //
        // SAFETY: `ZeroablePrimitive` guarantees that the size and bit validity
        // of `.0` is such that this transmute is sound.
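A minimal standalone sketch of the same trick (hypothetical `NonZeroU8Inner`, relying only on `repr(transparent)`; the real macro output additionally carries the `rustc_layout_scalar_valid_range_start` attribute):

    use core::mem;

    #[repr(transparent)]
    #[derive(Clone, Copy)]
    struct NonZeroU8Inner(u8);

    impl NonZeroU8Inner {
        const fn get(self) -> u8 {
            // SAFETY: `repr(transparent)` guarantees this wrapper has the same
            // size and bit validity as `u8`, so the whole-value transmute is
            // sound and avoids naming the field.
            unsafe { mem::transmute::<Self, u8>(self) }
        }
    }
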
53 changes: 30 additions & 23 deletions library/core/src/ptr/non_null.rs
@@ -7,7 +7,7 @@
use crate::pin::PinCoerceUnsized;
use crate::ptr::Unique;
use crate::slice::{self, SliceIndex};
use crate::ub_checks::assert_unsafe_precondition;
-use crate::{fmt, hash, intrinsics, ptr};
+use crate::{fmt, hash, intrinsics, mem, ptr};

/// `*mut T` but non-zero and [covariant].
///
@@ -69,6 +69,8 @@
#[rustc_nonnull_optimization_guaranteed]
#[rustc_diagnostic_item = "NonNull"]
pub struct NonNull<T: ?Sized> {
+    // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
+    // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
    pointer: *const T,
}

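The pattern the rest of this file follows, sketched on a hypothetical toy type (`MyNonNull` over `*const u8`; the real type is generic and carries the range attributes): reads go through a whole-value transmute, while struct-literal construction stays as-is, since MCP#807 bans field projection but not construction:

    use core::mem;

    #[repr(transparent)]
    #[derive(Clone, Copy)]
    struct MyNonNull {
        pointer: *const u8,
    }

    impl MyNonNull {
        fn as_ptr(self) -> *mut u8 {
            // Read the pointer without naming the field.
            unsafe { mem::transmute::<Self, *mut u8>(self) }
        }

        unsafe fn offset(self, count: isize) -> Self {
            // SAFETY: the caller upholds `offset`'s contract; the struct
            // literal is construction, not projection.
            unsafe { MyNonNull { pointer: self.as_ptr().offset(count) } }
        }
    }
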
@@ -282,7 +284,7 @@ impl<T: ?Sized> NonNull<T> {
    pub fn addr(self) -> NonZero<usize> {
        // SAFETY: The pointer is guaranteed by the type to be non-null,
        // meaning that the address will be non-zero.
-        unsafe { NonZero::new_unchecked(self.pointer.addr()) }
+        unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
    }

/// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
@@ -296,7 +298,7 @@
    #[stable(feature = "strict_provenance", since = "CURRENT_RUSTC_VERSION")]
    pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `ptr::from::with_addr` is non-null because `addr` is guaranteed to be non-zero.
-        unsafe { NonNull::new_unchecked(self.pointer.with_addr(addr.get()) as *mut _) }
+        unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
    }

/// Creates a new pointer by mapping `self`'s address to a new one, preserving the
@@ -335,7 +337,12 @@ impl<T: ?Sized> NonNull<T> {
    #[must_use]
    #[inline(always)]
    pub const fn as_ptr(self) -> *mut T {
-        self.pointer as *mut T
+        // This is a transmute for the same reasons as `NonZero::get`.
+
+        // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
+        // and `*mut T` have the same layout, so transitively we can transmute
+        // our `NonNull` to a `*mut T` directly.
+        unsafe { mem::transmute::<Self, *mut T>(self) }
    }

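An illustrative sanity check of the layout claim in that SAFETY comment (not part of the change): `*const T` and `*mut T` always agree on size and alignment, so the two-step transmute reasoning composes:

    use core::mem;

    const _: () = assert!(mem::size_of::<*const u8>() == mem::size_of::<*mut u8>());
    const _: () = assert!(mem::align_of::<*const u8>() == mem::align_of::<*mut u8>());
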
/// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
@@ -484,7 +491,7 @@ impl<T: ?Sized> NonNull<T> {
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
-        unsafe { NonNull { pointer: intrinsics::offset(self.pointer, count) } }
+        unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
    }

/// Calculates the offset from a pointer in bytes.
@@ -508,7 +515,7 @@
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
-        unsafe { NonNull { pointer: self.pointer.byte_offset(count) } }
+        unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
    }

/// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
@@ -560,7 +567,7 @@ impl<T: ?Sized> NonNull<T> {
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
-        unsafe { NonNull { pointer: intrinsics::offset(self.pointer, count) } }
+        unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
    }

/// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
@@ -584,7 +591,7 @@
        // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
        // to an allocation, there can't be an allocation at null, thus it's safe to construct
        // `NonNull`.
-        unsafe { NonNull { pointer: self.pointer.byte_add(count) } }
+        unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
    }

/// Subtracts an offset from a pointer (convenience for
@@ -667,7 +674,7 @@ impl<T: ?Sized> NonNull<T> {
        // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
        // to an allocation, there can't be an allocation at null, thus it's safe to construct
        // `NonNull`.
-        unsafe { NonNull { pointer: self.pointer.byte_sub(count) } }
+        unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
    }

/// Calculates the distance between two pointers within the same allocation. The returned value is in
@@ -764,7 +771,7 @@ impl<T: ?Sized> NonNull<T> {
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
-        unsafe { self.pointer.offset_from(origin.pointer) }
+        unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
    }

/// Calculates the distance between two pointers within the same allocation. The returned value is in
@@ -782,7 +789,7 @@
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
        // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
-        unsafe { self.pointer.byte_offset_from(origin.pointer) }
+        unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
    }

// N.B. `wrapping_offset``, `wrapping_add`, etc are not implemented because they can wrap to null
@@ -857,7 +864,7 @@ impl<T: ?Sized> NonNull<T> {
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `sub_ptr`.
-        unsafe { self.pointer.sub_ptr(subtracted.pointer) }
+        unsafe { self.as_ptr().sub_ptr(subtracted.as_ptr()) }
    }

/// Calculates the distance between two pointers within the same allocation, *where it's known that
@@ -876,7 +883,7 @@
    #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
    pub const unsafe fn byte_sub_ptr<U: ?Sized>(self, origin: NonNull<U>) -> usize {
        // SAFETY: the caller must uphold the safety contract for `byte_sub_ptr`.
-        unsafe { self.pointer.byte_sub_ptr(origin.pointer) }
+        unsafe { self.as_ptr().byte_sub_ptr(origin.as_ptr()) }
    }

/// Reads the value from `self` without moving it. This leaves the
@@ -894,7 +901,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
-        unsafe { ptr::read(self.pointer) }
+        unsafe { ptr::read(self.as_ptr()) }
    }

/// Performs a volatile read of the value from `self` without moving it. This
@@ -915,7 +922,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
-        unsafe { ptr::read_volatile(self.pointer) }
+        unsafe { ptr::read_volatile(self.as_ptr()) }
    }

/// Reads the value from `self` without moving it. This leaves the
@@ -935,7 +942,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
-        unsafe { ptr::read_unaligned(self.pointer) }
+        unsafe { ptr::read_unaligned(self.as_ptr()) }
    }

/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
@@ -955,7 +962,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
-        unsafe { ptr::copy(self.pointer, dest.as_ptr(), count) }
+        unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
    }

/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
@@ -975,7 +982,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
-        unsafe { ptr::copy_nonoverlapping(self.pointer, dest.as_ptr(), count) }
+        unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
    }

/// Copies `count * size_of<T>` bytes from `src` to `self`. The source
@@ -995,7 +1002,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
-        unsafe { ptr::copy(src.pointer, self.as_ptr(), count) }
+        unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
    }

/// Copies `count * size_of<T>` bytes from `src` to `self`. The source
@@ -1015,7 +1022,7 @@
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
-        unsafe { ptr::copy_nonoverlapping(src.pointer, self.as_ptr(), count) }
+        unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
    }

/// Executes the destructor (if any) of the pointed-to value.
@@ -1202,7 +1209,7 @@ impl<T: ?Sized> NonNull<T> {

    {
        // SAFETY: `align` has been checked to be a power of 2 above.
-        unsafe { ptr::align_offset(self.pointer, align) }
+        unsafe { ptr::align_offset(self.as_ptr(), align) }
    }
}

@@ -1230,7 +1237,7 @@ impl<T: ?Sized> NonNull<T> {
    where
        T: Sized,
    {
-        self.pointer.is_aligned()
+        self.as_ptr().is_aligned()
    }

/// Returns whether the pointer is aligned to `align`.
@@ -1267,7 +1274,7 @@ impl<T: ?Sized> NonNull<T> {
    #[must_use]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    pub fn is_aligned_to(self, align: usize) -> bool {
-        self.pointer.is_aligned_to(align)
+        self.as_ptr().is_aligned_to(align)
    }
}

@@ -31,7 +31,6 @@
}
}
scope 9 (inlined NonNull::<[u8]>::as_ptr) {
let mut _17: *const [u8];
}
}
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
@@ -102,16 +101,9 @@
StorageDead(_16);
StorageDead(_12);
StorageDead(_6);
- StorageLive(_17);
+ nop;
_17 = copy (_5.0: *const [u8]);
- _4 = move _17 as *mut [u8] (PtrToPtr);
- StorageDead(_17);
+ _4 = copy _17 as *mut [u8] (PtrToPtr);
+ nop;
_4 = copy _5 as *mut [u8] (Transmute);
StorageDead(_5);
- _3 = move _4 as *mut u8 (PtrToPtr);
+ _3 = copy _17 as *mut u8 (PtrToPtr);
_3 = move _4 as *mut u8 (PtrToPtr);
StorageDead(_4);
StorageDead(_3);
- StorageDead(_1);
@@ -20,7 +20,6 @@
scope 5 (inlined <std::alloc::Global as Allocator>::allocate) {
}
scope 6 (inlined NonNull::<[u8]>::as_ptr) {
let mut _12: *const [u8];
}
}
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
@@ -45,16 +44,9 @@

bb1: {
StorageDead(_6);
- StorageLive(_12);
+ nop;
_12 = copy (_5.0: *const [u8]);
- _4 = move _12 as *mut [u8] (PtrToPtr);
- StorageDead(_12);
+ _4 = copy _12 as *mut [u8] (PtrToPtr);
+ nop;
_4 = copy _5 as *mut [u8] (Transmute);
StorageDead(_5);
- _3 = move _4 as *mut u8 (PtrToPtr);
+ _3 = copy _12 as *mut u8 (PtrToPtr);
_3 = move _4 as *mut u8 (PtrToPtr);
StorageDead(_4);
StorageDead(_3);
- StorageDead(_1);
@@ -31,7 +31,6 @@
}
}
scope 9 (inlined NonNull::<[u8]>::as_ptr) {
let mut _17: *const [u8];
}
}
scope 3 (inlined #[track_caller] Option::<Layout>::unwrap) {
Expand Down Expand Up @@ -102,16 +101,9 @@
StorageDead(_16);
StorageDead(_12);
StorageDead(_6);
- StorageLive(_17);
+ nop;
_17 = copy (_5.0: *const [u8]);
- _4 = move _17 as *mut [u8] (PtrToPtr);
- StorageDead(_17);
+ _4 = copy _17 as *mut [u8] (PtrToPtr);
+ nop;
_4 = copy _5 as *mut [u8] (Transmute);
StorageDead(_5);
- _3 = move _4 as *mut u8 (PtrToPtr);
+ _3 = copy _17 as *mut u8 (PtrToPtr);
_3 = move _4 as *mut u8 (PtrToPtr);
StorageDead(_4);
StorageDead(_3);
- StorageDead(_1);