Use &raw in the standard library
Since the stabilization in rust-lang#127679 has reached stage0 (1.82-beta), we can
start using `&raw` freely, and even the soft-deprecated `ptr::addr_of!` and
`ptr::addr_of_mut!` macros no longer need to allow the unstable `raw_ref_op`
feature internally.

I intentionally did not change any documentation or tests, but all remaining
uses of those macros in the standard library itself now use `&raw const` or
`&raw mut`.
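
For context, here is a minimal sketch of the substitution applied throughout this diff, using a hypothetical `Node` type that mirrors the `LeafNode` pattern below. The `&raw` operator produces a raw pointer to a place directly, without materializing a reference, exactly as the macros did:

```rust
use std::mem::MaybeUninit;
use std::ptr;

struct Node {
    len: u16,
    parent: Option<usize>,
}

fn main() {
    let mut slot = MaybeUninit::<Node>::uninit();
    let this: *mut Node = slot.as_mut_ptr();
    unsafe {
        // Field projection through a raw pointer: no reference to the
        // still-uninitialized struct is ever created.
        (&raw mut (*this).len).write(0); // new spelling (stable in 1.82)
        ptr::addr_of_mut!((*this).parent).write(None); // soft-deprecated macro, same semantics
        let node = slot.assume_init();
        assert_eq!(node.len, 0);
        assert!(node.parent.is_none());
    }
}
```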
cuviper committed Sep 26, 2024
1 parent f50b2ba commit ff45313
Showing 51 changed files with 150 additions and 185 deletions.
10 changes: 5 additions & 5 deletions alloc/src/boxed.rs
@@ -199,7 +199,7 @@ use core::ops::{
DerefPure, DispatchFromDyn, Receiver,
};
use core::pin::{Pin, PinCoerceUnsized};
-use core::ptr::{self, NonNull, Unique, addr_of_mut};
+use core::ptr::{self, NonNull, Unique};
use core::task::{Context, Poll};
use core::{borrow, fmt, slice};

@@ -1277,7 +1277,7 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
#[inline]
pub fn into_raw(b: Self) -> *mut T {
// Make sure Miri realizes that we transition from a noalias pointer to a raw pointer here.
-unsafe { addr_of_mut!(*&mut *Self::into_raw_with_allocator(b).0) }
+unsafe { &raw mut *&mut *Self::into_raw_with_allocator(b).0 }
}

/// Consumes the `Box`, returning a wrapped `NonNull` pointer.
@@ -1396,7 +1396,7 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
// want *no* aliasing requirements here!
// In case `A` *is* `Global`, this does not quite have the right behavior; `into_raw`
// works around that.
-let ptr = addr_of_mut!(**b);
+let ptr = &raw mut **b;
let alloc = unsafe { ptr::read(&b.1) };
(ptr, alloc)
}
@@ -1506,7 +1506,7 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
pub fn as_mut_ptr(b: &mut Self) -> *mut T {
// This is a primitive deref, not going through `DerefMut`, and therefore not materializing
// any references.
-ptr::addr_of_mut!(**b)
+&raw mut **b
}

/// Returns a raw pointer to the `Box`'s contents.
@@ -1554,7 +1554,7 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
pub fn as_ptr(b: &Self) -> *const T {
// This is a primitive deref, not going through `DerefMut`, and therefore not materializing
// any references.
-ptr::addr_of!(**b)
+&raw const **b
}

/// Returns a reference to the underlying allocator.
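A side note on why the `as_ptr`/`as_mut_ptr` bodies above matter: `&raw mut **b` goes through the `Box`'s primitive deref, so no intermediate `&mut T` is created and the returned pointer keeps provenance for later writes. A hedged sketch of the same shape with a plain `Box` (illustrative, not the standard-library internals):

```rust
fn main() {
    let mut b = Box::new(5u32);
    // Primitive deref + raw borrow: no `&mut u32` is materialized,
    // so under Miri's aliasing rules `p` stays valid for the write below.
    let p: *mut u32 = &raw mut *b;
    unsafe { p.write(6) };
    assert_eq!(*b, 6);
}
```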
2 changes: 1 addition & 1 deletion alloc/src/boxed/thin.rs
@@ -186,7 +186,7 @@ impl<T: ?Sized> ThinBox<T> {

fn with_header(&self) -> &WithHeader<<T as Pointee>::Metadata> {
// SAFETY: both types are transparent to `NonNull<u8>`
-unsafe { &*(core::ptr::addr_of!(self.ptr) as *const WithHeader<_>) }
+unsafe { &*((&raw const self.ptr) as *const WithHeader<_>) }
}
}

10 changes: 5 additions & 5 deletions alloc/src/collections/btree/node.rs
@@ -72,8 +72,8 @@ impl<K, V> LeafNode<K, V> {
// be both slightly faster and easier to track in Valgrind.
unsafe {
// parent_idx, keys, and vals are all MaybeUninit
-ptr::addr_of_mut!((*this).parent).write(None);
-ptr::addr_of_mut!((*this).len).write(0);
+(&raw mut (*this).parent).write(None);
+(&raw mut (*this).len).write(0);
}
}

@@ -114,7 +114,7 @@ impl<K, V> InternalNode<K, V> {
unsafe {
let mut node = Box::<Self, _>::new_uninit_in(alloc);
// We only need to initialize the data; the edges are MaybeUninit.
-LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
+LeafNode::init(&raw mut (*node.as_mut_ptr()).data);
node.assume_init()
}
}
@@ -525,8 +525,8 @@ impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
// to avoid aliasing with outstanding references to other elements,
// in particular, those returned to the caller in earlier iterations.
let leaf = Self::as_leaf_ptr(&mut self);
-let keys = unsafe { ptr::addr_of!((*leaf).keys) };
-let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
+let keys = unsafe { &raw const (*leaf).keys };
+let vals = unsafe { &raw mut (*leaf).vals };
// We must coerce to unsized array pointers because of Rust issue #74679.
let keys: *const [_] = keys;
let vals: *mut [_] = vals;
26 changes: 11 additions & 15 deletions alloc/src/rc.rs
@@ -787,7 +787,7 @@ impl<T, A: Allocator> Rc<T, A> {

let strong = unsafe {
let inner = init_ptr.as_ptr();
-ptr::write(ptr::addr_of_mut!((*inner).value), data);
+ptr::write(&raw mut (*inner).value, data);

let prev_value = (*inner).strong.get();
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
@@ -1442,7 +1442,7 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
// SAFETY: This cannot go through Deref::deref or Rc::inner because
// this is required to retain raw/mut provenance such that e.g. `get_mut` can
// write through the pointer after the Rc is recovered through `from_raw`.
-unsafe { ptr::addr_of_mut!((*ptr).value) }
+unsafe { &raw mut (*ptr).value }
}

/// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
@@ -2042,8 +2042,8 @@ impl<T: ?Sized> Rc<T> {
unsafe {
debug_assert_eq!(Layout::for_value_raw(inner), layout);

-ptr::addr_of_mut!((*inner).strong).write(Cell::new(1));
-ptr::addr_of_mut!((*inner).weak).write(Cell::new(1));
+(&raw mut (*inner).strong).write(Cell::new(1));
+(&raw mut (*inner).weak).write(Cell::new(1));
}

Ok(inner)
@@ -2072,8 +2072,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {

// Copy value as bytes
ptr::copy_nonoverlapping(
-core::ptr::addr_of!(*src) as *const u8,
-ptr::addr_of_mut!((*ptr).value) as *mut u8,
+(&raw const *src) as *const u8,
+(&raw mut (*ptr).value) as *mut u8,
value_size,
);

@@ -2107,11 +2107,7 @@ impl<T> Rc<[T]> {
unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
unsafe {
let ptr = Self::allocate_for_slice(v.len());
-ptr::copy_nonoverlapping(
-v.as_ptr(),
-ptr::addr_of_mut!((*ptr).value) as *mut T,
-v.len(),
-);
+ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
Self::from_ptr(ptr)
}
}
@@ -2149,7 +2145,7 @@ impl<T> Rc<[T]> {
let layout = Layout::for_value_raw(ptr);

// Pointer to first element
-let elems = ptr::addr_of_mut!((*ptr).value) as *mut T;
+let elems = (&raw mut (*ptr).value) as *mut T;

let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

@@ -2577,7 +2573,7 @@ impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-fmt::Pointer::fmt(&core::ptr::addr_of!(**self), f)
+fmt::Pointer::fmt(&(&raw const **self), f)
}
}

@@ -2718,7 +2714,7 @@ impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();

let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
-ptr::copy_nonoverlapping(vec_ptr, ptr::addr_of_mut!((*rc_ptr).value) as *mut T, len);
+ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);

// Create a `Vec<T, &A>` with length 0, to deallocate the buffer
// without dropping its contents or the allocator
@@ -3084,7 +3080,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
// SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
// The payload may be dropped at this point, and we have to maintain provenance,
// so use raw pointer manipulation.
-unsafe { ptr::addr_of_mut!((*ptr).value) }
+unsafe { &raw mut (*ptr).value }
}
}

22 changes: 11 additions & 11 deletions alloc/src/sync.rs
@@ -797,7 +797,7 @@ impl<T, A: Allocator> Arc<T, A> {
// reference into a strong reference.
let strong = unsafe {
let inner = init_ptr.as_ptr();
-ptr::write(ptr::addr_of_mut!((*inner).data), data);
+ptr::write(&raw mut (*inner).data, data);

// The above write to the data field must be visible to any threads which
// observe a non-zero strong count. Therefore we need at least "Release" ordering
@@ -1583,7 +1583,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
// SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because
// this is required to retain raw/mut provenance such that e.g. `get_mut` can
// write through the pointer after the Rc is recovered through `from_raw`.
-unsafe { ptr::addr_of_mut!((*ptr).data) }
+unsafe { &raw mut (*ptr).data }
}

/// Constructs an `Arc<T, A>` from a raw pointer.
@@ -1955,8 +1955,8 @@ impl<T: ?Sized> Arc<T> {
debug_assert_eq!(unsafe { Layout::for_value_raw(inner) }, layout);

unsafe {
-ptr::addr_of_mut!((*inner).strong).write(atomic::AtomicUsize::new(1));
-ptr::addr_of_mut!((*inner).weak).write(atomic::AtomicUsize::new(1));
+(&raw mut (*inner).strong).write(atomic::AtomicUsize::new(1));
+(&raw mut (*inner).weak).write(atomic::AtomicUsize::new(1));
}

inner
@@ -1986,8 +1986,8 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {

// Copy value as bytes
ptr::copy_nonoverlapping(
-core::ptr::addr_of!(*src) as *const u8,
-ptr::addr_of_mut!((*ptr).data) as *mut u8,
+(&raw const *src) as *const u8,
+(&raw mut (*ptr).data) as *mut u8,
value_size,
);

@@ -2022,7 +2022,7 @@ impl<T> Arc<[T]> {
unsafe {
let ptr = Self::allocate_for_slice(v.len());

-ptr::copy_nonoverlapping(v.as_ptr(), ptr::addr_of_mut!((*ptr).data) as *mut T, v.len());
+ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).data) as *mut T, v.len());

Self::from_ptr(ptr)
}
@@ -2061,7 +2061,7 @@ impl<T> Arc<[T]> {
let layout = Layout::for_value_raw(ptr);

// Pointer to first element
-let elems = ptr::addr_of_mut!((*ptr).data) as *mut T;
+let elems = (&raw mut (*ptr).data) as *mut T;

let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

@@ -2805,7 +2805,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
// SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
// The payload may be dropped at this point, and we have to maintain provenance,
// so use raw pointer manipulation.
-unsafe { ptr::addr_of_mut!((*ptr).data) }
+unsafe { &raw mut (*ptr).data }
}
}

@@ -3428,7 +3428,7 @@ impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Arc<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for Arc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-fmt::Pointer::fmt(&core::ptr::addr_of!(**self), f)
+fmt::Pointer::fmt(&(&raw const **self), f)
}
}

@@ -3678,7 +3678,7 @@ impl<T, A: Allocator + Clone> From<Vec<T, A>> for Arc<[T], A> {
let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();

let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
-ptr::copy_nonoverlapping(vec_ptr, ptr::addr_of_mut!((*rc_ptr).data) as *mut T, len);
+ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).data) as *mut T, len);

// Create a `Vec<T, &A>` with length 0, to deallocate the buffer
// without dropping its contents or the allocator
4 changes: 2 additions & 2 deletions alloc/src/vec/into_iter.rs
@@ -21,11 +21,11 @@ use crate::raw_vec::RawVec;
macro non_null {
(mut $place:expr, $t:ident) => {{
#![allow(unused_unsafe)] // we're sometimes used within an unsafe block
-unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
+unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
}},
($place:expr, $t:ident) => {{
#![allow(unused_unsafe)] // we're sometimes used within an unsafe block
-unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
+unsafe { *((&raw const $place) as *const NonNull<$t>) }
}},
}

4 changes: 2 additions & 2 deletions core/src/ffi/c_str.rs
@@ -5,7 +5,7 @@ use crate::error::Error;
use crate::ffi::c_char;
use crate::iter::FusedIterator;
use crate::marker::PhantomData;
-use crate::ptr::{NonNull, addr_of};
+use crate::ptr::NonNull;
use crate::slice::memchr;
use crate::{fmt, intrinsics, ops, slice, str};

@@ -623,7 +623,7 @@ impl CStr {
pub const fn to_bytes_with_nul(&self) -> &[u8] {
// SAFETY: Transmuting a slice of `c_char`s to a slice of `u8`s
// is safe on all supported targets.
-unsafe { &*(addr_of!(self.inner) as *const [u8]) }
+unsafe { &*((&raw const self.inner) as *const [u8]) }
}

/// Iterates over the bytes in this C string.
3 changes: 1 addition & 2 deletions core/src/iter/adapters/filter_map.rs
@@ -3,7 +3,6 @@ use crate::iter::{FusedIterator, InPlaceIterable, TrustedFused};
use crate::mem::{ManuallyDrop, MaybeUninit};
use crate::num::NonZero;
use crate::ops::{ControlFlow, Try};
-use crate::ptr::addr_of;
use crate::{array, fmt};

/// An iterator that uses `f` to both filter and map elements from `iter`.
@@ -101,7 +100,7 @@

unsafe {
let opt_payload_at: *const MaybeUninit<B> =
-addr_of!(val).byte_add(core::mem::offset_of!(Option<B>, Some.0)).cast();
+(&raw const val).byte_add(core::mem::offset_of!(Option<B>, Some.0)).cast();
let dst = guard.array.as_mut_ptr().add(idx);
crate::ptr::copy_nonoverlapping(opt_payload_at, dst, 1);
crate::mem::forget(val);
4 changes: 1 addition & 3 deletions core/src/ptr/mod.rs
@@ -1730,7 +1730,7 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
// `dst` cannot overlap `src` because the caller has mutable access
// to `dst` while `src` is owned by this function.
unsafe {
-copy_nonoverlapping(addr_of!(src) as *const u8, dst as *mut u8, mem::size_of::<T>());
+copy_nonoverlapping((&raw const src) as *const u8, dst as *mut u8, mem::size_of::<T>());
// We are calling the intrinsic directly to avoid function calls in the generated code.
intrinsics::forget(src);
}
@@ -2348,7 +2348,6 @@ impl<F: FnPtr> fmt::Debug for F {
/// no difference whether the pointer is null or dangling.)
#[stable(feature = "raw_ref_macros", since = "1.51.0")]
#[rustc_macro_transparency = "semitransparent"]
-#[allow_internal_unstable(raw_ref_op)]
pub macro addr_of($place:expr) {
&raw const $place
}
@@ -2439,7 +2438,6 @@ pub macro addr_of($place:expr) {
/// makes no difference whether the pointer is null or dangling.)
#[stable(feature = "raw_ref_macros", since = "1.51.0")]
#[rustc_macro_transparency = "semitransparent"]
-#[allow_internal_unstable(raw_ref_op)]
pub macro addr_of_mut($place:expr) {
&raw mut $place
}
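
With `raw_ref_op` stable, both macros are now thin, semitransparent wrappers over stable syntax, which is why the `#[allow_internal_unstable]` attributes above can be dropped. The two spellings are interchangeable; a small sketch:

```rust
fn main() {
    let val = 42u32;
    let a: *const u32 = std::ptr::addr_of!(val); // soft-deprecated macro
    let b: *const u32 = &raw const val;          // stable operator it expands to
    assert_eq!(a, b); // identical address and semantics
}
```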
2 changes: 1 addition & 1 deletion core/src/slice/iter.rs
@@ -11,7 +11,7 @@ use crate::iter::{
use crate::marker::PhantomData;
use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZero;
-use crate::ptr::{self, NonNull, without_provenance, without_provenance_mut};
+use crate::ptr::{NonNull, without_provenance, without_provenance_mut};
use crate::{cmp, fmt};

#[stable(feature = "boxed_slice_into_iter", since = "1.80.0")]
6 changes: 3 additions & 3 deletions core/src/slice/iter/macros.rs
@@ -14,11 +14,11 @@ macro_rules! if_zst {
if T::IS_ZST {
// SAFETY: for ZSTs, the pointer is storing a provenance-free length,
// so consuming and updating it as a `usize` is fine.
-let $len = unsafe { &mut *ptr::addr_of_mut!($this.end_or_len).cast::<usize>() };
+let $len = unsafe { &mut *(&raw mut $this.end_or_len).cast::<usize>() };
$zst_body
} else {
// SAFETY: for non-ZSTs, the type invariant ensures it cannot be null
-let $end = unsafe { &mut *ptr::addr_of_mut!($this.end_or_len).cast::<NonNull<T>>() };
+let $end = unsafe { &mut *(&raw mut $this.end_or_len).cast::<NonNull<T>>() };
$other_body
}
}};
@@ -30,7 +30,7 @@ macro_rules! if_zst {
$zst_body
} else {
// SAFETY: for non-ZSTs, the type invariant ensures it cannot be null
-let $end = unsafe { *ptr::addr_of!($this.end_or_len).cast::<NonNull<T>>() };
+let $end = unsafe { *(&raw const $this.end_or_len).cast::<NonNull<T>>() };
$other_body
}
}};
4 changes: 2 additions & 2 deletions core/src/slice/mod.rs
@@ -883,8 +883,8 @@ impl<T> [T] {
pub const fn swap(&mut self, a: usize, b: usize) {
// FIXME: use swap_unchecked here (https://github.com/rust-lang/rust/pull/88540#issuecomment-944344343)
// Can't take two mutable loans from one vector, so instead use raw pointers.
-let pa = ptr::addr_of_mut!(self[a]);
-let pb = ptr::addr_of_mut!(self[b]);
+let pa = &raw mut self[a];
+let pb = &raw mut self[b];
// SAFETY: `pa` and `pb` have been created from safe mutable references and refer
// to elements in the slice and therefore are guaranteed to be valid and aligned.
// Note that accessing the elements behind `a` and `b` is checked and will
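The two `&raw mut` borrows above sidestep the borrow checker's one-mutable-loan rule because no references are created, only raw pointers. The pattern looks roughly like this standalone sketch (`swap_demo` is illustrative, not the actual `[T]::swap`):

```rust
use std::ptr;

// Two raw pointers into the same slice can coexist where two `&mut`
// loans could not.
fn swap_demo(v: &mut [i32], a: usize, b: usize) {
    let pa = &raw mut v[a]; // indexing is still bounds-checked here
    let pb = &raw mut v[b];
    // SAFETY: `pa` and `pb` come from in-bounds indexing of the same live
    // slice, so both are valid and aligned. `ptr::swap` permits overlap,
    // so even `a == b` is fine.
    unsafe { ptr::swap(pa, pb) };
}

fn main() {
    let mut v = [1, 2, 3];
    swap_demo(&mut v, 0, 2);
    assert_eq!(v, [3, 2, 1]);
}
```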
2 changes: 1 addition & 1 deletion panic_unwind/src/emcc.rs
@@ -78,7 +78,7 @@ pub unsafe fn cleanup(ptr: *mut u8) -> Box<dyn Any + Send> {
super::__rust_foreign_exception();
}

-let canary = ptr::addr_of!((*adjusted_ptr).canary).read();
+let canary = (&raw const (*adjusted_ptr).canary).read();
if !ptr::eq(canary, &EXCEPTION_TYPE_INFO) {
super::__rust_foreign_exception();
}
2 changes: 1 addition & 1 deletion panic_unwind/src/gcc.rs
@@ -92,7 +92,7 @@ pub unsafe fn cleanup(ptr: *mut u8) -> Box<dyn Any + Send> {
let exception = exception.cast::<Exception>();
// Just access the canary field, avoid accessing the entire `Exception` as
// it can be a foreign Rust exception.
-let canary = ptr::addr_of!((*exception).canary).read();
+let canary = (&raw const (*exception).canary).read();
if !ptr::eq(canary, &CANARY) {
// A foreign Rust exception, treat it slightly differently from other
// foreign exceptions, because call into `_Unwind_DeleteException` will