### Code

I tried this code: [playground]

```rust
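// `closed` converts via the built-in `Deref` coercion from `&Vec<u8>` to `&[u8]`.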
pub fn closed(v: &Vec<u8>) -> &[u8] {
v
}
pub fn open(v: &Vec<u8>) -> &[u8] {
vec_deref(v)
}
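// Open-coded equivalent of `<Vec<T> as Deref>::deref`.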
#[inline]
fn vec_deref<T>(v: &Vec<T>) -> &[T] {
unsafe { std::slice::from_raw_parts(v.as_ptr(), v.len()) }
}
```

I expected to see this happen: in release mode with default MIR optimization, both `closed` and `open` would generate equivalent optimized MIR, namely an inlined conversion from `&Vec<u8>` to `&[u8]` (deconstruct the `Vec` into ptr and len, then build the `&[_]`).

Instead, this happened: on 1.77.0, the inlining happens as expected. On 1.78.0-beta.3 (2024-03-27 4147533), `<Vec<u8> as Deref>::deref` (`vec_deref` for `open`) is called outline. Notably, the call is additionally not known to never unwind (`-> [return: …, unwind continue]` in MIR), which likely impedes optimization around the call.
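
For reference, here is a rough hand-written equivalent of what the fully inlined conversion is expected to boil down to (a hypothetical sketch for illustration, not actual compiler output):

```rust
// Hypothetical illustration only: roughly what `open` should reduce to once
// `as_ptr`, `len`, and `from_raw_parts` are all flattened into the caller.
pub fn open_expected(v: &Vec<u8>) -> &[u8] {
    let ptr: *const u8 = v.as_ptr(); // data pointer read out of the Vec
    let len: usize = v.len();        // length field read out of the Vec
    // SAFETY: ptr and len describe the live contents of `v`.
    unsafe { std::slice::from_raw_parts(ptr, len) }
}
```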

**`vec_deref` MIR on stable:**

```
fn vec_deref(_1: &Vec<T>) -> &[T] {
debug v => _1;
let mut _0: &[T];
let mut _2: *const T;
let mut _3: usize;
scope 1 {
scope 2 (inlined Vec::<T>::as_ptr) {
debug self => _1;
let mut _4: *mut T;
let mut _5: &alloc::raw_vec::RawVec<T>;
scope 3 (inlined alloc::raw_vec::RawVec::<T>::ptr) {
debug self => _5;
let mut _7: std::ptr::NonNull<T>;
scope 4 (inlined Unique::<T>::as_ptr) {
debug ((self: Unique<T>).0: std::ptr::NonNull<T>) => _7;
debug ((self: Unique<T>).1: std::marker::PhantomData<T>) => const ZeroSized: PhantomData<T>;
scope 5 (inlined NonNull::<T>::as_ptr) {
debug self => _7;
let mut _6: *const T;
}
}
}
}
scope 6 (inlined Vec::<T>::len) {
debug self => _1;
}
scope 7 (inlined std::slice::from_raw_parts::<'_, T>) {
debug data => _2;
debug len => _3;
let _8: *const [T];
scope 8 {
scope 9 (inlined slice_from_raw_parts::<T>) {
debug data => _2;
debug len => _3;
let mut _9: *const ();
scope 10 (inlined std::ptr::const_ptr::<impl *const T>::cast::<()>) {
debug self => _2;
}
scope 11 (inlined std::ptr::from_raw_parts::<[T]>) {
debug data_pointer => _9;
debug metadata => _3;
let mut _10: std::ptr::metadata::PtrRepr<[T]>;
let mut _11: std::ptr::metadata::PtrComponents<[T]>;
scope 12 {
}
}
}
}
}
}
bb0: {
StorageLive(_2);
StorageLive(_4);
StorageLive(_5);
_5 = &((*_1).0: alloc::raw_vec::RawVec<T>);
StorageLive(_7);
_7 = ((((*_1).0: alloc::raw_vec::RawVec<T>).0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>);
StorageLive(_6);
_6 = (_7.0: *const T);
_4 = move _6 as *mut T (PtrToPtr);
StorageDead(_6);
StorageDead(_7);
_2 = move _4 as *const T (PointerCoercion(MutToConstPointer));
StorageDead(_5);
StorageDead(_4);
StorageLive(_3);
_3 = ((*_1).1: usize);
StorageLive(_9);
_9 = _2 as *const () (PtrToPtr);
StorageLive(_10);
StorageLive(_11);
_11 = std::ptr::metadata::PtrComponents::<[T]> { data_pointer: _9, metadata: _3 };
_10 = std::ptr::metadata::PtrRepr::<[T]> { const_ptr: move _11 };
StorageDead(_11);
_8 = (_10.0: *const [T]);
StorageDead(_10);
StorageDead(_9);
_0 = &(*_8);
StorageDead(_3);
StorageDead(_2);
return;
}
}
```

**`vec_deref` MIR on beta:**

```
fn vec_deref(_1: &Vec<T>) -> &[T] {
debug v => _1;
let mut _0: &[T];
let mut _2: *const T;
let mut _3: usize;
scope 1 {
scope 2 (inlined Vec::<T>::as_ptr) {
debug self => _1;
let mut _4: &alloc::raw_vec::RawVec<T>;
scope 3 (inlined alloc::raw_vec::RawVec::<T>::ptr) {
debug self => _4;
let mut _5: std::ptr::NonNull<T>;
scope 4 (inlined Unique::<T>::as_ptr) {
debug ((self: Unique<T>).0: std::ptr::NonNull<T>) => _5;
debug ((self: Unique<T>).1: std::marker::PhantomData<T>) => const ZeroSized: PhantomData<T>;
scope 5 (inlined NonNull::<T>::as_ptr) {
debug self => _5;
}
}
}
}
scope 6 (inlined Vec::<T>::len) {
debug self => _1;
}
scope 7 (inlined std::slice::from_raw_parts::<'_, T>) {
debug data => _2;
debug len => _3;
let mut _6: bool;
let _7: ();
let mut _8: *mut ();
let mut _9: usize;
let mut _10: usize;
let _11: *const [T];
scope 8 {
scope 9 (inlined std::mem::size_of::<T>) {
}
scope 10 (inlined align_of::<T>) {
}
scope 11 (inlined slice_from_raw_parts::<T>) {
debug data => _2;
debug len => _3;
let mut _12: *const ();
scope 12 (inlined std::ptr::const_ptr::<impl *const T>::cast::<()>) {
debug self => _2;
}
scope 13 (inlined std::ptr::from_raw_parts::<[T]>) {
debug data_pointer => _12;
debug metadata => _3;
let mut _13: std::ptr::metadata::PtrRepr<[T]>;
let mut _14: std::ptr::metadata::PtrComponents<[T]>;
scope 14 {
}
}
}
}
}
}
bb0: {
StorageLive(_2);
StorageLive(_4);
_4 = &((*_1).0: alloc::raw_vec::RawVec<T>);
StorageLive(_5);
_5 = ((((*_1).0: alloc::raw_vec::RawVec<T>).0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>);
_2 = (_5.0: *const T);
StorageDead(_5);
StorageDead(_4);
StorageLive(_3);
_3 = ((*_1).1: usize);
StorageLive(_6);
_6 = UbCheck(LanguageUb);
switchInt(move _6) -> [0: bb3, otherwise: bb1];
}
bb1: {
StorageLive(_8);
_8 = _2 as *mut () (PtrToPtr);
StorageLive(_9);
_9 = SizeOf(T);
StorageLive(_10);
_10 = AlignOf(T);
_7 = std::slice::from_raw_parts::precondition_check(move _8, move _9, move _10, _3) -> [return: bb2, unwind unreachable];
}
bb2: {
StorageDead(_10);
StorageDead(_9);
StorageDead(_8);
goto -> bb3;
}
bb3: {
StorageDead(_6);
StorageLive(_12);
_12 = _2 as *const () (PtrToPtr);
StorageLive(_13);
StorageLive(_14);
_14 = std::ptr::metadata::PtrComponents::<[T]> { data_pointer: _12, metadata: _3 };
_13 = std::ptr::metadata::PtrRepr::<[T]> { const_ptr: move _14 };
StorageDead(_14);
_11 = (_13.0: *const [T]);
StorageDead(_13);
StorageDead(_12);
_0 = &(*_11);
StorageDead(_3);
StorageDead(_2);
return;
}
}
```

On stable, `vec_deref`'s optimized MIR is a single straight-line basic block. On beta, it contains a `switchInt` on `UbCheck(LanguageUb)` guarding a conditional call to `std::slice::from_raw_parts::precondition_check`, resulting in four basic blocks including an MIR call terminator. AIUI both of these metrics (block count and call count) directly contribute to the MIR inlining heuristics.
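
If the immediate goal is just to keep this particular helper inlined despite the larger beta body, one possible workaround (my own sketch, not something I have verified against the beta compiler) is to request unconditional inlining so the size-based heuristic no longer applies:

```rust
// Possible workaround sketch (not part of the original report): `#[inline(always)]`
// asks both the MIR inliner and LLVM to inline the helper regardless of its
// estimated cost, sidestepping the block/call-count heuristic.
#[inline(always)]
fn vec_deref_always<T>(v: &Vec<T>) -> &[T] {
    // SAFETY: the pointer and length come from a live `Vec<T>`, so they
    // satisfy `slice::from_raw_parts`'s requirements.
    unsafe { std::slice::from_raw_parts(v.as_ptr(), v.len()) }
}
```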

**stable `slice::from_raw_parts`:**

```rust
pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
    // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
    unsafe {
        assert_unsafe_precondition!(
            "slice::from_raw_parts requires the pointer to be aligned and non-null, and the total size of the slice not to exceed `isize::MAX`",
            [T](data: *const T, len: usize) => is_aligned_and_not_null(data)
                && is_valid_allocation_size::<T>(len)
        );
        &*ptr::slice_from_raw_parts(data, len)
    }
}
```

**beta `slice::from_raw_parts`:**

```rust
pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
    // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
    unsafe {
        assert_unsafe_precondition!(
            check_language_ub,
            "slice::from_raw_parts requires the pointer to be aligned and non-null, and the total size of the slice not to exceed `isize::MAX`",
            (
                data: *mut () = data as *mut (),
                size: usize = size_of::<T>(),
                align: usize = align_of::<T>(),
                len: usize = len,
            ) =>
            is_aligned_and_not_null(data, align)
                && is_valid_allocation_size(size, len)
        );
        &*ptr::slice_from_raw_parts(data, len)
    }
}
```
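
Conceptually (a simplified sketch of my own, not the actual macro expansion), the beta form lowers to a run-time branch around an outlined check, which is exactly the extra control flow visible in the beta MIR above:

```rust
// Simplified model of the shape produced by the beta macro. The names here are
// hypothetical: the real branch condition is the `UbCheck(LanguageUb)` MIR
// operand and the real callee is `slice::from_raw_parts::precondition_check`.
fn from_raw_parts_shape<T>(data: *const T, len: usize) -> *const [T] {
    let ub_checks_enabled = cfg!(debug_assertions); // stand-in for UbCheck(LanguageUb)
    if ub_checks_enabled {
        // outlined, non-unwinding check of alignment/non-null/size, taking
        // (data as *mut (), size_of::<T>(), align_of::<T>(), len)
    }
    core::ptr::slice_from_raw_parts(data, len)
}
```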

### Version it worked on

It most recently worked on: Rust Stable 1.77.0

### Version with regression

It isn't working on: Rust Beta 1.78.0-beta.3 (2024-03-27 4147533)

### Meta

Found on irlo while looking at the inlining behavior of `ToString`.

@rustbot modify labels: +regression-from-stable-to-beta

(I didn't include `-regression-untriaged` since I didn't try to narrow it down any further than playground stable/beta, but I don't know the convention here.)

cc @saethlin (IIRC you've been working on `assert_unsafe_precondition!`s and other `precondition_check`s)