Make UB transmutes really UB in LLVM #143718

Merged: 2 commits, Jul 12, 2025

14 changes: 4 additions & 10 deletions compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -207,9 +207,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 {
     // These cases are all UB to actually hit, so don't emit code for them.
     // (The size mismatches are reachable via `transmute_unchecked`.)
-    // We can't use unreachable because that's a terminator, and we
-    // need something that can be in the middle of a basic block.
-    bx.assume(bx.cx().const_bool(false))
+    bx.unreachable_nonterminator();
 } else {
     // Since in this path we have a place anyway, we can store or copy to it,
     // making sure we use the destination place's alignment even if the
@@ -236,14 +234,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     || operand.layout.is_uninhabited()
     || cast.is_uninhabited()
 {
-    if !operand.layout.is_uninhabited() {
-        // Since this is known statically and the input could have existed
-        // without already having hit UB, might as well trap for it.
-        bx.abort();
-    }
+    bx.unreachable_nonterminator();
 
-    // Because this transmute is UB, return something easy to generate,
-    // since it's fine that later uses of the value are probably UB.
+    // We still need to return a value of the appropriate type, but
+    // it's already UB so do the easiest thing available.
     return OperandValue::poison(bx, cast);
 }
 
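For context, the codegen path changed above is reached by size-changing or uninhabited transmutes, which are always UB to execute. Below is a minimal sketch of source code that lands on this path, assuming a nightly toolchain with `feature(core_intrinsics)`; the function name `widen` is illustrative and not from this PR.

```rust
// Hypothetical illustration, not part of this PR's diff: a size-changing
// transmute_unchecked is always UB to execute, so the backend can lower it
// to a poison store plus a poison result instead of llvm.assume(i1 false).
#![feature(core_intrinsics)]
#![allow(internal_features)]

use core::intrinsics::transmute_unchecked;

#[no_mangle]
pub unsafe fn widen(x: u16) -> u32 {
    // Reaching this call at runtime is undefined behavior: the source and
    // destination types have different sizes.
    unsafe { transmute_unchecked(x) }
}
```

Compiling a crate like this on nightly with `rustc --crate-type=lib -O --emit=llvm-ir` should now show `store i1 true, ptr poison, align 1` where the old lowering emitted `call void @llvm.assume(i1 false)`, matching the updated FileCheck expectations in the test diff below.
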
10 changes: 10 additions & 0 deletions compiler/rustc_codegen_ssa/src/traits/builder.rs
@@ -136,6 +136,16 @@ pub trait BuilderMethods<'a, 'tcx>:
     ) -> Self::Value;
     fn unreachable(&mut self);
 
+    /// Like [`Self::unreachable`], but for use in the middle of a basic block.
+    fn unreachable_nonterminator(&mut self) {
+        // This is the preferred LLVM incantation for this per
+        // https://llvm.org/docs/Frontend/PerformanceTips.html#other-things-to-consider
+        // Other backends may override if they have a better way.
+        let const_true = self.cx().const_bool(true);
+        let poison_ptr = self.const_poison(self.cx().type_ptr());
+        self.store(const_true, poison_ptr, Align::ONE);
+    }
+
     fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
     fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
     fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
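The default body added above only uses the generic const/builder methods, and its comment explicitly leaves room for backends with a better lowering to override it. The sketch below shows that override pattern with a self-contained toy trait rather than the real `rustc_codegen_ssa` traits; all names in it are illustrative.

```rust
// Toy stand-in for BuilderMethods: a defaulted method that one backend
// keeps and another overrides with its own "better way".
trait Builder {
    fn emit(&mut self, inst: &str);

    // Default lowering, mirroring the poison-store idiom added above.
    fn unreachable_nonterminator(&mut self) {
        self.emit("store i1 true, ptr poison, align 1");
    }
}

struct GenericBackend(Vec<String>);
struct FancyBackend(Vec<String>);

impl Builder for GenericBackend {
    fn emit(&mut self, inst: &str) {
        self.0.push(inst.to_owned());
    }
    // No override: inherits the default poison-store lowering.
}

impl Builder for FancyBackend {
    fn emit(&mut self, inst: &str) {
        self.0.push(inst.to_owned());
    }
    // Hypothetical backend with a dedicated non-terminator marker.
    fn unreachable_nonterminator(&mut self) {
        self.emit("unreachable.marker");
    }
}

fn main() {
    let mut a = GenericBackend(Vec::new());
    a.unreachable_nonterminator();
    assert_eq!(a.0, ["store i1 true, ptr poison, align 1"]);

    let mut b = FancyBackend(Vec::new());
    b.unreachable_nonterminator();
    assert_eq!(b.0, ["unreachable.marker"]);
}
```

The diff suggests the LLVM backend simply relies on this default, since the body is exactly the incantation the linked LLVM performance-tips page recommends.
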
37 changes: 20 additions & 17 deletions tests/codegen/intrinsics/transmute.rs
@@ -29,38 +29,38 @@ pub struct Aggregate8(u8);
 // CHECK-LABEL: @check_bigger_size(
 #[no_mangle]
 pub unsafe fn check_bigger_size(x: u16) -> u32 {
-    // CHECK: call void @llvm.assume(i1 false)
+    // CHECK: store i1 true, ptr poison, align 1
     transmute_unchecked(x)
 }
 
 // CHECK-LABEL: @check_smaller_size(
 #[no_mangle]
 pub unsafe fn check_smaller_size(x: u32) -> u16 {
-    // CHECK: call void @llvm.assume(i1 false)
+    // CHECK: store i1 true, ptr poison, align 1
    transmute_unchecked(x)
 }
 
 // CHECK-LABEL: @check_smaller_array(
 #[no_mangle]
 pub unsafe fn check_smaller_array(x: [u32; 7]) -> [u32; 3] {
-    // CHECK: call void @llvm.assume(i1 false)
+    // CHECK: store i1 true, ptr poison, align 1
     transmute_unchecked(x)
 }
 
 // CHECK-LABEL: @check_bigger_array(
 #[no_mangle]
 pub unsafe fn check_bigger_array(x: [u32; 3]) -> [u32; 7] {
-    // CHECK: call void @llvm.assume(i1 false)
+    // CHECK: store i1 true, ptr poison, align 1
     transmute_unchecked(x)
 }
 
 // CHECK-LABEL: @check_to_empty_array(
 #[no_mangle]
 #[custom_mir(dialect = "runtime", phase = "optimized")]
 pub unsafe fn check_to_empty_array(x: [u32; 5]) -> [u32; 0] {
-    // CHECK-NOT: trap
-    // CHECK: call void @llvm.trap
-    // CHECK-NOT: trap
+    // CHECK: start
+    // CHECK-NEXT: store i1 true, ptr poison, align 1
+    // CHECK-NEXT: ret void
     mir! {
         {
             RET = CastTransmute(x);
@@ -73,9 +73,9 @@ pub unsafe fn check_to_empty_array(x: [u32; 5]) -> [u32; 0] {
 #[no_mangle]
 #[custom_mir(dialect = "runtime", phase = "optimized")]
 pub unsafe fn check_from_empty_array(x: [u32; 0]) -> [u32; 5] {
-    // CHECK-NOT: call
-    // CHECK: call void @llvm.assume(i1 false)
-    // CHECK-NOT: call
+    // CHECK: start
+    // CHECK-NEXT: store i1 true, ptr poison, align 1
+    // CHECK-NEXT: ret void
     mir! {
         {
             RET = CastTransmute(x);
@@ -88,9 +88,9 @@ pub unsafe fn check_from_empty_array(x: [u32; 0]) -> [u32; 5] {
 #[no_mangle]
 #[custom_mir(dialect = "runtime", phase = "optimized")]
 pub unsafe fn check_to_uninhabited(x: u16) {
-    // CHECK-NOT: trap
-    // CHECK: call void @llvm.trap
-    // CHECK-NOT: trap
+    // CHECK: start
+    // CHECK-NEXT: store i1 true, ptr poison, align 1
+    // CHECK-NEXT: ret void
     mir! {
         let temp: BigNever;
         {
@@ -104,7 +104,9 @@ pub unsafe fn check_to_uninhabited(x: u16) {
 #[no_mangle]
 #[custom_mir(dialect = "runtime", phase = "optimized")]
 pub unsafe fn check_from_uninhabited(x: BigNever) -> u16 {
-    // CHECK: ret i16 poison
+    // CHECK: start
+    // CHECK-NEXT: store i1 true, ptr poison, align 1
+    // CHECK-NEXT: ret i16 poison
     mir! {
         {
             RET = CastTransmute(x);
@@ -401,9 +403,9 @@ pub unsafe fn check_issue_109992(x: ()) -> [(); 1] {
 pub unsafe fn check_unit_to_never(x: ()) {
     // This uses custom MIR to avoid MIR optimizations having removed ZST ops.
 
-    // CHECK-NOT: trap
-    // CHECK: call void @llvm.trap
-    // CHECK-NOT: trap
+    // CHECK: start
+    // CHECK-NEXT: store i1 true, ptr poison, align 1
+    // CHECK-NEXT: ret void
     mir! {
         let temp: ZstNever;
         {
@@ -420,6 +422,7 @@ pub unsafe fn check_unit_from_never(x: ZstNever) -> () {
     // This uses custom MIR to avoid MIR optimizations having removed ZST ops.
 
     // CHECK: start
+    // CHECK-NEXT: store i1 true, ptr poison, align 1
     // CHECK-NEXT: ret void
     mir! {
         {
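The custom-MIR tests above exist because, as their comments note, ordinary MIR optimizations can remove ZST or uninhabited transmutes before codegen ever sees them. A plain-Rust counterpart would look like the sketch below, where the `Never` type and `unit_to_never` name are local stand-ins rather than the test file's `ZstNever` and `check_unit_to_never`; in an optimized build the transmute may already be simplified away earlier, which is exactly what `#[custom_mir]` and `mir!` prevent.

```rust
// Hypothetical plain-Rust analogue of the ZST/uninhabited cases above;
// executing `unit_to_never` would be UB, and with this PR the codegen
// lowering for the transmute is the poison store checked for above.
#![feature(core_intrinsics)]
#![allow(internal_features)]

use core::intrinsics::transmute_unchecked;

pub enum Never {}

#[no_mangle]
pub unsafe fn unit_to_never(x: ()) {
    // UB if ever reached: `Never` has no values. MIR optimizations may
    // remove this ZST transmute before codegen, which is why the real
    // tests pin the MIR with `mir!` instead of writing it like this.
    let _temp: Never = unsafe { transmute_unchecked(x) };
}
```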