From 5d8140df6fae50378db53c40ee89a23bc931ed5f Mon Sep 17 00:00:00 2001
From: Lukas Markeffsky <@>
Date: Sun, 27 Oct 2024 17:47:00 +0100
Subject: [PATCH 1/3] add test for panicking drop in Box/Rc/Arc

---
 alloc/tests/arc.rs   | 40 ++++++++++++++++++++++++++++++++++++++--
 alloc/tests/boxed.rs | 38 ++++++++++++++++++++++++++++++++++++++
 alloc/tests/rc.rs    | 38 +++++++++++++++++++++++++++++++++++++-
 3 files changed, 113 insertions(+), 3 deletions(-)

diff --git a/alloc/tests/arc.rs b/alloc/tests/arc.rs
index dc27c578b57ef..a259c0131ecdf 100644
--- a/alloc/tests/arc.rs
+++ b/alloc/tests/arc.rs
@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
 use std::sync::{Arc, Weak};
@@ -89,7 +89,7 @@ fn eq() {
 
 // The test code below is identical to that in `rc.rs`.
 // For better maintainability we therefore define this type alias.
-type Rc<T> = Arc<T>;
+type Rc<T, A = std::alloc::Global> = Arc<T, A>;
 
 const SHARED_ITER_MAX: u16 = 100;
 
@@ -210,6 +210,42 @@ fn weak_may_dangle() {
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
 
+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<usize>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop;
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let rc = Rc::new_in(PanicOnDrop, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(rc))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 /// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
 #[test]
 fn make_mut_unsized() {
diff --git a/alloc/tests/boxed.rs b/alloc/tests/boxed.rs
index bfc31a626fadd..2d172fc213799 100644
--- a/alloc/tests/boxed.rs
+++ b/alloc/tests/boxed.rs
@@ -59,6 +59,44 @@ fn box_deref_lval() {
     assert_eq!(x.get(), 1000);
 }
 
+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<usize>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop {
+        _data: u8,
+    }
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let b = Box::new_in(PanicOnDrop { _data: 42 }, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(b))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 #[allow(unused)]
 pub struct ConstAllocator;
 
diff --git a/alloc/tests/rc.rs b/alloc/tests/rc.rs
index 29dbdcf225eb5..451765d724283 100644
--- a/alloc/tests/rc.rs
+++ b/alloc/tests/rc.rs
@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
 use std::rc::{Rc, Weak};
@@ -206,6 +206,42 @@ fn weak_may_dangle() {
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::rc::Weak`
 }
 
+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<usize>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop;
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let rc = Rc::new_in(PanicOnDrop, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(rc))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 #[allow(unused)]
 mod pin_coerce_unsized {
     use alloc::rc::{Rc, UniqueRc};

From 28223bdba1d56a3dd406b5fd67f24ec5f1d502cc Mon Sep 17 00:00:00 2001
From: Lukas Markeffsky <@>
Date: Sun, 27 Oct 2024 17:48:15 +0100
Subject: [PATCH 2/3] Rc/Arc: don't leak the allocation if drop panics

---
 alloc/src/rc.rs   | 19 ++++++++-----------
 alloc/src/sync.rs | 16 +++++++++-------
 2 files changed, 17 insertions(+), 18 deletions(-)

diff --git a/alloc/src/rc.rs b/alloc/src/rc.rs
index 9fdd51ce331fb..52ec8237d834a 100644
--- a/alloc/src/rc.rs
+++ b/alloc/src/rc.rs
@@ -2256,17 +2256,14 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
         unsafe {
             self.inner().dec_strong();
             if self.inner().strong() == 0 {
-                // destroy the contained object
-                ptr::drop_in_place(Self::get_mut_unchecked(self));
-
-                // remove the implicit "strong weak" pointer now that we've
-                // destroyed the contents.
-                self.inner().dec_weak();
-
-                if self.inner().weak() == 0 {
-                    self.alloc
-                        .deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
-                }
+                // Reconstruct the "strong weak" pointer and drop it when this
+                // variable goes out of scope. This ensures that the memory is
+                // deallocated even if the destructor of `T` panics.
+                let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
+                // Destroy the contained object.
+                // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+                ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
             }
         }
     }
diff --git a/alloc/src/sync.rs b/alloc/src/sync.rs
index 15a1b0f283449..98a2fe242570f 100644
--- a/alloc/src/sync.rs
+++ b/alloc/src/sync.rs
@@ -1872,15 +1872,17 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
+        // Drop the weak ref collectively held by all strong references when this
+        // variable goes out of scope. This ensures that the memory is deallocated
+        // even if the destructor of `T` panics.
+        // Take a reference to `self.alloc` instead of cloning because 1. it'll last long
+        // enough, and 2. you should be able to drop `Arc`s with unclonable allocators
+        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
         // Destroy the data at this time, even though we must not free the box
         // allocation itself (there might still be weak pointers lying around).
-        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
-
-        // Drop the weak ref collectively held by all strong references
-        // Take a reference to `self.alloc` instead of cloning because 1. it'll
-        // last long enough, and 2. you should be able to drop `Arc`s with
-        // unclonable allocators
-        drop(Weak { ptr: self.ptr, alloc: &self.alloc });
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
     }
 
     /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to

From 9318ae3e6adf4c181f6064ad244a3c57c1ff59b1 Mon Sep 17 00:00:00 2001
From: Lukas Markeffsky <@>
Date: Mon, 28 Oct 2024 22:59:14 +0100
Subject: [PATCH 3/3] Rc destructor: tweak inlining

---
 alloc/src/rc.rs | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/alloc/src/rc.rs b/alloc/src/rc.rs
index 52ec8237d834a..fc8646e96d948 100644
--- a/alloc/src/rc.rs
+++ b/alloc/src/rc.rs
@@ -376,6 +376,21 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
         unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
     }
+
+    // Non-inlined part of `drop`.
+    #[inline(never)]
+    unsafe fn drop_slow(&mut self) {
+        // Reconstruct the "strong weak" pointer and drop it when this
+        // variable goes out of scope. This ensures that the memory is
+        // deallocated even if the destructor of `T` panics.
+        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
+        // Destroy the contained object.
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe {
+            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
+        }
+    }
 }
 
 impl<T> Rc<T> {
@@ -2252,18 +2267,12 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
     /// drop(foo); // Doesn't print anything
     /// drop(foo2); // Prints "dropped!"
     /// ```
+    #[inline]
     fn drop(&mut self) {
         unsafe {
             self.inner().dec_strong();
             if self.inner().strong() == 0 {
-                // Reconstruct the "strong weak" pointer and drop it when this
-                // variable goes out of scope. This ensures that the memory is
-                // deallocated even if the destructor of `T` panics.
-                let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
-
-                // Destroy the contained object.
-                // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
-                ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
+                self.drop_slow();
             }
         }
     }
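
Note (illustration only, not part of the patch series): patches 2 and 3 rely on the guarantee that
local variables are dropped during unwinding, so reconstructing the "strong weak" pointer *before*
calling `ptr::drop_in_place` means its destructor still frees the allocation even if `T`'s
destructor panics. The standalone sketch below demonstrates that guard pattern with made-up names;
`DeallocGuard` is hypothetical and only prints instead of deallocating.

    use std::panic::catch_unwind;

    /// Stand-in for the reconstructed "strong weak" pointer: in the real
    /// `Rc`/`Arc` code, this destructor is where the allocation is freed.
    struct DeallocGuard;

    impl Drop for DeallocGuard {
        fn drop(&mut self) {
            println!("guard dropped: the allocation would be freed here");
        }
    }

    struct PanicOnDrop;

    impl Drop for PanicOnDrop {
        fn drop(&mut self) {
            panic!("PanicOnDrop");
        }
    }

    fn main() {
        let result = catch_unwind(|| {
            // Guard first, payload second: if dropping the payload panics,
            // unwinding still runs the guard's destructor.
            let _guard = DeallocGuard;
            drop(PanicOnDrop);
        });
        // The panic was caught, and the guard's message was printed despite it.
        assert!(result.is_err());
    }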