Liberate rustc_ast_lowering from rustc #70204

Merged: 11 commits, Mar 23, 2020
move DropArena -> libarena
Centril committed Mar 21, 2020
commit 6aff92a7d2095d7de1121d48d70e14e4ec680c9b
src/libarena/lib.rs: 81 additions, 0 deletions
@@ -488,5 +488,86 @@ impl DroplessArena {
}
}

/// Calls the destructor for an object when dropped.
struct DropType {
drop_fn: unsafe fn(*mut u8),
obj: *mut u8,
}

/// Drops the pointee as a value of type `T`; used as a type-erased `drop_fn`.
unsafe fn drop_for_type<T>(to_drop: *mut u8) {
std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
fn drop(&mut self) {
unsafe { (self.drop_fn)(self.obj) }
}
}

/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a PhantomData<T>
/// alongside this arena for each type T you allocate.
#[derive(Default)]
pub struct DropArena {
/// A list of destructors to run when the arena drops.
/// Ordered so `destructors` gets dropped before the arena
/// since its destructor can reference memory in the arena.
destructors: RefCell<Vec<DropType>>,
arena: DroplessArena,
}

impl DropArena {
#[inline]
pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
let mem =
self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
// Write into uninitialized memory.
ptr::write(mem, object);
let result = &mut *mem;
        // Record the destructor only after the allocation succeeds: allocation may
        // panic, and a destructor recorded beforehand would make `object`'s
        // destructor run twice (once during unwinding, once when the arena drops).
self.destructors
.borrow_mut()
.push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
result
}

#[inline]
pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
if vec.is_empty() {
return &mut [];
}
let len = vec.len();

let start_ptr = self
.arena
.alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
as *mut _ as *mut T;

let mut destructors = self.destructors.borrow_mut();
// Reserve space for the destructors so we can't panic while adding them
destructors.reserve(len);

        // Move the elements into the arena: copy them over, then forget the
        // drained contents of the SmallVec so they are not dropped a second time.
vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
mem::forget(vec.drain(..));

        // Record the destructors only after the elements have been moved into the
        // arena: recording them earlier could otherwise run an element's destructor
        // twice if a panic occurred in between.
for i in 0..len {
destructors.push(DropType {
drop_fn: drop_for_type::<T>,
obj: start_ptr.offset(i as isize) as *mut u8,
});
}

slice::from_raw_parts_mut(start_ptr, len)
}
}

#[cfg(test)]
mod tests;
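The `DropType` records above work by type erasure: `drop_for_type::<T>` is monomorphized per `T` but coerces to a plain `unsafe fn(*mut u8)`, so a single `Vec<DropType>` can hold destructors for values of many different types. The following is a minimal standalone sketch of that pattern, not part of this PR: it keeps two values of different types in `ManuallyDrop` and drops them through the erased function pointers, standing in for what the arena does with memory handed out by `DroplessArena::alloc_raw`.

use std::mem::ManuallyDrop;
use std::ptr;

/// Type-erased destructor record, mirroring `DropType` above.
struct ErasedDrop {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

/// Runs `T`'s destructor on the value behind the erased pointer.
unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    ptr::drop_in_place(to_drop as *mut T)
}

fn main() {
    // The values live in `ManuallyDrop` so they are not dropped automatically;
    // in the arena they would live in memory returned by `alloc_raw`.
    let mut s = ManuallyDrop::new(String::from("hello"));
    let mut v = ManuallyDrop::new(vec![1u32, 2, 3]);

    // One homogeneous list of destructors for heterogeneous values.
    let destructors = vec![
        ErasedDrop {
            drop_fn: drop_for_type::<String>,
            obj: &mut *s as *mut String as *mut u8,
        },
        ErasedDrop {
            drop_fn: drop_for_type::<Vec<u32>>,
            obj: &mut *v as *mut Vec<u32> as *mut u8,
        },
    ];

    for d in destructors {
        // SAFETY: each `obj` points to a live value of exactly the type that
        // `drop_fn` was instantiated with, and each value is dropped once.
        unsafe { (d.drop_fn)(d.obj) };
    }
}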
src/librustc/arena.rs: 1 addition, 86 deletions
@@ -1,10 +1,6 @@
-use arena::{DroplessArena, TypedArena};
-use smallvec::SmallVec;
-use std::cell::RefCell;
+use arena::{DropArena, DroplessArena, TypedArena};
 use std::marker::PhantomData;
-use std::mem;
-use std::ptr;
-use std::slice;

/// This declares a list of types which can be allocated by `Arena`.
///
@@ -275,84 +271,3 @@ impl<'tcx> Arena<'tcx> {
}
}
}

/// Calls the destructor for an object when dropped.
struct DropType {
drop_fn: unsafe fn(*mut u8),
obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
fn drop(&mut self) {
unsafe { (self.drop_fn)(self.obj) }
}
}

/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a PhantomData<T>
/// alongside this arena for each type T you allocate.
#[derive(Default)]
struct DropArena {
/// A list of destructors to run when the arena drops.
/// Ordered so `destructors` gets dropped before the arena
/// since its destructor can reference memory in the arena.
destructors: RefCell<Vec<DropType>>,
arena: DroplessArena,
}

impl DropArena {
#[inline]
unsafe fn alloc<T>(&self, object: T) -> &mut T {
let mem =
self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
// Write into uninitialized memory.
ptr::write(mem, object);
let result = &mut *mem;
        // Record the destructor only after the allocation succeeds: allocation may
        // panic, and a destructor recorded beforehand would make `object`'s
        // destructor run twice (once during unwinding, once when the arena drops).
self.destructors
.borrow_mut()
.push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
result
}

#[inline]
unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
if vec.is_empty() {
return &mut [];
}
let len = vec.len();

let start_ptr = self
.arena
.alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
as *mut _ as *mut T;

let mut destructors = self.destructors.borrow_mut();
// Reserve space for the destructors so we can't panic while adding them
destructors.reserve(len);

// Move the content to the arena by copying it and then forgetting
// the content of the SmallVec
vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
mem::forget(vec.drain(..));

        // Record the destructors only after the elements have been moved into the
        // arena: recording them earlier could otherwise run an element's destructor
        // twice if a panic occurred in between.
for i in 0..len {
destructors.push(DropType {
drop_fn: drop_for_type::<T>,
obj: start_ptr.offset(i as isize) as *mut u8,
});
}

slice::from_raw_parts_mut(start_ptr, len)
}
}
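The `DropArena` doc comment says allocation is only safe if the owner stores a `PhantomData<T>` alongside the arena for every `T` it allocates, so that ownership of those values is visible to the type system even though the arena itself is type-erased. Below is a minimal illustrative sketch of that convention for a single type; the wrapper `TypedDropArena` and the example in `main` are hypothetical, and it assumes the in-tree `arena` crate (libarena, from the diff above) is available as a dependency, as in the `use arena::DropArena;` import.

use std::marker::PhantomData;

use arena::DropArena; // in-tree libarena crate from the diff above

/// Hypothetical single-type wrapper following the `DropArena` safety rule:
/// a `PhantomData<T>` is stored alongside the arena for the one type `T`
/// that is ever allocated in it.
struct TypedDropArena<T> {
    arena: DropArena,
    _owns: PhantomData<T>,
}

impl<T> TypedDropArena<T> {
    fn new() -> Self {
        TypedDropArena { arena: DropArena::default(), _owns: PhantomData }
    }

    fn alloc(&self, value: T) -> &mut T {
        // SAFETY: only values of type `T` are ever placed in `self.arena`,
        // and `_owns: PhantomData<T>` records that the wrapper owns them.
        unsafe { self.arena.alloc(value) }
    }
}

fn main() {
    let strings = TypedDropArena::<String>::new();
    let s = strings.alloc(String::from("arena-owned"));
    s.push_str(" string");
    println!("{}", s);
} // `strings` drops here; its `DropArena` runs the recorded `String` destructor.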