Global allocation bit #390

Merged: 20 commits, Aug 19, 2021
4 changes: 4 additions & 0 deletions Cargo.toml
@@ -49,6 +49,7 @@ perf_counter = ["pfm"]
# .github/scripts/ci-common.sh extracts features from the following part (including from comments).
# So be careful when editing or adding stuff to the section below.


# Do not modify the following line - ci-common.sh matches it
# -- Non mutually exclusive features --

@@ -57,6 +58,9 @@ vm_space = []
ro_space = []
code_space = []

# metadata
global_alloc_bit = []

# Run sanity GC
sanity = []
# Run analysis
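The new `global_alloc_bit` feature is off by default. A binding would opt in through its own manifest; a minimal sketch, assuming a git dependency on mmtk-core (the exact dependency line is hypothetical):

```toml
[dependencies]
mmtk = { git = "https://github.com/mmtk/mmtk-core.git", features = ["global_alloc_bit"] }
```

For working on mmtk-core itself, `cargo build --features global_alloc_bit` enables it directly.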
21 changes: 18 additions & 3 deletions src/plan/immix/global.rs
@@ -16,6 +16,7 @@ use crate::util::heap::layout::heap_layout::Mmapper;
use crate::util::heap::layout::heap_layout::VMMap;
use crate::util::heap::layout::vm_layout_constants::{HEAP_END, HEAP_START};
use crate::util::heap::HeapMeta;
use crate::util::metadata::side_metadata::SideMetadataContext;
use crate::util::metadata::side_metadata::SideMetadataSanity;
use crate::util::options::UnsafeOptionsWrapper;
#[cfg(feature = "sanity")]
@@ -170,10 +171,24 @@ impl<VM: VMBinding> Immix<VM> {
scheduler: Arc<GCWorkScheduler<VM>>,
) -> Self {
let mut heap = HeapMeta::new(HEAP_START, HEAP_END);

let global_metadata_specs = SideMetadataContext::new_global_specs(&[]);
let immix = Immix {
immix_space: ImmixSpace::new("immix", vm_map, mmapper, &mut heap, scheduler, vec![]),
common: CommonPlan::new(vm_map, mmapper, options, heap, &IMMIX_CONSTRAINTS, vec![]),
immix_space: ImmixSpace::new(
"immix",
vm_map,
mmapper,
&mut heap,
scheduler,
global_metadata_specs.clone(),
),
common: CommonPlan::new(
vm_map,
mmapper,
options,
heap,
&IMMIX_CONSTRAINTS,
global_metadata_specs,
),
};

{
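Both the immix space and the common plan now receive a clone of the same global spec list, so every space in the plan maps the same global side metadata, including the alloc bit when the feature is enabled, over its own address range.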
12 changes: 12 additions & 0 deletions src/plan/marksweep/global.rs
@@ -14,6 +14,8 @@ use crate::policy::space::Space;
use crate::scheduler::gc_work::*;
use crate::scheduler::*;
use crate::util::alloc::allocators::AllocatorSelector;
#[cfg(not(feature = "global_alloc_bit"))]
use crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC;
#[cfg(feature = "analysis")]
use crate::util::analysis::GcHookWork;
use crate::util::heap::layout::heap_layout::Mmapper;
@@ -136,8 +138,18 @@ impl<VM: VMBinding> MarkSweep<VM> {
options: Arc<UnsafeOptionsWrapper>,
) -> Self {
let heap = HeapMeta::new(HEAP_START, HEAP_END);
// if global_alloc_bit is enabled, ALLOC_SIDE_METADATA_SPEC will be added to
// SideMetadataContext by default, so we don't need to add it here.
#[cfg(feature = "global_alloc_bit")]
[Review comment, Member] Some comments are needed here because this looks counter-intuitive. Normally people would expect that, with global_alloc_bit, we need the alloc-bit spec. But we are not adding the alloc-bit spec for global_alloc_bit because, in that case, the spec is already added to the global specs in new_global_specs().

let global_metadata_specs =
SideMetadataContext::new_global_specs(&[ACTIVE_CHUNK_METADATA_SPEC]);
// if global_alloc_bit is NOT enabled,
// we need to add ALLOC_SIDE_METADATA_SPEC to SideMetadataContext here.
#[cfg(not(feature = "global_alloc_bit"))]
let global_metadata_specs = SideMetadataContext::new_global_specs(&[
ALLOC_SIDE_METADATA_SPEC,
ACTIVE_CHUNK_METADATA_SPEC,
]);

let res = MarkSweep {
ms: MallocSpace::new(global_metadata_specs.clone()),
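The asymmetry the review comment asks to document falls out of how the global spec list is assembled. A minimal sketch of the `new_global_specs` behavior this PR relies on, reconstructed from the comment above rather than copied from the source:

```rust
impl SideMetadataContext {
    /// Build the list of global side-metadata specs for a plan.
    pub fn new_global_specs(specs: &[SideMetadataSpec]) -> Vec<SideMetadataSpec> {
        let mut ret = vec![];
        // With global_alloc_bit enabled, every plan gets the alloc-bit
        // spec automatically, so MarkSweep must not add it a second time.
        #[cfg(feature = "global_alloc_bit")]
        ret.push(crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC);
        // Plan-specific global specs (e.g. ACTIVE_CHUNK_METADATA_SPEC) follow.
        ret.extend_from_slice(specs);
        ret
    }
}
```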
28 changes: 27 additions & 1 deletion src/policy/copyspace.rs
@@ -4,6 +4,8 @@ use crate::policy::space::SpaceOptions;
use crate::policy::space::{CommonSpace, Space, SFT};
use crate::util::constants::CARD_META_PAGES_PER_REGION;
use crate::util::heap::layout::heap_layout::{Mmapper, VMMap};
#[cfg(feature = "global_alloc_bit")]
use crate::util::heap::layout::vm_layout_constants::BYTES_IN_CHUNK;
use crate::util::heap::HeapMeta;
use crate::util::heap::VMRequest;
use crate::util::heap::{MonotonePageResource, PageResource};
@@ -38,7 +40,10 @@ impl<VM: VMBinding> SFT for CopySpace<VM> {
fn is_sane(&self) -> bool {
!self.from_space()
}
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {}
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::set_alloc_bit(_object);
}
#[inline(always)]
fn get_forwarded_object(&self, object: ObjectReference) -> Option<ObjectReference> {
if !self.from_space() {
@@ -142,12 +147,27 @@

pub fn release(&self) {
unsafe {
#[cfg(feature = "global_alloc_bit")]
self.reset_alloc_bit();
self.pr.reset();
}
self.common.metadata.reset();
self.from_space.store(false, Ordering::SeqCst);
}

#[cfg(feature = "global_alloc_bit")]
unsafe fn reset_alloc_bit(&self) {
let current_chunk = self.pr.get_current_chunk();
if self.common.contiguous {
crate::util::alloc_bit::bzero_alloc_bit(
self.common.start,
current_chunk + BYTES_IN_CHUNK - self.common.start,
);
} else {
unimplemented!();
}
}
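A note on the range math: for a contiguous space the cleared length is `current_chunk + BYTES_IN_CHUNK - self.common.start`, i.e. everything from the space's start up to the end of the chunk currently in use, rounding the high end up to a chunk boundary so a partially used chunk is fully covered. The discontiguous case is left `unimplemented!()` for now.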

fn from_space(&self) -> bool {
self.from_space.load(Ordering::SeqCst)
}
@@ -164,6 +184,12 @@ impl<VM: VMBinding> CopySpace<VM> {
if !self.from_space() {
return object;
}
#[cfg(feature = "global_alloc_bit")]
debug_assert!(
crate::util::alloc_bit::is_alloced(object),
"{:x}: alloc bit not set",
object
);
trace!("attempting to forward");
let forwarding_status = object_forwarding::attempt_to_forward::<VM>(object);
trace!("checking if object is being forwarded");
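The `debug_assert!` added to the trace path here, and mirrored in the other policies below, is the immediate payoff of a global alloc bit: every reference reaching trace_object must point at memory the allocator actually handed out, so a clear bit surfaces dangling or corrupted references in debug builds instead of silently tracing garbage.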
2 changes: 2 additions & 0 deletions src/policy/immix/block.rs
@@ -225,6 +225,8 @@ impl Block {
/// Deinitialize a block before releasing.
#[inline]
pub fn deinit(&self) {
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::bzero_alloc_bit(self.start(), Self::BYTES);
self.set_state(BlockState::Unallocated);
}

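Because `deinit` runs when a whole block is released, a single `bzero_alloc_bit` over `Self::BYTES` clears the bits of every dead object in the block without enumerating them.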
13 changes: 12 additions & 1 deletion src/policy/immix/immixspace.rs
@@ -67,7 +67,10 @@ impl<VM: VMBinding> SFT for ImmixSpace<VM> {
fn is_sane(&self) -> bool {
true
}
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {}
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::set_alloc_bit(_object);
}
}

impl<VM: VMBinding> Space<VM> for ImmixSpace<VM> {
@@ -305,6 +308,12 @@ impl<VM: VMBinding> ImmixSpace<VM> {
semantics: AllocationSemantics,
copy_context: &mut impl CopyContext,
) -> ObjectReference {
#[cfg(feature = "global_alloc_bit")]
debug_assert!(
crate::util::alloc_bit::is_alloced(object),
"{:x}: alloc bit not set",
object
);
if Block::containing::<VM>(object).is_defrag_source() {
self.trace_object_with_opportunistic_copy(trace, object, semantics, copy_context)
} else {
@@ -358,6 +367,8 @@ impl<VM: VMBinding> ImmixSpace<VM> {
Block::containing::<VM>(object).set_state(BlockState::Marked);
object
} else {
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::unset_alloc_bit(object);
ForwardingWord::forward_object::<VM, _>(object, semantics, copy_context)
};
if !super::MARK_LINE_AT_SCAN_TIME {
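Note the order of operations in the copying branch: the old copy's alloc bit is cleared before `forward_object` runs, and the new copy presumably gets its bit set when the destination space initializes the copied object's metadata, leaving exactly one copy with its bit set once forwarding completes.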
8 changes: 8 additions & 0 deletions src/policy/immortalspace.rs
@@ -73,6 +73,8 @@ impl<VM: VMBinding> SFT for ImmortalSpace<VM> {
if self.common.needs_log_bit {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
}
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::set_alloc_bit(object);
}
}

@@ -183,6 +185,12 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
trace: &mut T,
object: ObjectReference,
) -> ObjectReference {
#[cfg(feature = "global_alloc_bit")]
debug_assert!(
crate::util::alloc_bit::is_alloced(object),
"{:x}: alloc bit not set",
object
);
if ImmortalSpace::<VM>::test_and_mark(object, self.mark_state) {
trace.process_node(object);
}
12 changes: 12 additions & 0 deletions src/policy/largeobjectspace.rs
@@ -73,6 +73,8 @@ impl<VM: VMBinding> SFT for LargeObjectSpace<VM> {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
}

#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::set_alloc_bit(object);
let cell = VM::VMObjectModel::object_start_ref(object);
self.treadmill.add_to_treadmill(cell, alloc);
}
@@ -172,6 +174,12 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
trace: &mut T,
object: ObjectReference,
) -> ObjectReference {
#[cfg(feature = "global_alloc_bit")]
debug_assert!(
crate::util::alloc_bit::is_alloced(object),
"{:x}: alloc bit not set",
object
);
let nursery_object = self.is_in_nursery(object);
if !self.in_nursery_gc || nursery_object {
// Note that test_and_mark() has side effects
@@ -192,11 +200,15 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
if sweep_nursery {
for cell in self.treadmill.collect_nursery() {
// println!("- cn {}", cell);
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::unset_addr_alloc_bit(cell);
self.pr.release_pages(get_super_page(cell));
}
} else {
for cell in self.treadmill.collect() {
// println!("- ts {}", cell);
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::unset_addr_alloc_bit(cell);
self.pr.release_pages(get_super_page(cell));
}
}
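The treadmill tracks cells, raw start addresses from `object_start_ref`, rather than `ObjectReference`s, hence the address-based `unset_addr_alloc_bit` variant in the sweep loops. A sketch of what it plausibly looks like in `util::alloc_bit`, assuming it mirrors the object-based setter shown later in this diff (hypothetical reconstruction, not verbatim):

```rust
use std::sync::atomic::Ordering;

use crate::util::metadata::side_metadata;
use crate::util::Address;

/// Clear the alloc bit keyed by a raw cell address instead of
/// an ObjectReference's to_address().
pub fn unset_addr_alloc_bit(address: Address) {
    side_metadata::store_atomic(&ALLOC_SIDE_METADATA_SPEC, address, 0, Ordering::SeqCst);
}
```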
3 changes: 2 additions & 1 deletion src/policy/lockfreeimmortalspace.rs
@@ -55,7 +55,8 @@ impl<VM: VMBinding> SFT for LockFreeImmortalSpace<VM> {
unimplemented!()
}
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
unimplemented!()
#[cfg(feature = "global_alloc_bit")]
crate::util::alloc_bit::set_alloc_bit(_object);
}
}

10 changes: 6 additions & 4 deletions src/policy/mallocspace/global.rs
@@ -172,7 +172,6 @@ impl<VM: VMBinding> MallocSpace<VM> {
metadata: SideMetadataContext {
global: global_side_metadata_specs,
local: metadata::extract_side_metadata(&[
MetadataSpec::OnSide(ALLOC_SIDE_METADATA_SPEC),
MetadataSpec::OnSide(ACTIVE_PAGE_METADATA_SPEC),
*VM::VMObjectModel::LOCAL_MARK_BIT_SPEC,
]),
@@ -334,13 +333,15 @@ impl<VM: VMBinding> MallocSpace<VM> {
let mut last_on_page_boundary = false;

debug_assert!(
ALLOC_SIDE_METADATA_SPEC.log_min_obj_size == mark_bit_spec.log_min_obj_size,
crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC.log_min_obj_size
== mark_bit_spec.log_min_obj_size,
"Alloc-bit and mark-bit metadata have different minimum object sizes!"
);

// For bulk xor'ing 128-bit vectors on architectures with vector instructions
// Each bit represents an object of LOG_MIN_OBJ_SIZE size
let bulk_load_size: usize = 128 * (1 << ALLOC_SIDE_METADATA_SPEC.log_min_obj_size);
let bulk_load_size: usize =
128 * (1 << crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC.log_min_obj_size);

while address < chunk_end {
// We extensively tested the performance of the following if-statement and were
@@ -363,7 +364,8 @@
last_on_page_boundary = false;
}

let alloc_128: u128 = unsafe { load128(&ALLOC_SIDE_METADATA_SPEC, address) };
let alloc_128: u128 =
unsafe { load128(&crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC, address) };
let mark_128: u128 = unsafe { load128(&mark_bit_spec, address) };

// Check if there are dead objects in the bulk loaded region
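The `bulk_load_size` arithmetic: one `u128` holds 128 alloc bits, and each bit covers `1 << log_min_obj_size` bytes of heap, so with, say, a 16-byte minimum object size one load inspects 128 * 16 = 2048 bytes. Xor'ing the alloc vector against the mark vector is nonzero exactly when some allocated address in the range is unmarked, which lets the sweep detect dead objects 128 at a time.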
39 changes: 13 additions & 26 deletions src/policy/mallocspace/metadata.rs
@@ -1,10 +1,10 @@
use crate::util::alloc_bit;
use crate::util::heap::layout::vm_layout_constants::{BYTES_IN_CHUNK, LOG_BYTES_IN_CHUNK};
use crate::util::metadata::load_metadata;
use crate::util::metadata::side_metadata;
use crate::util::metadata::side_metadata::SideMetadataContext;
use crate::util::metadata::side_metadata::SideMetadataOffset;
use crate::util::metadata::side_metadata::SideMetadataSpec;
use crate::util::metadata::side_metadata::GLOBAL_SIDE_METADATA_BASE_OFFSET;
use crate::util::metadata::side_metadata::LOCAL_SIDE_METADATA_BASE_OFFSET;
use crate::util::metadata::store_metadata;
use crate::util::Address;
@@ -38,25 +38,11 @@ lazy_static! {
/// overwriting the previous mapping.
pub(crate) const ACTIVE_CHUNK_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
is_global: true,
offset: GLOBAL_SIDE_METADATA_BASE_OFFSET,
offset: SideMetadataOffset::layout_after(&crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC),
log_num_of_bits: 3,
log_min_obj_size: LOG_BYTES_IN_CHUNK as usize,
};

/// This is the metadata spec for the alloc-bit.
///
/// An alloc-bit is required per min-object-size aligned address, rather than per object, and can only exist as side metadata.
///
/// The other metadata used by MallocSpace is mark-bit, which is per-object and can be kept in object header if the VM allows it.
/// Thus, mark-bit is vm-dependant and is part of each VM's ObjectModel.
///
pub(crate) const ALLOC_SIDE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
is_global: false,
offset: LOCAL_SIDE_METADATA_BASE_OFFSET,
log_num_of_bits: 0,
log_min_obj_size: constants::LOG_MIN_OBJECT_SIZE as usize,
};
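The deleted spec is not gone: it moves to `src/util/alloc_bit.rs` and becomes global, which is what lets `ACTIVE_CHUNK_METADATA_SPEC` above lay itself out after it. A sketch of the relocated definition, inferred from the offsets used in this file rather than copied verbatim:

```rust
// In src/util/alloc_bit.rs: one bit per min-object-size-aligned
// address, now global so every plan and space shares one table.
pub const ALLOC_SIDE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
    is_global: true,
    offset: GLOBAL_SIDE_METADATA_BASE_OFFSET,
    log_num_of_bits: 0,
    log_min_obj_size: constants::LOG_MIN_OBJECT_SIZE as usize,
};
```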

/// Metadata spec for the active page byte
///
/// The active page metadata is used to accurately track the total number of pages that have
@@ -69,7 +55,7 @@ pub(crate) const ALLOC_SIDE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
// how many pages are active in this metadata spec. Explore SIMD vectorization with 8-bit integers
pub(crate) const ACTIVE_PAGE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
is_global: false,
offset: SideMetadataOffset::layout_after(&ALLOC_SIDE_METADATA_SPEC),
offset: LOCAL_SIDE_METADATA_BASE_OFFSET,
log_num_of_bits: 3,
log_min_obj_size: constants::LOG_BYTES_IN_PAGE as usize,
};
@@ -153,11 +139,11 @@ pub fn is_alloced(object: ObjectReference) -> bool {
}

pub fn is_alloced_object(address: Address) -> bool {
[Review comment, Member] I feel most of these functions can be removed. You can call alloc_bit::... directly.

side_metadata::load_atomic(&ALLOC_SIDE_METADATA_SPEC, address, Ordering::SeqCst) == 1
alloc_bit::is_alloced_object(address)
}

pub unsafe fn is_alloced_object_unsafe(address: Address) -> bool {
side_metadata::load(&ALLOC_SIDE_METADATA_SPEC, address) == 1
alloc_bit::is_alloced_object_unsafe(address)
}

pub fn is_marked<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) -> bool {
@@ -192,12 +178,7 @@ pub unsafe fn is_chunk_marked_unsafe(chunk_start: Address) -> bool {
}

pub fn set_alloc_bit(object: ObjectReference) {
side_metadata::store_atomic(
&ALLOC_SIDE_METADATA_SPEC,
object.to_address(),
1,
Ordering::SeqCst,
);
alloc_bit::set_alloc_bit(object);
}

pub fn set_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) {
@@ -210,6 +191,11 @@ pub fn set_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) {
);
}

#[allow(unused)]
pub fn unset_alloc_bit(object: ObjectReference) {
alloc_bit::unset_alloc_bit(object);
}

pub(super) fn set_page_mark(page_addr: Address) {
side_metadata::store_atomic(&ACTIVE_PAGE_METADATA_SPEC, page_addr, 1, Ordering::SeqCst);
}
@@ -224,9 +210,10 @@ pub(super) fn set_chunk_mark(chunk_start: Address) {
}

pub unsafe fn unset_alloc_bit_unsafe(object: ObjectReference) {
side_metadata::store(&ALLOC_SIDE_METADATA_SPEC, object.to_address(), 0);
alloc_bit::unset_alloc_bit_unsafe(object);
}

#[allow(unused)]
pub fn unset_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) {
store_metadata::<VM>(
&VM::VMObjectModel::LOCAL_MARK_BIT_SPEC,