Fix VO bits for Immix #849

Merged: 28 commits, Jun 24, 2023
Commits
465f445 WIP: Fix VO bits for Immix (wks, Jun 7, 2023)
409b336 WIP: Implement CopyFromMarkBits strategy (wks, Jun 8, 2023)
a1a6ed8 Merge branch 'master' into immix-clear-vo-bits (wks, Jun 8, 2023)
831b8d7 WIP: Minor fixes (wks, Jun 8, 2023)
7dec9a2 Extract VO-bit-related code into a module (wks, Jun 8, 2023)
a1a0a5e Minor changes (wks, Jun 14, 2023)
75b32cc Rename and minor fixes (wks, Jun 14, 2023)
bade4dd Clippy and fmt (wks, Jun 14, 2023)
0983db5 cargo doc (wks, Jun 14, 2023)
baedfe0 Let sanity GC check VO bit if enabled (wks, Jun 14, 2023)
722ee07 Do not use std::array::from_fn before bumping MSRV (wks, Jun 15, 2023)
723f862 Do not clutter main code. (wks, Jun 15, 2023)
c91bbaa Make the update strategy not Immix-specific (wks, Jun 15, 2023)
9ee0e0a Formatting and comment (wks, Jun 15, 2023)
3348422 Comments and minor fixes (wks, Jun 15, 2023)
20b8de8 Update src/vm/object_model.rs (wks, Jun 16, 2023)
b690e65 Suppress clippy warning for 1.61.0 (wks, Jun 16, 2023)
7bbaf5d Merge branch 'master' into immix-clear-vo-bits (wks, Jun 16, 2023)
e4b4ab6 vo_bit implies eager_sweeping (wks, Jun 16, 2023)
a4385df Support StickyImmix (wks, Jun 16, 2023)
3369085 Update comment to workaround CI script (wks, Jun 19, 2023)
976f420 Merge branch 'master' into immix-clear-vo-bits (wks, Jun 21, 2023)
de5ab0e Add unit test for copying side metadata. (wks, Jun 22, 2023)
dbd3e00 Metadata sanity support for bcopy metadata. (wks, Jun 22, 2023)
dff91eb Minor fixes (wks, Jun 23, 2023)
cfa7ed6 Update helper.rs (qinsoon, Jun 23, 2023)
dd5c3b5 Use ClearAndReconstruct for in-header mark bits (wks, Jun 23, 2023)
bd2903d Merge branch 'master' into immix-clear-vo-bits (wks, Jun 23, 2023)

Files changed
9 changes: 6 additions & 3 deletions Cargo.toml
@@ -83,9 +83,12 @@ ro_space = []
code_space = []

# Global valid object (VO) bit metadata.
# The VO bit is set when an object is allocated, and cleared when it is reclaimed.
# See `src/util/metadata/vo_bit.rs`
vo_bit = []
# The VO bit is set when an object is allocated, and cleared when the GC determines it is dead.
# See `src/util/metadata/vo_bit/mod.rs`
#
# eager_sweeping: VO bits for dead objects must have been cleared by the end of each GC.
# Native MarkSweep only ensures this in eager sweeping mode.
vo_bit = ["eager_sweeping"]

# conservative garbage collection support
is_mmtk_object = ["vo_bit"]
2 changes: 2 additions & 0 deletions src/plan/generational/immix/global.rs
@@ -247,6 +247,8 @@ impl<VM: VMBinding> GenImmix<VM> {
// We don't need to unlog objects at tracing. Instead, we unlog objects at copying.
// Any object is moved into the mature space, or is copied inside the mature space. We will unlog it.
unlog_object_when_traced: false,
// In GenImmix, young objects are not allocated in ImmixSpace directly.
mixed_age: false,
},
);

1 change: 1 addition & 0 deletions src/plan/immix/global.rs
@@ -138,6 +138,7 @@ impl<VM: VMBinding> Immix<VM> {
ImmixSpaceArgs {
reset_log_bit_in_major_gc: false,
unlog_object_when_traced: false,
mixed_age: false,
},
)
}
2 changes: 2 additions & 0 deletions src/plan/sticky/immix/global.rs
@@ -311,6 +311,8 @@ impl<VM: VMBinding> StickyImmix<VM> {
// In full heap GC, mature objects may die, and their unlogged bit needs to be reset.
// Along with the option above, we unlog them again during tracing.
reset_log_bit_in_major_gc: true,
// In StickyImmix, both young and old objects are allocated in the ImmixSpace.
mixed_age: true,
},
);
let full_heap_gc_count = immix.base().stats.new_event_counter("majorGC", true, true);
4 changes: 4 additions & 0 deletions src/policy/copyspace.rs
@@ -242,6 +242,10 @@ impl<VM: VMBinding> CopySpace<VM> {
semantics.unwrap(),
worker.get_copy_context_mut(),
);

#[cfg(feature = "vo_bit")]
crate::util::metadata::vo_bit::set_vo_bit::<VM>(new_object);

trace!("Forwarding pointer");
queue.enqueue(new_object);
trace!("Copied [{:?} -> {:?}]", object, new_object);
51 changes: 46 additions & 5 deletions src/policy/immix/block.rs
@@ -6,6 +6,8 @@ use crate::util::heap::blockpageresource::BlockPool;
use crate::util::heap::chunk_map::Chunk;
use crate::util::linear_scan::{Region, RegionIterator};
use crate::util::metadata::side_metadata::{MetadataByteArrayRef, SideMetadataSpec};
#[cfg(feature = "vo_bit")]
use crate::util::metadata::vo_bit;
use crate::util::Address;
use crate::vm::*;
use std::sync::atomic::Ordering;
@@ -166,8 +168,6 @@ impl Block {

/// Deinitialize a block before releasing.
pub fn deinit(&self) {
#[cfg(feature = "vo_bit")]
crate::util::metadata::vo_bit::bzero_vo_bit(self.start(), Self::BYTES);
self.set_state(BlockState::Unallocated);
}

@@ -198,11 +198,17 @@
match self.get_state() {
BlockState::Unallocated => false,
BlockState::Unmarked => {
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, false);

// Release the block if it is allocated but not marked by the current GC.
space.release_block(*self);
true
}
BlockState::Marked => {
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, true);

// The block is live.
false
}
@@ -224,9 +230,6 @@
holes += 1;
}

#[cfg(feature = "vo_bit")]
crate::util::metadata::vo_bit::bzero_vo_bit(line.start(), Line::BYTES);

#[cfg(feature = "immix_zero_on_release")]
crate::util::memory::zero(line.start(), Line::BYTES);

@@ -235,6 +238,9 @@
}

if marked_lines == 0 {
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, false);

// Release the block if none of its lines are marked.
space.release_block(*self);
true
@@ -254,10 +260,45 @@
mark_histogram[holes] += marked_lines;
// Record number of holes in block side metadata.
self.set_holes(holes);

#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, true);

false
}
}
}

/// Clear VO bits metadata for unmarked regions.
/// This is useful for clearing VO bits during StickyImmix nursery GCs,
/// when young objects (allocated in unmarked regions) may die,
/// while old objects (in marked regions) are always considered live.
#[cfg(feature = "vo_bit")]
pub fn clear_vo_bits_for_unmarked_regions(&self, line_mark_state: Option<u8>) {
match line_mark_state {
None => {
match self.get_state() {
BlockState::Unmarked => {
// It may contain young objects. Clear it.
vo_bit::bzero_vo_bit(self.start(), Self::BYTES);
}
BlockState::Marked => {
// It contains old objects. Skip it.
}
_ => unreachable!(),
}
}
Some(state) => {
// With lines.
for line in self.lines() {
if !line.is_marked(state) {
// It may contain young objects. Clear it.
vo_bit::bzero_vo_bit(line.start(), Line::BYTES);
}
}
}
}
}
}

/// A non-block single-linked list to store blocks.
129 changes: 118 additions & 11 deletions src/policy/immix/immixspace.rs
@@ -12,6 +12,8 @@ use crate::util::heap::BlockPageResource;
use crate::util::heap::PageResource;
use crate::util::linear_scan::{Region, RegionIterator};
use crate::util::metadata::side_metadata::SideMetadataSpec;
#[cfg(feature = "vo_bit")]
use crate::util::metadata::vo_bit;
use crate::util::metadata::{self, MetadataSpec};
use crate::util::object_forwarding as ForwardingWord;
use crate::util::{Address, ObjectReference};
@@ -67,6 +69,12 @@ pub struct ImmixSpaceArgs {
/// bit to differentiate them. So we reset all the log bits in major GCs,
/// and unlog the objects when they are traced (alive).
pub reset_log_bit_in_major_gc: bool,
/// Whether this ImmixSpace instance contains both young and old objects.
/// This affects the updating of valid-object bits. If some lines or blocks of this ImmixSpace
/// instance contain young objects, their VO bits need to be updated during this GC. Currently
/// only StickyImmix is affected. GenImmix allocates young objects in a separate CopySpace
/// nursery and its VO bits can be cleared in bulk.
pub mixed_age: bool,
}

unsafe impl<VM: VMBinding> Sync for ImmixSpace<VM> {}
@@ -278,6 +286,8 @@ impl<VM: VMBinding> ImmixSpace<VM> {
}

super::validate_features();
#[cfg(feature = "vo_bit")]
vo_bit::helper::validate_config::<VM>();
let vm_map = args.vm_map;
let scheduler = args.scheduler.clone();
let common =
@@ -394,6 +404,42 @@ impl<VM: VMBinding> ImmixSpace<VM> {
}
}
}

#[cfg(feature = "vo_bit")]
if vo_bit::helper::need_to_clear_vo_bits_before_tracing::<VM>() {
let maybe_scope = if major_gc {
// If it is major GC, we always clear all VO bits because we are doing full-heap
// tracing.
Some(VOBitsClearingScope::FullGC)
} else if self.space_args.mixed_age {
// StickyImmix nursery GC.
// Some lines (or blocks) contain only young objects,
// while other lines (or blocks) contain only old objects.
if super::BLOCK_ONLY {
// Block only. Young objects are only allocated into fully empty blocks.
// Only clear unmarked blocks.
Some(VOBitsClearingScope::BlockOnly)
} else {
// Young objects are allocated into empty lines.
// Only clear unmarked lines.
let line_mark_state = self.line_mark_state.load(Ordering::SeqCst);
Some(VOBitsClearingScope::Line {
state: line_mark_state,
})
}
} else {
// GenImmix nursery GC. We do nothing to the ImmixSpace because the nursery is a
// separate CopySpace. It'll clear its own VO bits.
None
};

if let Some(scope) = maybe_scope {
let work_packets = self
.chunk_map
.generate_tasks(|chunk| Box::new(ClearVOBitsAfterPrepare { chunk, scope }));
self.scheduler.work_buckets[WorkBucketStage::ClearVOBits].bulk_add(work_packets);
}
}
}

/// Release for the immix space. This is called when a GC finishes.
@@ -503,11 +549,8 @@ impl<VM: VMBinding> ImmixSpace<VM> {
object: ObjectReference,
) -> ObjectReference {
#[cfg(feature = "vo_bit")]
debug_assert!(
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
"{:x}: VO bit not set",
object
);
vo_bit::helper::on_trace_object::<VM>(object);

if self.attempt_mark(object, self.mark_state) {
// Mark block and lines
if !super::BLOCK_ONLY {
@@ -517,6 +560,10 @@ } else {
} else {
Block::containing::<VM>(object).set_state(BlockState::Marked);
}

#[cfg(feature = "vo_bit")]
vo_bit::helper::on_object_marked::<VM>(object);

// Visit node
queue.enqueue(object);
self.unlog_object_if_needed(object);
@@ -537,12 +584,10 @@ ) -> ObjectReference {
) -> ObjectReference {
let copy_context = worker.get_copy_context_mut();
debug_assert!(!super::BLOCK_ONLY);

#[cfg(feature = "vo_bit")]
debug_assert!(
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
"{:x}: VO bit not set",
object
);
vo_bit::helper::on_trace_object::<VM>(object);

let forwarding_status = ForwardingWord::attempt_to_forward::<VM>(object);
if ForwardingWord::state_is_forwarded_or_being_forwarded(forwarding_status) {
// We lost the forwarding race as some other thread has set the forwarding word; wait
Expand Down Expand Up @@ -589,16 +634,30 @@ impl<VM: VMBinding> ImmixSpace<VM> {
self.attempt_mark(object, self.mark_state);
ForwardingWord::clear_forwarding_bits::<VM>(object);
Block::containing::<VM>(object).set_state(BlockState::Marked);

#[cfg(feature = "vo_bit")]
vo_bit::helper::on_object_marked::<VM>(object);

object
} else {
// We are forwarding objects. When the copy allocator allocates the block, it should
// mark the block. So we do not need to explicitly mark it here.
ForwardingWord::forward_object::<VM>(object, semantics, copy_context)

// Clippy complains if the "vo_bit" feature is not enabled.
#[allow(clippy::let_and_return)]
let new_object =
ForwardingWord::forward_object::<VM>(object, semantics, copy_context);

#[cfg(feature = "vo_bit")]
vo_bit::helper::on_object_forwarded::<VM>(new_object);

new_object
};
debug_assert_eq!(
Block::containing::<VM>(new_object).get_state(),
BlockState::Marked
);

queue.enqueue(new_object);
debug_assert!(new_object.is_live());
self.unlog_object_if_needed(new_object);
Expand Down Expand Up @@ -1002,3 +1061,51 @@ impl<VM: VMBinding> ImmixHybridCopyContext<VM> {
self.defrag_allocator.immix_space()
}
}

#[cfg(feature = "vo_bit")]
#[derive(Clone, Copy)]
enum VOBitsClearingScope {
/// Clear all VO bits in all blocks.
FullGC,
/// Clear only unmarked blocks.
BlockOnly,
/// Clear only unmarked lines, i.e. lines whose mark state is **not** equal to `state`.
Line { state: u8 },
}

/// A work packet to clear VO bit metadata after Prepare.
#[cfg(feature = "vo_bit")]
struct ClearVOBitsAfterPrepare {
chunk: Chunk,
scope: VOBitsClearingScope,
}

#[cfg(feature = "vo_bit")]
impl<VM: VMBinding> GCWork<VM> for ClearVOBitsAfterPrepare {
fn do_work(&mut self, _worker: &mut GCWorker<VM>, _mmtk: &'static MMTK<VM>) {
match self.scope {
VOBitsClearingScope::FullGC => {
vo_bit::bzero_vo_bit(self.chunk.start(), Chunk::BYTES);
}
VOBitsClearingScope::BlockOnly => {
self.clear_blocks(None);
}
VOBitsClearingScope::Line { state } => {
self.clear_blocks(Some(state));
}
}
}
}

#[cfg(feature = "vo_bit")]
impl ClearVOBitsAfterPrepare {
fn clear_blocks(&mut self, line_mark_state: Option<u8>) {
for block in self
.chunk
.iter_region::<Block>()
.filter(|block| block.get_state() != BlockState::Unallocated)
{
block.clear_vo_bits_for_unmarked_regions(line_mark_state);
}
}
}
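
The `vo_bit::helper` functions called throughout this file and in `block.rs` (`validate_config`, `need_to_clear_vo_bits_before_tracing`, `on_trace_object`, `on_object_marked`, `on_object_forwarded`, `on_region_swept`) live in the new helper module, which is not shown in this diff. The sketch below is an assumed reading of that module: the strategy names come from the commit messages (`CopyFromMarkBits`, `ClearAndReconstruct`), but the selection rule and function bodies here are guesses, not the actual `helper.rs`.

```rust
// Sketch only: the strategy names come from the commit messages; the selection
// rule and function bodies are assumptions, not the real vo_bit helper module.
#[cfg(feature = "vo_bit")]
mod vo_bit_helper_sketch {
    use crate::util::linear_scan::Region;
    use crate::util::metadata::vo_bit;
    use crate::util::metadata::MetadataSpec;
    use crate::util::Address;
    use crate::vm::{ObjectModel, VMBinding};

    #[derive(Clone, Copy, PartialEq, Eq)]
    pub enum VOBitUpdateStrategy {
        /// Bulk-clear VO bits before tracing, then set the VO bit again for each
        /// object as it is marked, so only live objects end the GC with the bit set.
        ClearAndReconstruct,
        /// Keep VO bits during tracing; when a region is swept, overwrite its VO
        /// bits from the side mark bits, which identify exactly the live objects.
        CopyFromMarkBits,
    }

    pub fn strategy<VM: VMBinding>() -> VOBitUpdateStrategy {
        // CopyFromMarkBits needs the mark bit on the side so it can be bulk-copied.
        // With in-header mark bits, fall back to clearing and reconstructing
        // (cf. the commit "Use ClearAndReconstruct for in-header mark bits").
        match VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.as_spec() {
            MetadataSpec::OnSide(_) => VOBitUpdateStrategy::CopyFromMarkBits,
            _ => VOBitUpdateStrategy::ClearAndReconstruct,
        }
    }

    pub fn need_to_clear_vo_bits_before_tracing<VM: VMBinding>() -> bool {
        matches!(strategy::<VM>(), VOBitUpdateStrategy::ClearAndReconstruct)
    }

    pub fn on_region_swept<VM: VMBinding, R: Region>(region: &R, is_live: bool) {
        match strategy::<VM>() {
            // VO bits were already reconstructed while tracing; nothing left to do.
            VOBitUpdateStrategy::ClearAndReconstruct => {}
            VOBitUpdateStrategy::CopyFromMarkBits => {
                if is_live {
                    // Live region: copy the mark-bit side metadata over the VO bits.
                    copy_mark_bits_to_vo_bits(region.start(), R::BYTES);
                } else {
                    // Dead region: no live objects remain, so clear its VO bits.
                    vo_bit::bzero_vo_bit(region.start(), R::BYTES);
                }
            }
        }
    }

    /// Hypothetical stand-in for a bulk side-metadata copy routine
    /// (the PR adds such a copy plus a unit test for it).
    fn copy_mark_bits_to_vo_bits(_start: Address, _bytes: usize) {
        unimplemented!("stand-in for copying mark-bit side metadata to VO-bit metadata")
    }
}
```

Under this reading, `ClearAndReconstruct` is why `Prepare` schedules the `ClearVOBitsAfterPrepare` packets above, while `CopyFromMarkBits` is what the `on_region_swept` calls in the sweeping code of `block.rs` would exploit.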
28 changes: 8 additions & 20 deletions src/scheduler/scheduler.rs
@@ -5,11 +5,11 @@ use super::*;
use crate::mmtk::MMTK;
use crate::util::opaque_pointer::*;
use crate::util::options::AffinityKind;
use crate::util::rust_util::array_from_fn;
use crate::vm::Collection;
use crate::vm::{GCThreadContext, VMBinding};
use crossbeam::deque::{self, Steal};
use enum_map::Enum;
use enum_map::{enum_map, EnumMap};
use enum_map::{Enum, EnumMap};
use std::collections::HashMap;
use std::sync::Arc;

@@ -37,24 +37,12 @@ impl<VM: VMBinding> GCWorkScheduler<VM> {
let worker_group = WorkerGroup::new(num_workers);

// Create work buckets for workers.
let mut work_buckets = enum_map! {
WorkBucketStage::Unconstrained => WorkBucket::new(true, worker_monitor.clone()),
WorkBucketStage::Prepare => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::Closure => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::SoftRefClosure => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::WeakRefClosure => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::FinalRefClosure => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::PhantomRefClosure => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::VMRefClosure => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::CalculateForwarding => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::SecondRoots => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::RefForwarding => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::FinalizableForwarding => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::VMRefForwarding => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::Compact => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::Release => WorkBucket::new(false, worker_monitor.clone()),
WorkBucketStage::Final => WorkBucket::new(false, worker_monitor.clone()),
};
// TODO: Replace `array_from_fn` with `std::array::from_fn` after bumping MSRV.
let mut work_buckets = EnumMap::from_array(array_from_fn(|stage_num| {
let stage = WorkBucketStage::from_usize(stage_num);
let active = stage == WorkBucketStage::Unconstrained;
WorkBucket::new(active, worker_monitor.clone())
}));

// Set the open condition of each bucket.
{
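
The scheduler change above swaps the hand-written `enum_map!` for one `WorkBucket` per `WorkBucketStage` index, built with `array_from_fn` from `src/util/rust_util`. That helper is a stand-in for `std::array::from_fn`, which was only stabilized in Rust 1.63, above the crate's MSRV at the time (1.61.0, per the commit messages). A minimal sketch of such a substitute, assuming the real implementation may differ:

```rust
/// A stand-in for `std::array::from_fn` (stable since Rust 1.63) that works on
/// older toolchains. Sketch only; the real `array_from_fn` in `src/util/rust_util`
/// may be implemented differently (e.g. without the intermediate `Vec`).
pub fn array_from_fn<T, F, const N: usize>(f: F) -> [T; N]
where
    F: FnMut(usize) -> T,
{
    // Build the elements in index order, then convert the Vec into a fixed-size
    // array; `TryFrom<Vec<T>> for [T; N]` has been stable since Rust 1.48.
    let elements: Vec<T> = (0..N).map(f).collect();
    elements
        .try_into()
        .unwrap_or_else(|_| unreachable!("the Vec always has exactly N elements"))
}
```

With this in place, `EnumMap::from_array(array_from_fn(|stage_num| ...))` in the scheduler builds every bucket in stage order, opening only `WorkBucketStage::Unconstrained` up front.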