Skip to content

Commit d2324cf

Browse files
authored
Global allocation bit (#390)
* add alloc_bit module
* make alloc_bit global
* add global_alloc_bit feature
1 parent 53a46c5 commit d2324cf

File tree

18 files changed

+255
-50
lines changed

18 files changed

+255
-50
lines changed

Cargo.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ perf_counter = ["pfm"]
4949
# .github/scripts/ci-common.sh extracts features from the following part (including from comments).
5050
# So be careful when editing or adding stuff to the section below.
5151

52+
5253
# Do not modify the following line - ci-common.sh matches it
5354
# -- Non mutually exclusive features --
5455

@@ -57,6 +58,9 @@ vm_space = []
5758
ro_space = []
5859
code_space = []
5960

61+
# metadata
62+
global_alloc_bit = []
63+
6064
# Run sanity GC
6165
sanity = []
6266
# Run analysis

src/plan/immix/global.rs

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ use crate::util::heap::layout::heap_layout::Mmapper;
1616
use crate::util::heap::layout::heap_layout::VMMap;
1717
use crate::util::heap::layout::vm_layout_constants::{HEAP_END, HEAP_START};
1818
use crate::util::heap::HeapMeta;
19+
use crate::util::metadata::side_metadata::SideMetadataContext;
1920
use crate::util::metadata::side_metadata::SideMetadataSanity;
2021
use crate::util::options::UnsafeOptionsWrapper;
2122
#[cfg(feature = "sanity")]
@@ -170,10 +171,24 @@ impl<VM: VMBinding> Immix<VM> {
170171
scheduler: Arc<GCWorkScheduler<VM>>,
171172
) -> Self {
172173
let mut heap = HeapMeta::new(HEAP_START, HEAP_END);
173-
174+
let global_metadata_specs = SideMetadataContext::new_global_specs(&[]);
174175
let immix = Immix {
175-
immix_space: ImmixSpace::new("immix", vm_map, mmapper, &mut heap, scheduler, vec![]),
176-
common: CommonPlan::new(vm_map, mmapper, options, heap, &IMMIX_CONSTRAINTS, vec![]),
176+
immix_space: ImmixSpace::new(
177+
"immix",
178+
vm_map,
179+
mmapper,
180+
&mut heap,
181+
scheduler,
182+
global_metadata_specs.clone(),
183+
),
184+
common: CommonPlan::new(
185+
vm_map,
186+
mmapper,
187+
options,
188+
heap,
189+
&IMMIX_CONSTRAINTS,
190+
global_metadata_specs,
191+
),
177192
};
178193

179194
{

src/plan/marksweep/global.rs

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@ use crate::policy::space::Space;
1414
use crate::scheduler::gc_work::*;
1515
use crate::scheduler::*;
1616
use crate::util::alloc::allocators::AllocatorSelector;
17+
#[cfg(not(feature = "global_alloc_bit"))]
18+
use crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC;
1719
#[cfg(feature = "analysis")]
1820
use crate::util::analysis::GcHookWork;
1921
use crate::util::heap::layout::heap_layout::Mmapper;
@@ -136,8 +138,18 @@ impl<VM: VMBinding> MarkSweep<VM> {
136138
options: Arc<UnsafeOptionsWrapper>,
137139
) -> Self {
138140
let heap = HeapMeta::new(HEAP_START, HEAP_END);
141+
// if global_alloc_bit is enabled, ALLOC_SIDE_METADATA_SPEC will be added to
142+
// SideMetadataContext by default, so we don't need to add it here.
143+
#[cfg(feature = "global_alloc_bit")]
139144
let global_metadata_specs =
140145
SideMetadataContext::new_global_specs(&[ACTIVE_CHUNK_METADATA_SPEC]);
146+
// if global_alloc_bit is NOT enabled,
147+
// we need to add ALLOC_SIDE_METADATA_SPEC to SideMetadataContext here.
148+
#[cfg(not(feature = "global_alloc_bit"))]
149+
let global_metadata_specs = SideMetadataContext::new_global_specs(&[
150+
ALLOC_SIDE_METADATA_SPEC,
151+
ACTIVE_CHUNK_METADATA_SPEC,
152+
]);
141153

142154
let res = MarkSweep {
143155
ms: MallocSpace::new(global_metadata_specs.clone()),

src/policy/copyspace.rs

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@ use crate::policy::space::SpaceOptions;
44
use crate::policy::space::{CommonSpace, Space, SFT};
55
use crate::util::constants::CARD_META_PAGES_PER_REGION;
66
use crate::util::heap::layout::heap_layout::{Mmapper, VMMap};
7+
#[cfg(feature = "global_alloc_bit")]
8+
use crate::util::heap::layout::vm_layout_constants::BYTES_IN_CHUNK;
79
use crate::util::heap::HeapMeta;
810
use crate::util::heap::VMRequest;
911
use crate::util::heap::{MonotonePageResource, PageResource};
@@ -38,7 +40,10 @@ impl<VM: VMBinding> SFT for CopySpace<VM> {
3840
fn is_sane(&self) -> bool {
3941
!self.from_space()
4042
}
41-
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {}
43+
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
44+
#[cfg(feature = "global_alloc_bit")]
45+
crate::util::alloc_bit::set_alloc_bit(_object);
46+
}
4247
#[inline(always)]
4348
fn get_forwarded_object(&self, object: ObjectReference) -> Option<ObjectReference> {
4449
if !self.from_space() {
@@ -142,12 +147,27 @@ impl<VM: VMBinding> CopySpace<VM> {
142147

143148
pub fn release(&self) {
144149
unsafe {
150+
#[cfg(feature = "global_alloc_bit")]
151+
self.reset_alloc_bit();
145152
self.pr.reset();
146153
}
147154
self.common.metadata.reset();
148155
self.from_space.store(false, Ordering::SeqCst);
149156
}
150157

158+
#[cfg(feature = "global_alloc_bit")]
159+
unsafe fn reset_alloc_bit(&self) {
160+
let current_chunk = self.pr.get_current_chunk();
161+
if self.common.contiguous {
162+
crate::util::alloc_bit::bzero_alloc_bit(
163+
self.common.start,
164+
current_chunk + BYTES_IN_CHUNK - self.common.start,
165+
);
166+
} else {
167+
unimplemented!();
168+
}
169+
}
170+
151171
fn from_space(&self) -> bool {
152172
self.from_space.load(Ordering::SeqCst)
153173
}
@@ -164,6 +184,12 @@ impl<VM: VMBinding> CopySpace<VM> {
164184
if !self.from_space() {
165185
return object;
166186
}
187+
#[cfg(feature = "global_alloc_bit")]
188+
debug_assert!(
189+
crate::util::alloc_bit::is_alloced(object),
190+
"{:x}: alloc bit not set",
191+
object
192+
);
167193
trace!("attempting to forward");
168194
let forwarding_status = object_forwarding::attempt_to_forward::<VM>(object);
169195
trace!("checking if object is being forwarded");

src/policy/immix/block.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -225,6 +225,8 @@ impl Block {
225225
/// Deinitialize a block before releasing.
226226
#[inline]
227227
pub fn deinit(&self) {
228+
#[cfg(feature = "global_alloc_bit")]
229+
crate::util::alloc_bit::bzero_alloc_bit(self.start(), Self::BYTES);
228230
self.set_state(BlockState::Unallocated);
229231
}
230232

src/policy/immix/immixspace.rs

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,10 @@ impl<VM: VMBinding> SFT for ImmixSpace<VM> {
6767
fn is_sane(&self) -> bool {
6868
true
6969
}
70-
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {}
70+
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
71+
#[cfg(feature = "global_alloc_bit")]
72+
crate::util::alloc_bit::set_alloc_bit(_object);
73+
}
7174
}
7275

7376
impl<VM: VMBinding> Space<VM> for ImmixSpace<VM> {
@@ -305,6 +308,12 @@ impl<VM: VMBinding> ImmixSpace<VM> {
305308
semantics: AllocationSemantics,
306309
copy_context: &mut impl CopyContext,
307310
) -> ObjectReference {
311+
#[cfg(feature = "global_alloc_bit")]
312+
debug_assert!(
313+
crate::util::alloc_bit::is_alloced(object),
314+
"{:x}: alloc bit not set",
315+
object
316+
);
308317
if Block::containing::<VM>(object).is_defrag_source() {
309318
self.trace_object_with_opportunistic_copy(trace, object, semantics, copy_context)
310319
} else {
@@ -358,6 +367,8 @@ impl<VM: VMBinding> ImmixSpace<VM> {
358367
Block::containing::<VM>(object).set_state(BlockState::Marked);
359368
object
360369
} else {
370+
#[cfg(feature = "global_alloc_bit")]
371+
crate::util::alloc_bit::unset_alloc_bit(object);
361372
ForwardingWord::forward_object::<VM, _>(object, semantics, copy_context)
362373
};
363374
if !super::MARK_LINE_AT_SCAN_TIME {

src/policy/immortalspace.rs

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,8 @@ impl<VM: VMBinding> SFT for ImmortalSpace<VM> {
7373
if self.common.needs_log_bit {
7474
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
7575
}
76+
#[cfg(feature = "global_alloc_bit")]
77+
crate::util::alloc_bit::set_alloc_bit(object);
7678
}
7779
}
7880

@@ -183,6 +185,12 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
183185
trace: &mut T,
184186
object: ObjectReference,
185187
) -> ObjectReference {
188+
#[cfg(feature = "global_alloc_bit")]
189+
debug_assert!(
190+
crate::util::alloc_bit::is_alloced(object),
191+
"{:x}: alloc bit not set",
192+
object
193+
);
186194
if ImmortalSpace::<VM>::test_and_mark(object, self.mark_state) {
187195
trace.process_node(object);
188196
}

src/policy/largeobjectspace.rs

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,8 @@ impl<VM: VMBinding> SFT for LargeObjectSpace<VM> {
7373
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
7474
}
7575

76+
#[cfg(feature = "global_alloc_bit")]
77+
crate::util::alloc_bit::set_alloc_bit(object);
7678
let cell = VM::VMObjectModel::object_start_ref(object);
7779
self.treadmill.add_to_treadmill(cell, alloc);
7880
}
@@ -172,6 +174,12 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
172174
trace: &mut T,
173175
object: ObjectReference,
174176
) -> ObjectReference {
177+
#[cfg(feature = "global_alloc_bit")]
178+
debug_assert!(
179+
crate::util::alloc_bit::is_alloced(object),
180+
"{:x}: alloc bit not set",
181+
object
182+
);
175183
let nursery_object = self.is_in_nursery(object);
176184
if !self.in_nursery_gc || nursery_object {
177185
// Note that test_and_mark() has side effects
@@ -192,11 +200,15 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
192200
if sweep_nursery {
193201
for cell in self.treadmill.collect_nursery() {
194202
// println!("- cn {}", cell);
203+
#[cfg(feature = "global_alloc_bit")]
204+
crate::util::alloc_bit::unset_addr_alloc_bit(cell);
195205
self.pr.release_pages(get_super_page(cell));
196206
}
197207
} else {
198208
for cell in self.treadmill.collect() {
199209
// println!("- ts {}", cell);
210+
#[cfg(feature = "global_alloc_bit")]
211+
crate::util::alloc_bit::unset_addr_alloc_bit(cell);
200212
self.pr.release_pages(get_super_page(cell));
201213
}
202214
}

src/policy/lockfreeimmortalspace.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,8 @@ impl<VM: VMBinding> SFT for LockFreeImmortalSpace<VM> {
5555
unimplemented!()
5656
}
5757
fn initialize_object_metadata(&self, _object: ObjectReference, _alloc: bool) {
58-
unimplemented!()
58+
#[cfg(feature = "global_alloc_bit")]
59+
crate::util::alloc_bit::set_alloc_bit(_object);
5960
}
6061
}
6162

src/policy/mallocspace/global.rs

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,6 @@ impl<VM: VMBinding> MallocSpace<VM> {
172172
metadata: SideMetadataContext {
173173
global: global_side_metadata_specs,
174174
local: metadata::extract_side_metadata(&[
175-
MetadataSpec::OnSide(ALLOC_SIDE_METADATA_SPEC),
176175
MetadataSpec::OnSide(ACTIVE_PAGE_METADATA_SPEC),
177176
*VM::VMObjectModel::LOCAL_MARK_BIT_SPEC,
178177
]),
@@ -334,13 +333,15 @@ impl<VM: VMBinding> MallocSpace<VM> {
334333
let mut last_on_page_boundary = false;
335334

336335
debug_assert!(
337-
ALLOC_SIDE_METADATA_SPEC.log_min_obj_size == mark_bit_spec.log_min_obj_size,
336+
crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC.log_min_obj_size
337+
== mark_bit_spec.log_min_obj_size,
338338
"Alloc-bit and mark-bit metadata have different minimum object sizes!"
339339
);
340340

341341
// For bulk xor'ing 128-bit vectors on architectures with vector instructions
342342
// Each bit represents an object of LOG_MIN_OBJ_SIZE size
343-
let bulk_load_size: usize = 128 * (1 << ALLOC_SIDE_METADATA_SPEC.log_min_obj_size);
343+
let bulk_load_size: usize =
344+
128 * (1 << crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC.log_min_obj_size);
344345

345346
while address < chunk_end {
346347
// We extensively tested the performance of the following if-statement and were
@@ -363,7 +364,8 @@ impl<VM: VMBinding> MallocSpace<VM> {
363364
last_on_page_boundary = false;
364365
}
365366

366-
let alloc_128: u128 = unsafe { load128(&ALLOC_SIDE_METADATA_SPEC, address) };
367+
let alloc_128: u128 =
368+
unsafe { load128(&crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC, address) };
367369
let mark_128: u128 = unsafe { load128(&mark_bit_spec, address) };
368370

369371
// Check if there are dead objects in the bulk loaded region

src/policy/mallocspace/metadata.rs

Lines changed: 13 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
1+
use crate::util::alloc_bit;
12
use crate::util::heap::layout::vm_layout_constants::{BYTES_IN_CHUNK, LOG_BYTES_IN_CHUNK};
23
use crate::util::metadata::load_metadata;
34
use crate::util::metadata::side_metadata;
45
use crate::util::metadata::side_metadata::SideMetadataContext;
56
use crate::util::metadata::side_metadata::SideMetadataOffset;
67
use crate::util::metadata::side_metadata::SideMetadataSpec;
7-
use crate::util::metadata::side_metadata::GLOBAL_SIDE_METADATA_BASE_OFFSET;
88
use crate::util::metadata::side_metadata::LOCAL_SIDE_METADATA_BASE_OFFSET;
99
use crate::util::metadata::store_metadata;
1010
use crate::util::Address;
@@ -38,25 +38,11 @@ lazy_static! {
3838
/// overwriting the previous mapping.
3939
pub(crate) const ACTIVE_CHUNK_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
4040
is_global: true,
41-
offset: GLOBAL_SIDE_METADATA_BASE_OFFSET,
41+
offset: SideMetadataOffset::layout_after(&crate::util::alloc_bit::ALLOC_SIDE_METADATA_SPEC),
4242
log_num_of_bits: 3,
4343
log_min_obj_size: LOG_BYTES_IN_CHUNK as usize,
4444
};
4545

46-
/// This is the metadata spec for the alloc-bit.
47-
///
48-
/// An alloc-bit is required per min-object-size aligned address, rather than per object, and can only exist as side metadata.
49-
///
50-
/// The other metadata used by MallocSpace is mark-bit, which is per-object and can be kept in object header if the VM allows it.
51-
/// Thus, mark-bit is vm-dependent and is part of each VM's ObjectModel.
52-
///
53-
pub(crate) const ALLOC_SIDE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
54-
is_global: false,
55-
offset: LOCAL_SIDE_METADATA_BASE_OFFSET,
56-
log_num_of_bits: 0,
57-
log_min_obj_size: constants::LOG_MIN_OBJECT_SIZE as usize,
58-
};
59-
6046
/// Metadata spec for the active page byte
6147
///
6248
/// The active page metadata is used to accurately track the total number of pages that have
@@ -69,7 +55,7 @@ pub(crate) const ALLOC_SIDE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
6955
// how many pages are active in this metadata spec. Explore SIMD vectorization with 8-bit integers
7056
pub(crate) const ACTIVE_PAGE_METADATA_SPEC: SideMetadataSpec = SideMetadataSpec {
7157
is_global: false,
72-
offset: SideMetadataOffset::layout_after(&ALLOC_SIDE_METADATA_SPEC),
58+
offset: LOCAL_SIDE_METADATA_BASE_OFFSET,
7359
log_num_of_bits: 3,
7460
log_min_obj_size: constants::LOG_BYTES_IN_PAGE as usize,
7561
};
@@ -153,11 +139,11 @@ pub fn is_alloced(object: ObjectReference) -> bool {
153139
}
154140

155141
pub fn is_alloced_object(address: Address) -> bool {
156-
side_metadata::load_atomic(&ALLOC_SIDE_METADATA_SPEC, address, Ordering::SeqCst) == 1
142+
alloc_bit::is_alloced_object(address)
157143
}
158144

159145
pub unsafe fn is_alloced_object_unsafe(address: Address) -> bool {
160-
side_metadata::load(&ALLOC_SIDE_METADATA_SPEC, address) == 1
146+
alloc_bit::is_alloced_object_unsafe(address)
161147
}
162148

163149
pub fn is_marked<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) -> bool {
@@ -192,12 +178,7 @@ pub unsafe fn is_chunk_marked_unsafe(chunk_start: Address) -> bool {
192178
}
193179

194180
pub fn set_alloc_bit(object: ObjectReference) {
195-
side_metadata::store_atomic(
196-
&ALLOC_SIDE_METADATA_SPEC,
197-
object.to_address(),
198-
1,
199-
Ordering::SeqCst,
200-
);
181+
alloc_bit::set_alloc_bit(object);
201182
}
202183

203184
pub fn set_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) {
@@ -210,6 +191,11 @@ pub fn set_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Option<Ord
210191
);
211192
}
212193

194+
#[allow(unused)]
195+
pub fn unset_alloc_bit(object: ObjectReference) {
196+
alloc_bit::unset_alloc_bit(object);
197+
}
198+
213199
pub(super) fn set_page_mark(page_addr: Address) {
214200
side_metadata::store_atomic(&ACTIVE_PAGE_METADATA_SPEC, page_addr, 1, Ordering::SeqCst);
215201
}
@@ -224,9 +210,10 @@ pub(super) fn set_chunk_mark(chunk_start: Address) {
224210
}
225211

226212
pub unsafe fn unset_alloc_bit_unsafe(object: ObjectReference) {
227-
side_metadata::store(&ALLOC_SIDE_METADATA_SPEC, object.to_address(), 0);
213+
alloc_bit::unset_alloc_bit_unsafe(object);
228214
}
229215

216+
#[allow(unused)]
230217
pub fn unset_mark_bit<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) {
231218
store_metadata::<VM>(
232219
&VM::VMObjectModel::LOCAL_MARK_BIT_SPEC,

0 commit comments

Comments
 (0)