Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1566,11 +1566,11 @@ HeapWord* ShenandoahFreeSet::try_allocate_in(ShenandoahHeapRegion* r, Shenandoah
return nullptr;
}
HeapWord* result = nullptr;
// We must call try_recycle_under_lock() even if !r->is_trash(). The reason is that if r is being recycled at this
// We must call try_recycle even if !r->is_trash(). The reason is that if r is being recycled at this
// moment by a GC worker thread, it may appear to be not trash even though it has not yet been fully recycled. If
// we proceed without waiting for the worker to finish recycling the region, the worker thread may overwrite the
// region's affiliation with FREE after we set the region's affiliation to req.affiliation() below
r->try_recycle_under_lock();
r->try_recycle();
in_new_region = r->is_empty();
if (in_new_region) {
log_debug(gc, free)("Using new region (%zu) for %s (" PTR_FORMAT ").",
Expand Down Expand Up @@ -1825,7 +1825,7 @@ HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req, bo
for (idx_t i = beg; i <= end; i++) {
ShenandoahHeapRegion* r = _heap->get_region(i);
assert(i == beg || _heap->get_region(i - 1)->index() + 1 == r->index(), "Should be contiguous");
r->try_recycle_under_lock();
r->try_recycle();
assert(r->is_empty(), "Should be empty");
r->set_affiliation(req.affiliation());
if (i == beg) {
Expand All @@ -1851,7 +1851,7 @@ HeapWord* ShenandoahFreeSet::allocate_contiguous(ShenandoahAllocRequest& req, bo
ShenandoahHeapRegion* r = _heap->get_region(i);
assert(i == beg || _heap->get_region(i - 1)->index() + 1 == r->index(), "Should be contiguous");
assert(r->is_empty(), "Should be empty");
r->try_recycle_under_lock();
r->try_recycle();
r->set_affiliation(req.affiliation());
r->make_regular_allocation(req.affiliation());
if ((i == end) && (used_words_in_last_region > 0)) {
Expand Down Expand Up @@ -1970,7 +1970,7 @@ bool ShenandoahFreeSet::flip_to_old_gc(ShenandoahHeapRegion* r) {
ShenandoahRightLeftIterator iterator(&_partitions, ShenandoahFreeSetPartitionId::OldCollector, true);
idx_t unusable_trash = -1;
for (unusable_trash = iterator.current(); iterator.has_next(); unusable_trash = iterator.next()) {
const ShenandoahHeapRegion* region = _heap->get_region(unusable_trash);
ShenandoahHeapRegion* region = _heap->get_region(unusable_trash);
if (region->is_trash() && _heap->is_concurrent_weak_root_in_progress()) {
break;
}
Expand Down
4 changes: 2 additions & 2 deletions src/hotspot/share/gc/shenandoah/shenandoahFullGC.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -522,7 +522,7 @@ class ShenandoahEnsureHeapActiveClosure: public ShenandoahHeapRegionClosure {
public:
void heap_region_do(ShenandoahHeapRegion* r) override {
if (r->is_trash()) {
r->try_recycle_under_lock();
r->try_recycle();
// No need to adjust_interval_for_recycled_old_region. That will be taken care of during freeset rebuild.
}
if (r->is_cset()) {
Expand Down Expand Up @@ -970,7 +970,7 @@ class ShenandoahPostCompactClosure : public ShenandoahHeapRegionClosure {
// Recycle all trash regions
if (r->is_trash()) {
live = 0;
r->try_recycle_under_lock();
r->try_recycle();
// No need to adjust_interval_for_recycled_old_region. That will be taken care of during freeset rebuild.
} else {
if (r->is_old()) {
Expand Down
2 changes: 1 addition & 1 deletion src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -789,7 +789,7 @@ bool ShenandoahHeap::is_in(const void* p) const {
}

// Now check if we point to a live section in active region.
const ShenandoahHeapRegion* r = heap_region_containing(p);
ShenandoahHeapRegion* r = heap_region_containing(p);
if (p >= r->top()) {
return false;
}
Expand Down
58 changes: 14 additions & 44 deletions src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,6 @@ ShenandoahHeapRegion::ShenandoahHeapRegion(HeapWord* start, size_t index, bool c
if (ZapUnusedHeapArea && committed) {
SpaceMangler::mangle_region(MemRegion(_bottom, _end));
}
_recycling.unset();
}

void ShenandoahHeapRegion::report_illegal_transition(const char *method) {
Expand Down Expand Up @@ -290,6 +289,10 @@ void ShenandoahHeapRegion::make_cset() {

void ShenandoahHeapRegion::make_trash() {
shenandoah_assert_heaplocked();
// Trash regions will be recycled under _recycle_lock (without the heap lock when recycling is done by GC threads).
// The state transition to _trash must be done under _recycle_lock to ensure GC threads always see the correct state
// later, when they recycle all the trash regions.
ShenandoahRegionRecycleLocker locker(&_recycle_lock);
reset_age();
switch (state()) {
case _humongous_start:
Expand Down Expand Up @@ -560,9 +563,8 @@ ShenandoahHeapRegion* ShenandoahHeapRegion::humongous_start_region() const {
return r;
}


void ShenandoahHeapRegion::recycle_internal() {
assert(_recycling.is_set() && is_trash(), "Wrong state");
assert(state() == _trash, "Must be trash");
ShenandoahHeap* heap = ShenandoahHeap::heap();

_mixed_candidate_garbage_words = 0;
Expand All @@ -579,47 +581,15 @@ void ShenandoahHeapRegion::recycle_internal() {
set_affiliation(FREE);
}

// Upon return, this region has been recycled. We try to recycle it.
// We may fail if some other thread recycled it before we do.
void ShenandoahHeapRegion::try_recycle_under_lock() {
shenandoah_assert_heaplocked();
if (is_trash() && _recycling.try_set()) {
if (is_trash()) {
// At freeset rebuild time, which precedes recycling of collection set, we treat all cset regions as
// part of capacity, as empty, as fully available, and as unaffiliated. This provides short-lived optimism
// for triggering heuristics. It greatly simplifies and reduces the locking overhead required
// by more time-precise accounting of these details.
recycle_internal();
}
_recycling.unset();
} else {
// Ensure recycling is unset before returning to mutator to continue memory allocation.
// Otherwise, the mutator might see region as fully recycled and might change its affiliation only to have
// the racing GC worker thread overwrite its affiliation to FREE.
while (_recycling.is_set()) {
if (os::is_MP()) {
SpinPause();
} else {
os::naked_yield();
}
}
}
}

// Note that return from try_recycle() does not mean the region has been recycled. It only means that
// some GC worker thread has taken responsibility to recycle the region, eventually.
// Recycle the region if its state is _trash.
void ShenandoahHeapRegion::try_recycle() {
shenandoah_assert_not_heaplocked();
if (is_trash() && _recycling.try_set()) {
// Double check region state after win the race to set recycling flag
if (is_trash()) {
// At freeset rebuild time, which precedes recycling of collection set, we treat all cset regions as
// part of capacity, as empty, as fully available, and as unaffiliated. This provides short-lived optimism
// for triggering and pacing heuristics. It greatly simplifies and reduces the locking overhead required
// by more time-precise accounting of these details.
recycle_internal();
}
_recycling.unset();
ShenandoahRegionRecycleLocker locker(&_recycle_lock);
if (state() == _trash) {
// At freeset rebuild time, which precedes recycling of collection set, we treat all cset regions as
// part of capacity, as empty, as fully available, and as unaffiliated. This provides short-lived optimism
// for triggering and pacing heuristics. It greatly simplifies and reduces the locking overhead required
// by more time-precise accounting of these details.
recycle_internal();
}
}

Expand Down Expand Up @@ -834,7 +804,7 @@ void ShenandoahHeapRegion::set_state(RegionState to) {
evt.set_to(to);
evt.commit();
}
_state.store_relaxed(to);
_state.release_store(to);
}

void ShenandoahHeapRegion::record_pin() {
Expand Down
13 changes: 7 additions & 6 deletions src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@
class VMStructs;
class ShenandoahHeapRegionStateConstant;

// Use ShenandoahLock as recycle lock
typedef ShenandoahLock ShenandoahRegionRecycleLock;
typedef ShenandoahLocker<ShenandoahRegionRecycleLock> ShenandoahRegionRecycleLocker;

class ShenandoahHeapRegion {
friend class VMStructs;
friend class ShenandoahHeapRegionStateConstant;
Expand Down Expand Up @@ -197,7 +201,6 @@ class ShenandoahHeapRegion {
bool is_regular() const { return state() == _regular; }
bool is_humongous_continuation() const { return state() == _humongous_cont; }
bool is_regular_pinned() const { return state() == _pinned; }
bool is_trash() const { return state() == _trash; }

// Derived state predicates (boolean combinations of individual states)
bool static is_empty_state(RegionState state) { return state == _empty_committed || state == _empty_uncommitted; }
Expand All @@ -210,6 +213,7 @@ class ShenandoahHeapRegion {
bool is_cset() const { auto cur_state = state(); return cur_state == _cset || cur_state == _pinned_cset; }
bool is_pinned() const { auto cur_state = state(); return cur_state == _pinned || cur_state == _pinned_cset || cur_state == _pinned_humongous_start; }

inline bool is_trash();
inline bool is_young() const;
inline bool is_old() const;
inline bool is_affiliated() const;
Expand All @@ -218,7 +222,7 @@ class ShenandoahHeapRegion {
bool is_alloc_allowed() const { auto cur_state = state(); return is_empty_state(cur_state) || cur_state == _regular || cur_state == _pinned; }
bool is_stw_move_allowed() const { auto cur_state = state(); return cur_state == _regular || cur_state == _cset || (ShenandoahHumongousMoves && cur_state == _humongous_start); }

RegionState state() const { return _state.load_relaxed(); }
RegionState state() const { return _state.load_acquire(); }
int state_ordinal() const { return region_state_to_ordinal(state()); }

void record_pin();
Expand Down Expand Up @@ -269,8 +273,7 @@ class ShenandoahHeapRegion {
bool _promoted_in_place;
CENSUS_NOISE(uint _youth;) // tracks epochs of retrograde ageing (rejuvenation)

ShenandoahSharedFlag _recycling; // Used to indicate that the region is being recycled; see try_recycle*().

ShenandoahRegionRecycleLock _recycle_lock; // Recycle of a region must be done under lock
bool _needs_bitmap_reset;

public:
Expand Down Expand Up @@ -414,8 +417,6 @@ class ShenandoahHeapRegion {

void print_on(outputStream* st) const;

void try_recycle_under_lock();

void try_recycle();

inline void begin_preemptible_coalesce_and_fill() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,13 @@ inline const char* ShenandoahHeapRegion::affiliation_name() const {
return shenandoah_affiliation_name(affiliation());
}

inline bool ShenandoahHeapRegion::is_trash() {
// State transitions to _trash(trashing) and from _trash(recycling) are done
// under _recycle_lock, therefore the lock is required here for reliable result.
ShenandoahRegionRecycleLocker locker(&_recycle_lock);
return state() == _trash;
}

inline bool ShenandoahHeapRegion::is_young() const {
  // A region is young iff it is currently affiliated with the young generation.
  const auto current_affiliation = affiliation();
  return current_affiliation == YOUNG_GENERATION;
}
Expand Down