@@ -642,6 +642,38 @@ ParallelCompactData::summarize_split_space(size_t src_region,
   return source_next;
 }
 
+size_t ParallelCompactData::live_words_in_space(const MutableSpace* space,
+                                                HeapWord** full_region_prefix_end) {
+  size_t cur_region = addr_to_region_idx(space->bottom());
+  const size_t end_region = addr_to_region_idx(region_align_up(space->top()));
+  size_t live_words = 0;
+  if (full_region_prefix_end == nullptr) {
+    for (/* empty */; cur_region < end_region; ++cur_region) {
+      live_words += _region_data[cur_region].data_size();
+    }
+  } else {
+    bool first_set = false;
+    for (/* empty */; cur_region < end_region; ++cur_region) {
+      size_t live_words_in_region = _region_data[cur_region].data_size();
+      if (!first_set && live_words_in_region < RegionSize) {
+        *full_region_prefix_end = region_to_addr(cur_region);
+        first_set = true;
+      }
+      live_words += live_words_in_region;
+    }
+    if (!first_set) {
+      // All regions are full of live objs.
+      assert(is_region_aligned(space->top()), "inv");
+      *full_region_prefix_end = space->top();
+    }
+    assert(*full_region_prefix_end != nullptr, "postcondition");
+    assert(is_region_aligned(*full_region_prefix_end), "inv");
+    assert(*full_region_prefix_end >= space->bottom(), "in-range");
+    assert(*full_region_prefix_end <= space->top(), "in-range");
+  }
+  return live_words;
+}
+
 bool ParallelCompactData::summarize(SplitInfo& split_info,
                                     HeapWord* source_beg, HeapWord* source_end,
                                     HeapWord** source_next,
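The new ParallelCompactData::live_words_in_space above does two jobs in one pass: it sums the live words recorded per region, and, when a non-null out-parameter is supplied, reports where the prefix of completely full regions ends, i.e. the first region with any dead space. The standalone sketch below restates that scan over a plain array of per-region live-word counts; the names, the fixed region size, and the index-based return value are illustrative assumptions, not HotSpot API.

// Minimal sketch of the full-region-prefix scan, under the assumptions above.
#include <cstddef>

static const size_t kRegionSizeWords = 512;  // assumed words per region

// Sums live words and sets *prefix_end_idx to the first region that is not
// completely full of live data (num_regions if every region is full).
size_t live_words_in_regions(const size_t* live_per_region, size_t num_regions,
                             size_t* prefix_end_idx) {
  size_t live_words = 0;
  *prefix_end_idx = num_regions;
  bool first_set = false;
  for (size_t i = 0; i < num_regions; ++i) {
    if (!first_set && live_per_region[i] < kRegionSizeWords) {
      *prefix_end_idx = i;     // first region with some dead space
      first_set = true;
    }
    live_words += live_per_region[i];
  }
  return live_words;
}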
@@ -982,93 +1014,19 @@ void PSParallelCompact::post_compact()
   Universe::heap()->record_whole_heap_examined_timestamp();
 }
 
-ParallelCompactData::RegionData*
-PSParallelCompact::first_dead_space_region(const RegionData* beg,
-                                           const RegionData* end)
-{
-  const size_t region_size = ParallelCompactData::RegionSize;
-  ParallelCompactData& sd = summary_data();
-  size_t left = sd.region(beg);
-  size_t right = end > beg ? sd.region(end) - 1 : left;
-
-  // Binary search.
-  while (left < right) {
-    // Equivalent to (left + right) / 2, but does not overflow.
-    const size_t middle = left + (right - left) / 2;
-    RegionData* const middle_ptr = sd.region(middle);
-    HeapWord* const dest = middle_ptr->destination();
-    HeapWord* const addr = sd.region_to_addr(middle);
-    assert(dest != nullptr, "sanity");
-    assert(dest <= addr, "must move left");
-
-    if (middle > left && dest < addr) {
-      right = middle - 1;
-    } else if (middle < right && middle_ptr->data_size() == region_size) {
-      left = middle + 1;
-    } else {
-      return middle_ptr;
-    }
-  }
-  return sd.region(left);
-}
-
-// Return the address of the end of the dense prefix, a.k.a. the start of the
-// compacted region.  The address is always on a region boundary.
-//
-// Completely full regions at the left are skipped, since no compaction can
-// occur in those regions.  Then the maximum amount of dead wood to allow is
-// computed, based on the density (amount live / capacity) of the generation;
-// the region with approximately that amount of dead space to the left is
-// identified as the limit region.  Regions between the last completely full
-// region and the limit region are scanned and the one that has the best
-// (maximum) reclaimed_ratio() is selected.
-HeapWord*
-PSParallelCompact::compute_dense_prefix(const SpaceId id,
-                                        bool maximum_compaction)
-{
+HeapWord* PSParallelCompact::compute_dense_prefix_for_old_space(MutableSpace* old_space,
+                                                                HeapWord* full_region_prefix_end) {
   const size_t region_size = ParallelCompactData::RegionSize;
   const ParallelCompactData& sd = summary_data();
 
-  const MutableSpace* const space = _space_info[id].space();
-  HeapWord* const top = space->top();
-  HeapWord* const top_aligned_up = sd.region_align_up(top);
-  HeapWord* const new_top = _space_info[id].new_top();
-  HeapWord* const new_top_aligned_up = sd.region_align_up(new_top);
-  HeapWord* const bottom = space->bottom();
-  const RegionData* const beg_cp = sd.addr_to_region_ptr(bottom);
-  const RegionData* const top_cp = sd.addr_to_region_ptr(top_aligned_up);
-  const RegionData* const new_top_cp =
-    sd.addr_to_region_ptr(new_top_aligned_up);
-
-  // Skip full regions at the beginning of the space--they are necessarily part
-  // of the dense prefix.
-  const RegionData* const full_cp = first_dead_space_region(beg_cp, new_top_cp);
-  assert(full_cp->destination() == sd.region_to_addr(full_cp) ||
-         space->is_empty(), "no dead space allowed to the left");
-  assert(full_cp->data_size() < region_size || full_cp == new_top_cp - 1,
-         "region must have dead space");
-
-  // The gc number is saved whenever a maximum compaction is done, and used to
-  // determine when the maximum compaction interval has expired.  This avoids
-  // successive max compactions for different reasons.
-  const uint total_invocations = ParallelScavengeHeap::heap()->total_full_collections();
-  assert(total_invocations >= _maximum_compaction_gc_num, "sanity");
-  const size_t gcs_since_max = total_invocations - _maximum_compaction_gc_num;
-  const bool interval_ended = gcs_since_max > HeapMaximumCompactionInterval ||
-    total_invocations == HeapFirstMaximumCompactionCount;
-  if (maximum_compaction || full_cp == top_cp || interval_ended) {
-    _maximum_compaction_gc_num = total_invocations;
-    return sd.region_to_addr(full_cp);
-  }
-
   // Iteration starts with the region *after* the full-region-prefix-end.
-  const RegionData* const start_region = full_cp;
+  const RegionData* const start_region = sd.addr_to_region_ptr(full_region_prefix_end);
   // If final region is not full, iteration stops before that region,
   // because fill_dense_prefix_end assumes that prefix_end <= top.
-  const RegionData* const end_region = sd.addr_to_region_ptr(space->top());
+  const RegionData* const end_region = sd.addr_to_region_ptr(old_space->top());
   assert(start_region <= end_region, "inv");
 
-  size_t max_waste = space->capacity_in_words() * (MarkSweepDeadRatio / 100.0);
+  size_t max_waste = old_space->capacity_in_words() * (MarkSweepDeadRatio / 100.0);
   const RegionData* cur_region = start_region;
   for (/* empty */; cur_region < end_region; ++cur_region) {
     assert(region_size >= cur_region->data_size(), "inv");
@@ -1081,24 +1039,11 @@ PSParallelCompact::compute_dense_prefix(const SpaceId id,
 
   HeapWord* const prefix_end = sd.region_to_addr(cur_region);
   assert(sd.is_region_aligned(prefix_end), "postcondition");
-  assert(prefix_end >= sd.region_to_addr(full_cp), "in-range");
-  assert(prefix_end <= space->top(), "in-range");
+  assert(prefix_end >= full_region_prefix_end, "in-range");
+  assert(prefix_end <= old_space->top(), "in-range");
   return prefix_end;
 }
 
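The loop that produces prefix_end (only partly visible across these two hunks) walks the regions after the all-live prefix and tolerates dead space up to max_waste, i.e. MarkSweepDeadRatio percent of the old-gen capacity. The sketch below restates one plausible form of that budgeted scan; the names, the fixed region size, and the exact accept/stop rule are assumptions for illustration, since most of the loop body lies outside this diff.

// Budgeted dead-wood scan, as a standalone sketch under the assumptions above.
#include <cstddef>

static const size_t kRegionSizeWords = 512;  // assumed words per region

// Returns the region index where the dense prefix ends: regions before it stay
// in place, regions from it onward get compacted.
size_t dense_prefix_end_region(const size_t* live_per_region, size_t num_regions,
                               size_t start_region, size_t dead_wood_budget) {
  size_t cur = start_region;
  for (; cur < num_regions; ++cur) {
    size_t dead_words = kRegionSizeWords - live_per_region[cur];
    if (dead_words > dead_wood_budget) {
      break;                        // keeping this region would waste too much
    }
    dead_wood_budget -= dead_words; // accept its dead space, shrink the budget
  }
  return cur;
}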
-void PSParallelCompact::summarize_spaces_quick()
-{
-  for (unsigned int i = 0; i < last_space_id; ++i) {
-    const MutableSpace* space = _space_info[i].space();
-    HeapWord** nta = _space_info[i].new_top_addr();
-    bool result = _summary_data.summarize(_space_info[i].split_info(),
-                                          space->bottom(), space->top(), nullptr,
-                                          space->bottom(), space->end(), nta);
-    assert(result, "space must fit into itself");
-    _space_info[i].set_dense_prefix(space->bottom());
-  }
-}
-
 void PSParallelCompact::fill_dense_prefix_end(SpaceId id) {
   // Comparing two sizes to decide if filling is required:
   //
@@ -1123,6 +1068,12 @@ void PSParallelCompact::fill_dense_prefix_end(SpaceId id) {
   }
   assert(CollectedHeap::min_fill_size() == 2, "inv");
   HeapWord* const dense_prefix_end = dense_prefix(id);
+  assert(_summary_data.is_region_aligned(dense_prefix_end), "precondition");
+  assert(dense_prefix_end <= space(id)->top(), "precondition");
+  if (dense_prefix_end == space(id)->top()) {
+    // Must not have single-word gap right before prefix-end/top.
+    return;
+  }
   RegionData* const region_after_dense_prefix = _summary_data.addr_to_region_ptr(dense_prefix_end);
   idx_t const dense_prefix_bit = _mark_bitmap.addr_to_bit(dense_prefix_end);
 
@@ -1147,56 +1098,6 @@ void PSParallelCompact::fill_dense_prefix_end(SpaceId id) {
   }
 }
 
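fill_dense_prefix_end relies on CollectedHeap::min_fill_size() being 2 words: a dead gap that straddles the dense-prefix boundary is plugged with a filler object so later phases treat it as live, and a filler can never be smaller than 2 words. The early return added above covers dense_prefix_end == top, where there is no space above the boundary to extend a filler into, which is why the prefix-end selection must never leave a 1-word gap right below it. The check below is a reduced sketch of that reasoning; the helper name and its single-parameter interface are made up for illustration.

// Reduced sketch: can a dead gap at the dense-prefix boundary be plugged?
#include <cstddef>

static const size_t kMinFillWords = 2;  // CollectedHeap::min_fill_size()

// gap_words: dead words immediately below the (region-aligned) prefix end.
// A zero gap needs no filler; a 1-word gap cannot hold the minimal filler
// object, which is exactly the situation the prefix-end choice must avoid.
bool gap_can_be_filled(size_t gap_words) {
  return gap_words == 0 || gap_words >= kMinFillWords;
}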
-void
-PSParallelCompact::summarize_space(SpaceId id, bool maximum_compaction)
-{
-  assert(id < last_space_id, "id out of range");
-  assert(_space_info[id].dense_prefix() == _space_info[id].space()->bottom(),
-         "should have been reset in summarize_spaces_quick()");
-
-  const MutableSpace* space = _space_info[id].space();
-  if (_space_info[id].new_top() != space->bottom()) {
-    HeapWord* dense_prefix_end = compute_dense_prefix(id, maximum_compaction);
-    _space_info[id].set_dense_prefix(dense_prefix_end);
-
-    // Recompute the summary data, taking into account the dense prefix.  If
-    // every last byte will be reclaimed, then the existing summary data which
-    // compacts everything can be left in place.
-    if (!maximum_compaction && dense_prefix_end != space->bottom()) {
-      // If dead space crosses the dense prefix boundary, it is (at least
-      // partially) filled with a dummy object, marked live and added to the
-      // summary data.  This simplifies the copy/update phase and must be done
-      // before the final locations of objects are determined, to prevent
-      // leaving a fragment of dead space that is too small to fill.
-      fill_dense_prefix_end(id);
-
-      // Compute the destination of each Region, and thus each object.
-      _summary_data.summarize_dense_prefix(space->bottom(), dense_prefix_end);
-      _summary_data.summarize(_space_info[id].split_info(),
-                              dense_prefix_end, space->top(), nullptr,
-                              dense_prefix_end, space->end(),
-                              _space_info[id].new_top_addr());
-    }
-  }
-
-  if (log_develop_is_enabled(Trace, gc, compaction)) {
-    const size_t region_size = ParallelCompactData::RegionSize;
-    HeapWord* const dense_prefix_end = _space_info[id].dense_prefix();
-    const size_t dp_region = _summary_data.addr_to_region_idx(dense_prefix_end);
-    const size_t dp_words = pointer_delta(dense_prefix_end, space->bottom());
-    HeapWord* const new_top = _space_info[id].new_top();
-    const HeapWord* nt_aligned_up = _summary_data.region_align_up(new_top);
-    const size_t cr_words = pointer_delta(nt_aligned_up, dense_prefix_end);
-    log_develop_trace(gc, compaction)(
-        "id=%d cap=" SIZE_FORMAT " dp=" PTR_FORMAT " "
-        "dp_region=" SIZE_FORMAT " " "dp_count=" SIZE_FORMAT " "
-        "cr_count=" SIZE_FORMAT " " "nt=" PTR_FORMAT,
-        id, space->capacity_in_words(), p2i(dense_prefix_end),
-        dp_region, dp_words / region_size,
-        cr_words / region_size, p2i(new_top));
-  }
-}
-
 #ifndef PRODUCT
 void PSParallelCompact::summary_phase_msg(SpaceId dst_space_id,
                                           HeapWord* dst_beg, HeapWord* dst_end,
@@ -1220,33 +1121,75 @@ void PSParallelCompact::summary_phase_msg(SpaceId dst_space_id,
 }
 #endif // #ifndef PRODUCT
 
-void PSParallelCompact::summary_phase(bool maximum_compaction)
-{
-  GCTraceTime(Info, gc, phases) tm("Summary Phase", &_gc_timer);
+bool PSParallelCompact::reassess_maximum_compaction(bool maximum_compaction,
+                                                    size_t total_live_words,
+                                                    MutableSpace* const old_space,
+                                                    HeapWord* full_region_prefix_end) {
+  // Check if all live objs are larger than old-gen.
+  const bool is_old_gen_overflowing = (total_live_words > old_space->capacity_in_words());
 
-  // Quick summarization of each space into itself, to see how much is live.
-  summarize_spaces_quick();
+  // JVM flags
+  const uint total_invocations = ParallelScavengeHeap::heap()->total_full_collections();
+  assert(total_invocations >= _maximum_compaction_gc_num, "sanity");
+  const size_t gcs_since_max = total_invocations - _maximum_compaction_gc_num;
+  const bool is_interval_ended = gcs_since_max > HeapMaximumCompactionInterval
+                              || total_invocations == HeapFirstMaximumCompactionCount;
 
-  log_develop_trace(gc, compaction)("summary phase: after summarizing each space to self");
-  NOT_PRODUCT(print_region_ranges());
-  NOT_PRODUCT(print_initial_summary_data(_summary_data, _space_info));
+  // If all regions in old-gen are full
+  const bool is_region_full =
+    full_region_prefix_end >= _summary_data.region_align_down(old_space->top());
 
-  // The amount of live data that will end up in old space (assuming it fits).
-  size_t old_space_total_live = 0;
-  for (unsigned int id = old_space_id; id < last_space_id; ++id) {
-    old_space_total_live += pointer_delta(_space_info[id].new_top(),
-                                          _space_info[id].space()->bottom());
+  if (maximum_compaction || is_old_gen_overflowing || is_interval_ended || is_region_full) {
+    _maximum_compaction_gc_num = total_invocations;
+    return true;
   }
 
+  return false;
+}
+
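reassess_maximum_compaction above gathers the former scattered triggers in one place: an explicit request, more live data than the old gen can hold, an expired compaction interval, or an old gen whose regions are already completely live each force a whole-heap (maximum) compaction. A small sketch of the interval condition follows; the default flag values (HeapMaximumCompactionInterval = 20, HeapFirstMaximumCompactionCount = 3) and the helper name are quoted here for illustration only and are not part of the patch.

// Sketch of the interval trigger, assuming the usual default flag values.
#include <cstddef>
#include <cstdint>

bool interval_ended(uint32_t total_full_gcs, uint32_t last_max_compaction_gc,
                    size_t max_interval = 20, uint32_t first_max_count = 3) {
  size_t gcs_since_max = total_full_gcs - last_max_compaction_gc;
  // Either enough full GCs have happened since the last maximum compaction,
  // or this is the designated early full compaction.
  return gcs_since_max > max_interval || total_full_gcs == first_max_count;
}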
+void PSParallelCompact::summary_phase(bool maximum_compaction)
+{
+  GCTraceTime(Info, gc, phases) tm("Summary Phase", &_gc_timer);
+
   MutableSpace* const old_space = _space_info[old_space_id].space();
-  const size_t old_capacity = old_space->capacity_in_words();
-  if (old_space_total_live > old_capacity) {
-    // XXX - should also try to expand
-    maximum_compaction = true;
-  }
+  {
+    size_t total_live_words = 0;
+    HeapWord* full_region_prefix_end = nullptr;
+    {
+      // old-gen
+      size_t live_words = _summary_data.live_words_in_space(old_space,
+                                                            &full_region_prefix_end);
+      total_live_words += live_words;
+    }
+    // young-gen
+    for (uint i = eden_space_id; i < last_space_id; ++i) {
+      const MutableSpace* space = _space_info[i].space();
+      size_t live_words = _summary_data.live_words_in_space(space);
+      total_live_words += live_words;
+      _space_info[i].set_new_top(space->bottom() + live_words);
+      _space_info[i].set_dense_prefix(space->bottom());
+    }
 
-  // Old generations.
-  summarize_space(old_space_id, maximum_compaction);
+    maximum_compaction = reassess_maximum_compaction(maximum_compaction,
+                                                     total_live_words,
+                                                     old_space,
+                                                     full_region_prefix_end);
+    HeapWord* dense_prefix_end =
+      maximum_compaction ? full_region_prefix_end
+                         : compute_dense_prefix_for_old_space(old_space,
+                                                              full_region_prefix_end);
+    SpaceId id = old_space_id;
+    _space_info[id].set_dense_prefix(dense_prefix_end);
+
+    if (dense_prefix_end != old_space->bottom()) {
+      fill_dense_prefix_end(id);
+      _summary_data.summarize_dense_prefix(old_space->bottom(), dense_prefix_end);
+    }
+    _summary_data.summarize(_space_info[id].split_info(),
+                            dense_prefix_end, old_space->top(), nullptr,
+                            dense_prefix_end, old_space->end(),
+                            _space_info[id].new_top_addr());
+  }
 
   // Summarize the remaining spaces in the young gen.  The initial target space
   // is the old gen.  If a space does not fit entirely into the target, then the