@@ -1369,10 +1369,13 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
     assert(static_mapinfo->mapping_end_offset() == dynamic_mapinfo->mapping_base_offset(), "no gap");
   }

-  ReservedSpace archive_space_rs, class_space_rs;
+  ReservedSpace total_space_rs, archive_space_rs, class_space_rs;
   MapArchiveResult result = MAP_ARCHIVE_OTHER_FAILURE;
-  char* mapped_base_address = reserve_address_space_for_archives(static_mapinfo, dynamic_mapinfo,
-                                                                 use_requested_addr, archive_space_rs,
+  char* mapped_base_address = reserve_address_space_for_archives(static_mapinfo,
+                                                                 dynamic_mapinfo,
+                                                                 use_requested_addr,
+                                                                 total_space_rs,
+                                                                 archive_space_rs,
                                                                  class_space_rs);
   if (mapped_base_address == NULL) {
     result = MAP_ARCHIVE_MMAP_FAILURE;
@@ -1422,6 +1425,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
       // this with use_requested_addr, since we're going to patch all the
       // pointers anyway so there's no benefit to mmap.
       if (use_requested_addr) {
+        assert(!total_space_rs.is_reserved(), "Should not be reserved for Windows");
         log_info(cds)("Windows mmap workaround: releasing archive space.");
         archive_space_rs.release();
       }
@@ -1477,6 +1481,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
       // cover both archive and class space.
       address cds_base = (address)static_mapinfo->mapped_base();
       address ccs_end = (address)class_space_rs.end();
+      assert(ccs_end > cds_base, "Sanity check");
       CompressedKlassPointers::initialize(cds_base, ccs_end - cds_base);

       // map_heap_regions() compares the current narrow oop and klass encodings
@@ -1489,7 +1494,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
   } else {
     unmap_archive(static_mapinfo);
     unmap_archive(dynamic_mapinfo);
-    release_reserved_spaces(archive_space_rs, class_space_rs);
+    release_reserved_spaces(total_space_rs, archive_space_rs, class_space_rs);
   }

   return result;
@@ -1538,6 +1543,10 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
 // Return:
 //
 // - On success:
+//    - total_space_rs will be reserved as a whole for archive_space_rs and
+//      class_space_rs if UseCompressedClassPointers is true.
+//      On Windows, try to reserve archive_space_rs and class_space_rs
+//      separately first if use_archive_base_addr is true.
 //    - archive_space_rs will be reserved and large enough to host static and
 //      if needed dynamic archive: [Base, A).
 //      archive_space_rs.base and size will be aligned to CDS reserve
@@ -1552,6 +1561,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
 char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_mapinfo,
                                                           FileMapInfo* dynamic_mapinfo,
                                                           bool use_archive_base_addr,
+                                                          ReservedSpace& total_space_rs,
                                                           ReservedSpace& archive_space_rs,
                                                           ReservedSpace& class_space_rs) {

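The doc comment above describes the geometry this function reserves: the archive occupies [Base, A), a small gap follows, and the compressed class space sits after the gap, with total_space_rs spanning all of it on the non-Windows path. The standalone sketch below is not part of the patch; the sizes and the simplified align_up() helper are illustrative assumptions, and it only shows how ccs_begin_offset and total_range_size relate to that layout.

#include <cstddef>
#include <cstdio>

// Simplified stand-in for HotSpot's align_up(); alignment must be a power of two.
static size_t align_up(size_t value, size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

int main() {
  // All sizes below are made up for illustration only.
  const size_t granularity        = 64 * 1024;           // assumed allocation granularity
  const size_t archive_space_size = 12 * 1024 * 1024;    // static (+ dynamic) archive
  const size_t gap_size           = granularity;         // gap left reserved after the archive
  const size_t class_space_size   = 1024u * 1024 * 1024; // compressed class space

  // Offset, relative to Base, at which the class space begins: [Base, A) is the
  // archive, the gap follows, and the class space starts at ccs_begin_offset.
  const size_t ccs_begin_offset = align_up(archive_space_size + gap_size, granularity);

  // One contiguous range covering archive space, gap, and class space; this is
  // what total_space_rs reserves on the non-Windows path.
  const size_t total_range_size =
      align_up(archive_space_size + gap_size + class_space_size, granularity);

  printf("ccs_begin_offset = %zu bytes\n", ccs_begin_offset);
  printf("total_range_size = %zu bytes\n", total_range_size);
  return 0;
}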
@@ -1617,34 +1627,53 @@ char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_ma
       align_up(archive_space_size + gap_size + class_space_size,
                os::vm_allocation_granularity());

-  ReservedSpace total_rs;
-  if (base_address != NULL) {
-    // Reserve at the given archive base address, or not at all.
-    total_rs = ReservedSpace(total_range_size, archive_space_alignment,
-                             false /* bool large */, (char*) base_address);
+  assert(total_range_size > ccs_begin_offset, "must be");
+  if (use_windows_memory_mapping() && use_archive_base_addr) {
+    if (base_address != nullptr) {
+      // On Windows, we cannot safely split a reserved memory space into two (see JDK-8255917).
+      // Hence, we optimistically reserve archive space and class space side-by-side. We only
+      // do this in the use_archive_base_addr=true case, since for use_archive_base_addr=false
+      // the caller will not split the combined space for mapping but instead reads the archive
+      // data via sequential file IO.
+      address ccs_base = base_address + archive_space_size + gap_size;
+      archive_space_rs = ReservedSpace(archive_space_size, archive_space_alignment,
                                        false /* large */, (char*)base_address);
+      class_space_rs = ReservedSpace(class_space_size, class_space_alignment,
                                      false /* large */, (char*)ccs_base);
+    }
+    if (!archive_space_rs.is_reserved() || !class_space_rs.is_reserved()) {
+      release_reserved_spaces(total_space_rs, archive_space_rs, class_space_rs);
+      return NULL;
+    }
   } else {
-    // Reserve at any address, but leave it up to the platform to choose a good one.
-    total_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
-  }
-
-  if (!total_rs.is_reserved()) {
-    return NULL;
-  }
-
-  // Paranoid checks:
-  assert(base_address == NULL || (address)total_rs.base() == base_address,
-         "Sanity (" PTR_FORMAT " vs " PTR_FORMAT ")", p2i(base_address), p2i(total_rs.base()));
-  assert(is_aligned(total_rs.base(), archive_space_alignment), "Sanity");
-  assert(total_rs.size() == total_range_size, "Sanity");
-  assert(CompressedKlassPointers::is_valid_base((address)total_rs.base()), "Sanity");
+    if (use_archive_base_addr && base_address != nullptr) {
+      total_space_rs = ReservedSpace(total_range_size, archive_space_alignment,
                                      false /* bool large */, (char*) base_address);
+    } else {
+      // Reserve at any address, but leave it up to the platform to choose a good one.
+      total_space_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
+    }

-  // Now split up the space into ccs and cds archive. For simplicity, just leave
-  // the gap reserved at the end of the archive space.
-  archive_space_rs = total_rs.first_part(ccs_begin_offset,
-                                         (size_t)os::vm_allocation_granularity(),
-                                         /*split=*/true);
-  class_space_rs = total_rs.last_part(ccs_begin_offset);
+    if (!total_space_rs.is_reserved()) {
+      return NULL;
+    }

+    // Paranoid checks:
+    assert(base_address == NULL || (address)total_space_rs.base() == base_address,
+           "Sanity (" PTR_FORMAT " vs " PTR_FORMAT ")", p2i(base_address), p2i(total_space_rs.base()));
+    assert(is_aligned(total_space_rs.base(), archive_space_alignment), "Sanity");
+    assert(total_space_rs.size() == total_range_size, "Sanity");
+    assert(CompressedKlassPointers::is_valid_base((address)total_space_rs.base()), "Sanity");
+
+    // Now split up the space into ccs and cds archive. For simplicity, just leave
+    // the gap reserved at the end of the archive space. Do not do real splitting.
+    archive_space_rs = total_space_rs.first_part(ccs_begin_offset,
                                                  (size_t)os::vm_allocation_granularity(),
                                                  /*split=*/false);
+    class_space_rs = total_space_rs.last_part(ccs_begin_offset);
+    MemTracker::record_virtual_memory_split_reserved(total_space_rs.base(), total_space_rs.size(),
                                                      ccs_begin_offset);
+  }
   assert(is_aligned(archive_space_rs.base(), archive_space_alignment), "Sanity");
   assert(is_aligned(archive_space_rs.size(), archive_space_alignment), "Sanity");
   assert(is_aligned(class_space_rs.base(), class_space_alignment), "Sanity");
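The Windows-only branch above reserves archive space and class space as two independent reservations because a single Windows reservation cannot be split and later released piecewise, which is the limitation tracked as JDK-8255917. The standalone sketch below is not part of the patch; it assumes a Windows toolchain and merely demonstrates that constraint with the raw VirtualAlloc/VirtualFree API.

#include <windows.h>
#include <cstdio>

int main() {
  // Reserve one contiguous range, analogous to total_space_rs covering
  // archive space + gap + class space.
  const size_t total = 64 * 1024 * 1024;
  char* base = (char*)VirtualAlloc(nullptr, total, MEM_RESERVE, PAGE_NOACCESS);
  if (base == nullptr) {
    return 1;
  }

  // Trying to release only the upper half fails: MEM_RELEASE accepts only the
  // exact base address of the original reservation (with a size of zero), so a
  // reservation cannot be split into independently releasable parts.
  if (!VirtualFree(base + total / 2, 0, MEM_RELEASE)) {
    printf("partial release rejected, GetLastError() = %lu\n", GetLastError());
  }

  // Releasing the entire reservation from its base address succeeds.
  VirtualFree(base, 0, MEM_RELEASE);
  return 0;
}

This is also why release_reserved_spaces() in the next hunk releases total_space_rs as a whole when it was reserved, and otherwise falls back to releasing archive_space_rs and class_space_rs individually.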
@@ -1663,15 +1692,21 @@ char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_ma

 }

-void MetaspaceShared::release_reserved_spaces(ReservedSpace& archive_space_rs,
+void MetaspaceShared::release_reserved_spaces(ReservedSpace& total_space_rs,
+                                              ReservedSpace& archive_space_rs,
                                               ReservedSpace& class_space_rs) {
-  if (archive_space_rs.is_reserved()) {
-    log_debug(cds)("Released shared space (archive) " INTPTR_FORMAT, p2i(archive_space_rs.base()));
-    archive_space_rs.release();
-  }
-  if (class_space_rs.is_reserved()) {
-    log_debug(cds)("Released shared space (classes) " INTPTR_FORMAT, p2i(class_space_rs.base()));
-    class_space_rs.release();
+  if (total_space_rs.is_reserved()) {
+    log_debug(cds)("Released shared space (archive + class) " INTPTR_FORMAT, p2i(total_space_rs.base()));
+    total_space_rs.release();
+  } else {
+    if (archive_space_rs.is_reserved()) {
+      log_debug(cds)("Released shared space (archive) " INTPTR_FORMAT, p2i(archive_space_rs.base()));
+      archive_space_rs.release();
+    }
+    if (class_space_rs.is_reserved()) {
+      log_debug(cds)("Released shared space (classes) " INTPTR_FORMAT, p2i(class_space_rs.base()));
+      class_space_rs.release();
+    }
   }
 }

@@ -1715,6 +1750,7 @@ void MetaspaceShared::unmap_archive(FileMapInfo* mapinfo) {
   assert(UseSharedSpaces, "must be runtime");
   if (mapinfo != NULL) {
     mapinfo->unmap_regions(archive_regions, archive_regions_count);
+    mapinfo->unmap_region(MetaspaceShared::bm);
     mapinfo->set_is_mapped(false);
   }
 }