272
273 // First try to reserve the space at the specified SharedBaseAddress.
274 assert(!_shared_rs.is_reserved(), "must be");
275 if (use_requested_base) {
276 _shared_rs = reserve_shared_space(cds_total, shared_base);
277 }
278 if (_shared_rs.is_reserved()) {
279 assert(shared_base == 0 || _shared_rs.base() == shared_base, "should match");
280 } else {
281 // Get an mmap region anywhere if reserving at the requested SharedBaseAddress fails.
282 _shared_rs = reserve_shared_space(cds_total);
283 }
284 if (!_shared_rs.is_reserved()) {
285 vm_exit_during_initialization("Unable to reserve memory for shared space",
286 err_msg(SIZE_FORMAT " bytes.", cds_total));
287 }
288
289 #ifdef _LP64
290 // During dump time, we allocate 4GB (UnscaledClassSpaceMax) of space and split it up:
291 // + The upper 1 GB is used as the "temporary compressed class space" -- preload_classes()
292 // will store Klasses into this space. Symbols are also stored here (instead of malloc'ed)
293 // so that they are always in a predictable order, which means -Xshare:dump will generate
294 // an archive with deterministic content.
295 // + The lower 3 GB is used for the archive -- when preload_classes() is done,
296 // ArchiveCompactor will copy the class metadata into this space, first the RW parts,
297 // then the RO parts.
298
299 size_t max_archive_size = align_down(cds_total * 3 / 4, reserve_alignment);
300 ReservedSpace tmp_class_space = _shared_rs.last_part(max_archive_size);
301 CompressedClassSpaceSize = align_down(tmp_class_space.size(), reserve_alignment);
302 _shared_rs = _shared_rs.first_part(max_archive_size);
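// Illustrative arithmetic (a sketch, assuming cds_total == UnscaledClassSpaceMax == 4 GB
// as stated in the comment above): max_archive_size is align_down(3 GB), so last_part()
// hands the upper ~1 GB to tmp_class_space while first_part() keeps the lower 3 GB in
// _shared_rs for the archive regions.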
303
304 if (UseCompressedClassPointers) {
305 // Set up compressed class pointers.
306 CompressedKlassPointers::set_base((address)_shared_rs.base());
307 // Set narrow_klass_shift to be LogKlassAlignmentInBytes. This is consistent
308 // with AOT.
309 CompressedKlassPointers::set_shift(LogKlassAlignmentInBytes);
310 // Set the range of klass addresses to 4GB.
311 CompressedKlassPointers::set_range(cds_total);
312 Metaspace::initialize_class_space(tmp_class_space);
313 }
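// Sketch of the resulting encoding (an illustration, not part of the original code):
// with base == _shared_rs.base() and shift == LogKlassAlignmentInBytes as set above,
// a narrow Klass value nk would relate to a Klass* k roughly as
//   nk = (narrowKlass)((address(k) - base) >> shift);
//   k  = (Klass*)(base + ((uintptr_t)nk << shift));
// so every archived Klass must lie within the 4 GB range passed to set_range().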
314 log_info(cds)("narrow_klass_base = " PTR_FORMAT ", narrow_klass_shift = %d",
491 assert(_i2i_entry_code_buffers_size == total_size, "must not change");
492 return _i2i_entry_code_buffers;
493 }
494
495 uintx MetaspaceShared::object_delta_uintx(void* obj) {
496 Arguments::assert_is_dumping_archive();
497 if (DumpSharedSpaces) {
498 assert(shared_rs()->contains(obj), "must be");
499 } else {
500 assert(is_in_shared_metaspace(obj) || DynamicArchive::is_in_target_space(obj), "must be");
501 }
502 address base_address = address(SharedBaseAddress);
503 uintx deltax = address(obj) - base_address;
504 return deltax;
505 }
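// Illustrative use of the delta (an assumption about the reader side, not shown here):
// because the returned value is an offset from SharedBaseAddress, a consumer that knows
// the base can recover the original pointer with something like
//   void* obj = (void*)(address(SharedBaseAddress) + delta);
// which is why the asserts above require obj to lie inside the shared/target space.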
506
507 // Global object for holding classes that have been loaded. Since this
508 // is run at a safepoint just before exit, this is the entire set of classes.
509 static GrowableArray<Klass*>* _global_klass_objects;
510
511 static int global_klass_compare(Klass** a, Klass **b) {
512 return a[0]->name()->fast_compare(b[0]->name());
513 }
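// The comparator above orders klasses by their name Symbols (via Symbol::fast_compare).
// It is used by the sort call after ClassLoaderDataGraph::loaded_classes_do() (line 1534
// below), so the collected klasses end up in a reproducible order, in support of the
// deterministic -Xshare:dump output described in the comment around line 293.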
514
515 GrowableArray<Klass*>* MetaspaceShared::collected_klasses() {
516 return _global_klass_objects;
517 }
518
519 static void collect_array_classes(Klass* k) {
520 _global_klass_objects->append_if_missing(k);
521 if (k->is_array_klass()) {
522 // Add in the array classes too
523 ArrayKlass* ak = ArrayKlass::cast(k);
524 Klass* h = ak->higher_dimension();
525 if (h != NULL) {
526 h->array_klasses_do(collect_array_classes);
527 }
528 }
529 }
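// For an array klass such as [LFoo; (example name), higher_dimension() links to the
// next-higher-dimension array klass (e.g. [[LFoo;), and array_klasses_do() walks that
// chain re-invoking collect_array_classes, so every already-created dimension is
// appended to _global_klass_objects as well.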
530
531 class CollectClassesClosure : public KlassClosure {
532 void do_klass(Klass* k) {
533 if (k->is_instance_klass() &&
534 SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(k))) {
1514
1515 FileMapInfo::check_nonempty_dir_in_shared_path_table();
1516
1517 NOT_PRODUCT(SystemDictionary::verify();)
1518 // The following guarantee is meant to ensure that no loader constraints
1519 // exist yet, since the constraints table is not shared. This becomes
1520 // more important now that we don't re-initialize vtables/itables for
1521 // shared classes at runtime, where constraints were previously created.
1522 guarantee(SystemDictionary::constraints()->number_of_entries() == 0,
1523 "loader constraints are not saved");
1524 guarantee(SystemDictionary::placeholders()->number_of_entries() == 0,
1525 "placeholders are not saved");
1526
1527 // At this point, many classes have been loaded.
1528 // Gather SystemDictionary classes into a global array and operate on that array
1529 // so we don't have to walk the SystemDictionary again.
1530 SystemDictionaryShared::check_excluded_classes();
1531 _global_klass_objects = new GrowableArray<Klass*>(1000);
1532 CollectClassesClosure collect_classes;
1533 ClassLoaderDataGraph::loaded_classes_do(&collect_classes);
1534 _global_klass_objects->sort(global_klass_compare);
1535
1536 print_class_stats();
1537
1538 // Ensure the ConstMethods won't be modified at run-time
1539 log_info(cds)("Updating ConstMethods ... ");
1540 rewrite_nofast_bytecodes_and_calculate_fingerprints(THREAD);
1541 log_info(cds)("done. ");
1542
1543 // Remove all references outside the metadata
1544 log_info(cds)("Removing unshareable information ... ");
1545 remove_unshareable_in_classes();
1546 log_info(cds)("done. ");
1547
1548 MetaspaceShared::allocate_cloned_cpp_vtptrs();
1549 char* cloned_vtables = _mc_region.top();
1550 MetaspaceShared::allocate_cpp_vtable_clones();
1551
1552 ArchiveCompactor::initialize();
1553 ArchiveCompactor::copy_and_compact();
1554
1555 dump_symbols();
1556
1557 // Dump supported java heap objects
1558 _closed_archive_heap_regions = NULL;
1559 _open_archive_heap_regions = NULL;
1560 dump_java_heap_objects();
1561
1562 ArchiveCompactor::relocate_well_known_klasses();
1563
1564 char* serialized_data = dump_read_only_tables();
1565 _ro_region.pack();
1566
1567 // The vtable clones contain addresses of the current process.
1568 // We don't want to write these addresses into the archive. Same for i2i buffer.
1569 MetaspaceShared::zero_cpp_vtable_clones_for_writing();
1570 memset(MetaspaceShared::i2i_entry_code_buffers(), 0,
1571 MetaspaceShared::i2i_entry_code_buffers_size());
1572
1573 // Relocate the data so that it can be mapped to Arguments::default_SharedBaseAddress()
1574 // without runtime relocation.
1575 relocate_to_default_base_address(&ptrmap);
1576
1577 // Create and write the archive file that maps the shared spaces.
1578
1579 FileMapInfo* mapinfo = new FileMapInfo(true);
1580 mapinfo->populate_header(os::vm_allocation_granularity());
1581 mapinfo->set_serialized_data(serialized_data);
1582 mapinfo->set_cloned_vtables(cloned_vtables);
1583 mapinfo->set_i2i_entry_code_buffers(MetaspaceShared::i2i_entry_code_buffers(),
1584 MetaspaceShared::i2i_entry_code_buffers_size());
1585 mapinfo->open_for_write();
1586 MetaspaceShared::write_core_archive_regions(mapinfo, _closed_archive_heap_oopmaps, _open_archive_heap_oopmaps);
1587 _total_closed_archive_region_size = mapinfo->write_archive_heap_regions(
1588 _closed_archive_heap_regions,
1589 _closed_archive_heap_oopmaps,
1590 MetaspaceShared::first_closed_archive_heap_region,
1591 MetaspaceShared::max_closed_archive_heap_region);
1623
1624 void VM_PopulateDumpSharedSpace::print_region_stats(FileMapInfo *map_info) {
1625 // Print statistics of all the regions
1626 const size_t bitmap_used = map_info->space_at(MetaspaceShared::bm)->used();
1627 const size_t bitmap_reserved = map_info->space_at(MetaspaceShared::bm)->used_aligned();
1628 const size_t total_reserved = _ro_region.reserved() + _rw_region.reserved() +
1629 _mc_region.reserved() +
1630 bitmap_reserved +
1631 _total_closed_archive_region_size +
1632 _total_open_archive_region_size;
1633 const size_t total_bytes = _ro_region.used() + _rw_region.used() +
1634 _mc_region.used() +
1635 bitmap_used +
1636 _total_closed_archive_region_size +
1637 _total_open_archive_region_size;
1638 const double total_u_perc = percent_of(total_bytes, total_reserved);
1639
1640 _mc_region.print(total_reserved);
1641 _rw_region.print(total_reserved);
1642 _ro_region.print(total_reserved);
1643 print_bitmap_region_stats(bitmap_used, total_reserved);
1644 print_heap_region_stats(_closed_archive_heap_regions, "ca", total_reserved);
1645 print_heap_region_stats(_open_archive_heap_regions, "oa", total_reserved);
1646
1647 log_debug(cds)("total : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
1648 total_bytes, total_reserved, total_u_perc);
1649 }
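// Worked example of the figures above (hypothetical numbers, for illustration only):
// if total_reserved were 16 MB and total_bytes 12 MB, total_u_perc would be
// percent_of(12 MB, 16 MB) == 75.0, and a region using 4 MB would show up as
// "[ 25.0% of total]" in its per-region line.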
1650
1651 void VM_PopulateDumpSharedSpace::print_bitmap_region_stats(size_t size, size_t total_size) {
1652 log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used]",
1653 size, size/double(total_size)*100.0, size);
1654 }
1655
1656 void VM_PopulateDumpSharedSpace::print_heap_region_stats(GrowableArray<MemRegion> *heap_mem,
1657 const char *name, size_t total_size) {
1658 int arr_len = heap_mem == NULL ? 0 : heap_mem->length();
1659 for (int i = 0; i < arr_len; i++) {
1660 char* start = (char*)heap_mem->at(i).start();
1661 size_t size = heap_mem->at(i).byte_size();
1662 char* top = start + size;
1663 log_debug(cds)("%s%d space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used] at " INTPTR_FORMAT,
1664 name, i, size, size/double(total_size)*100.0, size, p2i(start));
1665
1666 }
1667 }
1668
1669 void MetaspaceShared::write_core_archive_regions(FileMapInfo* mapinfo,
1670 GrowableArray<ArchiveHeapOopmapInfo>* closed_oopmaps,
1671 GrowableArray<ArchiveHeapOopmapInfo>* open_oopmaps) {
1672 // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
1673 // MetaspaceShared::n_regions (internal to hotspot).