< prev index next >
src/hotspot/share/memory/metaspaceShared.cpp
Print this page
@@ -287,11 +287,13 @@
}
#ifdef _LP64
// During dump time, we allocate 4GB (UnscaledClassSpaceMax) of space and split it up:
// + The upper 1 GB is used as the "temporary compressed class space" -- preload_classes()
- // will store Klasses into this space.
+ // will store Klasses into this space. Symbols are also stored here (instead of malloc'ed)
+ // so that they are always in a predictable order, which means -Xshare:dump will generate
+ // an archive with deterministic content.
// + The lower 3 GB is used for the archive -- when preload_classes() is done,
// ArchiveCompactor will copy the class metadata into this space, first the RW parts,
// then the RO parts.
size_t max_archive_size = align_down(cds_total * 3 / 4, reserve_alignment);
@@ -504,10 +506,14 @@
// Global object for holding classes that have been loaded. Since this
// is run at a safepoint just before exit, this is the entire set of classes.
static GrowableArray<Klass*>* _global_klass_objects;
+// qsort-style comparator used to sort _global_klass_objects by class name
+// (Symbol::fast_compare gives a stable, allocation-order-independent total
+// order because Symbols are allocated at predictable addresses during dump),
+// so -Xshare:dump emits the classes in a deterministic order.
+static int global_klass_compare(Klass** a, Klass** b) {
+ return a[0]->name()->fast_compare(b[0]->name());
+}
+
// Accessor for _global_klass_objects: the set of classes collected at dump
// time (see the comment on _global_klass_objects above).
GrowableArray<Klass*>* MetaspaceShared::collected_klasses() {
return _global_klass_objects;
}
static void collect_array_classes(Klass* k) {
@@ -1523,10 +1529,11 @@
// that so we don't have to walk the SystemDictionary again.
SystemDictionaryShared::check_excluded_classes();
_global_klass_objects = new GrowableArray<Klass*>(1000);
CollectClassesClosure collect_classes;
ClassLoaderDataGraph::loaded_classes_do(&collect_classes);
+ _global_klass_objects->sort(global_klass_compare);
print_class_stats();
// Ensure the ConstMethods won't be modified at run-time
log_info(cds)("Updating ConstMethods ... ");
@@ -1556,12 +1563,14 @@
char* serialized_data = dump_read_only_tables();
_ro_region.pack();
// The vtable clones contain addresses of the current process.
- // We don't want to write these addresses into the archive.
+ // We don't want to write these addresses into the archive. Same for i2i buffer.
MetaspaceShared::zero_cpp_vtable_clones_for_writing();
+ memset(MetaspaceShared::i2i_entry_code_buffers(), 0,
+ MetaspaceShared::i2i_entry_code_buffers_size());
// relocate the data so that it can be mapped to Arguments::default_SharedBaseAddress()
// without runtime relocation.
relocate_to_default_base_address(&ptrmap);
@@ -1629,21 +1638,21 @@
const double total_u_perc = percent_of(total_bytes, total_reserved);
_mc_region.print(total_reserved);
_rw_region.print(total_reserved);
_ro_region.print(total_reserved);
- print_bitmap_region_stats(bitmap_reserved, total_reserved);
+ print_bitmap_region_stats(bitmap_used, total_reserved);
print_heap_region_stats(_closed_archive_heap_regions, "ca", total_reserved);
print_heap_region_stats(_open_archive_heap_regions, "oa", total_reserved);
log_debug(cds)("total : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
total_bytes, total_reserved, total_u_perc);
}
void VM_PopulateDumpSharedSpace::print_bitmap_region_stats(size_t size, size_t total_size) {
- log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used] at " INTPTR_FORMAT,
- size, size/double(total_size)*100.0, size, p2i(NULL));
+ log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used]",
+ size, size/double(total_size)*100.0, size);
}
void VM_PopulateDumpSharedSpace::print_heap_region_stats(GrowableArray<MemRegion> *heap_mem,
const char *name, size_t total_size) {
int arr_len = heap_mem == NULL ? 0 : heap_mem->length();
< prev index next >