
src/hotspot/share/memory/metaspaceShared.cpp


*** 21,48 ****
   * questions.
   *
   */
  #include "precompiled.hpp"
- #include "jvm.h"
  #include "classfile/classLoaderDataGraph.hpp"
  #include "classfile/classListParser.hpp"
  #include "classfile/classLoaderExt.hpp"
- #include "classfile/dictionary.hpp"
  #include "classfile/loaderConstraints.hpp"
  #include "classfile/javaClasses.inline.hpp"
  #include "classfile/placeholders.hpp"
  #include "classfile/symbolTable.hpp"
  #include "classfile/stringTable.hpp"
  #include "classfile/systemDictionary.hpp"
  #include "classfile/systemDictionaryShared.hpp"
  #include "code/codeCache.hpp"
- #include "gc/shared/softRefPolicy.hpp"
  #include "interpreter/bytecodeStream.hpp"
  #include "interpreter/bytecodes.hpp"
  #include "logging/log.hpp"
  #include "logging/logMessage.hpp"
  #include "memory/archiveUtils.inline.hpp"
  #include "memory/dynamicArchive.hpp"
  #include "memory/filemap.hpp"
  #include "memory/heapShared.inline.hpp"
  #include "memory/metaspace.hpp"
--- 21,46 ----
   * questions.
   *
   */
  #include "precompiled.hpp"
  #include "classfile/classLoaderDataGraph.hpp"
  #include "classfile/classListParser.hpp"
  #include "classfile/classLoaderExt.hpp"
  #include "classfile/loaderConstraints.hpp"
  #include "classfile/javaClasses.inline.hpp"
  #include "classfile/placeholders.hpp"
  #include "classfile/symbolTable.hpp"
  #include "classfile/stringTable.hpp"
  #include "classfile/systemDictionary.hpp"
  #include "classfile/systemDictionaryShared.hpp"
  #include "code/codeCache.hpp"
  #include "interpreter/bytecodeStream.hpp"
  #include "interpreter/bytecodes.hpp"
  #include "logging/log.hpp"
  #include "logging/logMessage.hpp"
+ #include "memory/archiveBuilder.hpp"
  #include "memory/archiveUtils.inline.hpp"
  #include "memory/dynamicArchive.hpp"
  #include "memory/filemap.hpp"
  #include "memory/heapShared.inline.hpp"
  #include "memory/metaspace.hpp"
*** 57,71 ****
  #include "oops/methodData.hpp"
  #include "oops/objArrayKlass.hpp"
  #include "oops/objArrayOop.hpp"
  #include "oops/oop.inline.hpp"
  #include "oops/typeArrayKlass.hpp"
- #include "prims/jvmtiRedefineClasses.hpp"
  #include "runtime/handles.inline.hpp"
  #include "runtime/os.hpp"
  #include "runtime/safepointVerifiers.hpp"
- #include "runtime/signature.hpp"
  #include "runtime/timerTrace.hpp"
  #include "runtime/vmThread.hpp"
  #include "runtime/vmOperations.hpp"
  #include "utilities/align.hpp"
  #include "utilities/bitMap.inline.hpp"
--- 55,67 ----
*** 622,697 ****
  // Global object for holding classes that have been loaded. Since this
  // is run at a safepoint just before exit, this is the entire set of classes.
  static GrowableArray<Klass*>* _global_klass_objects;
  
- static int global_klass_compare(Klass** a, Klass **b) {
-   return a[0]->name()->fast_compare(b[0]->name());
- }
- 
  GrowableArray<Klass*>* MetaspaceShared::collected_klasses() {
    return _global_klass_objects;
  }
  
- static void collect_array_classes(Klass* k) {
-   _global_klass_objects->append_if_missing(k);
-   if (k->is_array_klass()) {
-     // Add in the array classes too
-     ArrayKlass* ak = ArrayKlass::cast(k);
-     Klass* h = ak->higher_dimension();
-     if (h != NULL) {
-       h->array_klasses_do(collect_array_classes);
-     }
-   }
- }
- 
- class CollectClassesClosure : public KlassClosure {
-   void do_klass(Klass* k) {
-     if (k->is_instance_klass() &&
-         SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(k))) {
-       // Don't add to the _global_klass_objects
-     } else {
-       _global_klass_objects->append_if_missing(k);
-     }
-     if (k->is_array_klass()) {
-       // Add in the array classes too
-       ArrayKlass* ak = ArrayKlass::cast(k);
-       Klass* h = ak->higher_dimension();
-       if (h != NULL) {
-         h->array_klasses_do(collect_array_classes);
-       }
-     }
-   }
- };
- 
- // Global object for holding symbols that created during class loading. See SymbolTable::new_symbol
- static GrowableArray<Symbol*>* _global_symbol_objects = NULL;
- 
- static int compare_symbols_by_address(Symbol** a, Symbol** b) {
-   if (a[0] < b[0]) {
-     return -1;
-   } else if (a[0] == b[0]) {
-     ResourceMark rm;
-     log_warning(cds)("Duplicated symbol %s unexpected", (*a)->as_C_string());
-     return 0;
-   } else {
-     return 1;
-   }
- }
- 
- void MetaspaceShared::add_symbol(Symbol* sym) {
-   MutexLocker ml(CDSAddSymbol_lock, Mutex::_no_safepoint_check_flag);
-   if (_global_symbol_objects == NULL) {
-     _global_symbol_objects = new (ResourceObj::C_HEAP, mtSymbol) GrowableArray<Symbol*>(2048, mtSymbol);
-   }
-   _global_symbol_objects->append(sym);
- }
- 
- GrowableArray<Symbol*>* MetaspaceShared::collected_symbols() {
-   return _global_symbol_objects;
- }
- 
  static void remove_unshareable_in_classes() {
    for (int i = 0; i < _global_klass_objects->length(); i++) {
      Klass* k = _global_klass_objects->at(i);
      if (!k->is_objArray_klass()) {
        // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
--- 618,631 ----
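
Note: the collection logic removed above (collect_array_classes, CollectClassesClosure, MetaspaceShared::add_symbol, collected_symbols) is not simply dropped; judging from the rest of this change it is taken over by the new ArchiveBuilder (see the added memory/archiveBuilder.hpp include and the builder.gather_klasses_and_symbols() call further down). The standalone sketch below illustrates only the collection pattern itself; FakeKlass and the other names are simplified stand-ins for illustration, not HotSpot types.

    #include <algorithm>
    #include <vector>

    // Hypothetical stand-in for Klass, carrying only what the sketch needs.
    struct FakeKlass {
      bool is_array;
      FakeKlass* higher_dimension;   // next higher-dimension array class, or nullptr
    };

    static std::vector<FakeKlass*> g_klasses;

    // Append the class only if it has not been collected yet (append_if_missing).
    static void append_if_missing(FakeKlass* k) {
      if (std::find(g_klasses.begin(), g_klasses.end(), k) == g_klasses.end()) {
        g_klasses.push_back(k);
      }
    }

    // Same idea as the removed collect_array_classes(): record the class, then
    // follow the higher-dimension chain so T[], T[][], ... are all collected.
    static void collect_array_classes(FakeKlass* k) {
      append_if_missing(k);
      if (k->is_array && k->higher_dimension != nullptr) {
        collect_array_classes(k->higher_dimension);
      }
    }
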
*** 1069,1220 ****
        start += sizeof(intptr_t);
        size -= sizeof(intptr_t);
      }
    }
  
- // This is for dumping detailed statistics for the allocations
- // in the shared spaces.
- class DumpAllocStats : public ResourceObj {
- public:
- 
-   // Here's poor man's enum inheritance
- #define SHAREDSPACE_OBJ_TYPES_DO(f) \
-   METASPACE_OBJ_TYPES_DO(f)         \
-   f(SymbolHashentry)                \
-   f(SymbolBucket)                   \
-   f(StringHashentry)                \
-   f(StringBucket)                   \
-   f(Other)
- 
-   enum Type {
-     // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
-     SHAREDSPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
-     _number_of_types
-   };
- 
-   static const char * type_name(Type type) {
-     switch(type) {
-     SHAREDSPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
-     default:
-       ShouldNotReachHere();
-       return NULL;
-     }
-   }
- 
- public:
-   enum { RO = 0, RW = 1 };
- 
-   int _counts[2][_number_of_types];
-   int _bytes [2][_number_of_types];
- 
-   DumpAllocStats() {
-     memset(_counts, 0, sizeof(_counts));
-     memset(_bytes, 0, sizeof(_bytes));
-   };
- 
-   void record(MetaspaceObj::Type type, int byte_size, bool read_only) {
-     assert(int(type) >= 0 && type < MetaspaceObj::_number_of_types, "sanity");
-     int which = (read_only) ? RO : RW;
-     _counts[which][type] ++;
-     _bytes [which][type] += byte_size;
-   }
- 
-   void record_other_type(int byte_size, bool read_only) {
-     int which = (read_only) ? RO : RW;
-     _bytes [which][OtherType] += byte_size;
-   }
-   void print_stats(int ro_all, int rw_all, int mc_all);
- };
- 
- void DumpAllocStats::print_stats(int ro_all, int rw_all, int mc_all) {
-   // Calculate size of data that was not allocated by Metaspace::allocate()
-   MetaspaceSharedStats *stats = MetaspaceShared::stats();
- 
-   // symbols
-   _counts[RO][SymbolHashentryType] = stats->symbol.hashentry_count;
-   _bytes [RO][SymbolHashentryType] = stats->symbol.hashentry_bytes;
- 
-   _counts[RO][SymbolBucketType] = stats->symbol.bucket_count;
-   _bytes [RO][SymbolBucketType] = stats->symbol.bucket_bytes;
- 
-   // strings
-   _counts[RO][StringHashentryType] = stats->string.hashentry_count;
-   _bytes [RO][StringHashentryType] = stats->string.hashentry_bytes;
- 
-   _counts[RO][StringBucketType] = stats->string.bucket_count;
-   _bytes [RO][StringBucketType] = stats->string.bucket_bytes;
- 
-   // TODO: count things like dictionary, vtable, etc
-   _bytes[RW][OtherType] += mc_all;
-   rw_all += mc_all; // mc is mapped Read/Write
- 
-   // prevent divide-by-zero
-   if (ro_all < 1) {
-     ro_all = 1;
-   }
-   if (rw_all < 1) {
-     rw_all = 1;
-   }
- 
-   int all_ro_count = 0;
-   int all_ro_bytes = 0;
-   int all_rw_count = 0;
-   int all_rw_bytes = 0;
- 
-   // To make fmt_stats be a syntactic constant (for format warnings), use #define.
- #define fmt_stats "%-20s: %8d %10d %5.1f | %8d %10d %5.1f | %8d %10d %5.1f"
-   const char *sep = "--------------------+---------------------------+---------------------------+--------------------------";
-   const char *hdr = "                        ro_cnt   ro_bytes     % |   rw_cnt   rw_bytes     % |  all_cnt  all_bytes     %";
- 
-   LogMessage(cds) msg;
- 
-   msg.debug("Detailed metadata info (excluding st regions; rw stats include mc regions):");
-   msg.debug("%s", hdr);
-   msg.debug("%s", sep);
-   for (int type = 0; type < int(_number_of_types); type ++) {
-     const char *name = type_name((Type)type);
-     int ro_count = _counts[RO][type];
-     int ro_bytes = _bytes [RO][type];
-     int rw_count = _counts[RW][type];
-     int rw_bytes = _bytes [RW][type];
-     int count = ro_count + rw_count;
-     int bytes = ro_bytes + rw_bytes;
- 
-     double ro_perc = percent_of(ro_bytes, ro_all);
-     double rw_perc = percent_of(rw_bytes, rw_all);
-     double perc    = percent_of(bytes, ro_all + rw_all);
- 
-     msg.debug(fmt_stats, name,
-               ro_count, ro_bytes, ro_perc,
-               rw_count, rw_bytes, rw_perc,
-               count, bytes, perc);
- 
-     all_ro_count += ro_count;
-     all_ro_bytes += ro_bytes;
-     all_rw_count += rw_count;
-     all_rw_bytes += rw_bytes;
-   }
- 
-   int all_count = all_ro_count + all_rw_count;
-   int all_bytes = all_ro_bytes + all_rw_bytes;
- 
-   double all_ro_perc = percent_of(all_ro_bytes, ro_all);
-   double all_rw_perc = percent_of(all_rw_bytes, rw_all);
-   double all_perc    = percent_of(all_bytes, ro_all + rw_all);
- 
-   msg.debug("%s", sep);
-   msg.debug(fmt_stats, "Total",
-             all_ro_count, all_ro_bytes, all_ro_perc,
-             all_rw_count, all_rw_bytes, all_rw_perc,
-             all_count, all_bytes, all_perc);
- 
-   assert(all_ro_bytes == ro_all, "everything should have been counted");
-   assert(all_rw_bytes == rw_all, "everything should have been counted");
- 
- #undef fmt_stats
- }
- 
  // Populate the shared space.
  class VM_PopulateDumpSharedSpace: public VM_Operation {
  private:
    GrowableArray<MemRegion> *_closed_archive_heap_regions;
--- 1003,1012 ----
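
Note: DumpAllocStats, removed above, tallies how many objects and bytes of each MetaspaceObj type land in the read-only (RO) versus read-write (RW) regions and prints one row per type plus a total; the builder.print_stats() call later in this change suggests an equivalent now lives behind ArchiveBuilder. A minimal standalone sketch of that bookkeeping pattern (simplified type list and names, not the HotSpot API):

    #include <cstdio>
    #include <cstring>

    // Simplified object-type list; the real code derives its list from
    // METASPACE_OBJ_TYPES_DO plus a few hashtable bucket/entry categories.
    enum Type  { ClassType, SymbolType, OtherType, _number_of_types };
    enum Which { RO = 0, RW = 1 };

    struct AllocStats {
      int counts[2][_number_of_types];
      int bytes [2][_number_of_types];

      AllocStats() {
        std::memset(counts, 0, sizeof(counts));
        std::memset(bytes,  0, sizeof(bytes));
      }

      // Tally one allocation into the read-only or read-write column.
      void record(Type type, int byte_size, bool read_only) {
        int which = read_only ? RO : RW;
        counts[which][type] += 1;
        bytes [which][type] += byte_size;
      }

      // One row per type: object count and byte size for RO and RW.
      void print() const {
        for (int t = 0; t < _number_of_types; t++) {
          std::printf("type %d: ro %d objs / %d bytes, rw %d objs / %d bytes\n",
                      t, counts[RO][t], bytes[RO][t], counts[RW][t], bytes[RW][t]);
        }
      }
    };
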
*** 1227,1237 ****
    void dump_archive_heap_oopmaps() NOT_CDS_JAVA_HEAP_RETURN;
    void dump_archive_heap_oopmaps(GrowableArray<MemRegion>* regions,
                                   GrowableArray<ArchiveHeapOopmapInfo>* oopmaps);
    void dump_symbols();
    char* dump_read_only_tables();
-   void print_class_stats();
    void print_region_stats(FileMapInfo* map_info);
    void print_bitmap_region_stats(size_t size, size_t total_size);
    void print_heap_region_stats(GrowableArray<MemRegion> *heap_mem,
                                 const char *name, size_t total_size);
    void relocate_to_requested_base_address(CHeapBitMap* ptrmap);
--- 1019,1028 ----
*** 1241,1531 ****
    VMOp_Type type() const { return VMOp_PopulateDumpSharedSpace; }
    void doit();   // outline because gdb sucks
    bool allow_nested_vm_operations() const { return true; }
  }; // class VM_PopulateDumpSharedSpace
  
! // ArchiveCompactor --
! //
! // This class is the central piece of shared archive compaction -- all metaspace data are
! // initially allocated outside of the shared regions. ArchiveCompactor copies the
! // metaspace data into their final location in the shared regions.
! 
! class ArchiveCompactor : AllStatic {
!   static const int INITIAL_TABLE_SIZE = 8087;
!   static const int MAX_TABLE_SIZE     = 1000000;
! 
!   static DumpAllocStats* _alloc_stats;
! 
!   typedef KVHashtable<address, address, mtInternal> RelocationTable;
!   static RelocationTable* _new_loc_table;
! public:
!   static void initialize() {
!     _alloc_stats = new(ResourceObj::C_HEAP, mtInternal)DumpAllocStats;
!     _new_loc_table = new RelocationTable(INITIAL_TABLE_SIZE);
!   }
!   static DumpAllocStats* alloc_stats() {
!     return _alloc_stats;
!   }
! 
!   // Use this when you allocate space with MetaspaceShare::read_only_space_alloc()
!   // outside of ArchiveCompactor::allocate(). These are usually for misc tables
!   // that are allocated in the RO space.
!   class OtherROAllocMark {
!     char* _oldtop;
!   public:
!     OtherROAllocMark() {
!       _oldtop = _ro_region.top();
!     }
!     ~OtherROAllocMark() {
!       char* newtop = _ro_region.top();
!       ArchiveCompactor::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
!     }
!   };
! 
!   static void allocate(MetaspaceClosure::Ref* ref, bool read_only) {
!     address obj = ref->obj();
!     int bytes = ref->size() * BytesPerWord;
!     char* p;
!     size_t alignment = BytesPerWord;
!     char* oldtop;
!     char* newtop;
! 
!     if (read_only) {
!       oldtop = _ro_region.top();
!       p = _ro_region.allocate(bytes, alignment);
!       newtop = _ro_region.top();
!     } else {
!       oldtop = _rw_region.top();
!       if (ref->msotype() == MetaspaceObj::ClassType) {
!         // Save a pointer immediate in front of an InstanceKlass, so
!         // we can do a quick lookup from InstanceKlass* -> RunTimeSharedClassInfo*
!         // without building another hashtable. See RunTimeSharedClassInfo::get_for()
!         // in systemDictionaryShared.cpp.
!         Klass* klass = (Klass*)obj;
!         if (klass->is_instance_klass()) {
!           SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
!           _rw_region.allocate(sizeof(address), BytesPerWord);
!         }
!       }
!       p = _rw_region.allocate(bytes, alignment);
!       newtop = _rw_region.top();
!     }
!     memcpy(p, obj, bytes);
!     intptr_t* archived_vtable = MetaspaceShared::get_archived_cpp_vtable(ref->msotype(), (address)p);
!     if (archived_vtable != NULL) {
!       *(address*)p = (address)archived_vtable;
!       ArchivePtrMarker::mark_pointer((address*)p);
!     }
! 
!     assert(_new_loc_table->lookup(obj) == NULL, "each object can be relocated at most once");
!     _new_loc_table->add(obj, (address)p);
!     log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(obj), p2i(p), bytes);
!     if (_new_loc_table->maybe_grow(MAX_TABLE_SIZE)) {
!       log_info(cds, hashtables)("Expanded _new_loc_table to %d", _new_loc_table->table_size());
!     }
!     _alloc_stats->record(ref->msotype(), int(newtop - oldtop), read_only);
!   }
! 
!   static address get_new_loc(MetaspaceClosure::Ref* ref) {
!     address* pp = _new_loc_table->lookup(ref->obj());
!     assert(pp != NULL, "must be");
!     return *pp;
!   }
! 
! private:
!   // Makes a shallow copy of visited MetaspaceObj's
!   class ShallowCopier: public UniqueMetaspaceClosure {
!     bool _read_only;
!   public:
!     ShallowCopier(bool read_only) : _read_only(read_only) {}
! 
!     virtual bool do_unique_ref(Ref* ref, bool read_only) {
!       if (read_only == _read_only) {
!         allocate(ref, read_only);
!       }
!       return true; // recurse into ref.obj()
!     }
!   };
! 
!   // Relocate embedded pointers within a MetaspaceObj's shallow copy
!   class ShallowCopyEmbeddedRefRelocator: public UniqueMetaspaceClosure {
!   public:
!     virtual bool do_unique_ref(Ref* ref, bool read_only) {
!       address new_loc = get_new_loc(ref);
!       RefRelocator refer;
!       ref->metaspace_pointers_do_at(&refer, new_loc);
!       return true; // recurse into ref.obj()
!     }
!     virtual void push_special(SpecialRef type, Ref* ref, intptr_t* p) {
!       assert(type == _method_entry_ref, "only special type allowed for now");
!       address obj = ref->obj();
!       address new_obj = get_new_loc(ref);
!       size_t offset = pointer_delta(p, obj, sizeof(u1));
!       intptr_t* new_p = (intptr_t*)(new_obj + offset);
!       assert(*p == *new_p, "must be a copy");
!       ArchivePtrMarker::mark_pointer((address*)new_p);
!     }
!   };
! 
!   // Relocate a reference to point to its shallow copy
!   class RefRelocator: public MetaspaceClosure {
!   public:
!     virtual bool do_ref(Ref* ref, bool read_only) {
!       if (ref->not_null()) {
!         ref->update(get_new_loc(ref));
!         ArchivePtrMarker::mark_pointer(ref->addr());
!       }
!       return false; // Do not recurse.
!     }
!   };
! 
! #ifdef ASSERT
!   class IsRefInArchiveChecker: public MetaspaceClosure {
!   public:
!     virtual bool do_ref(Ref* ref, bool read_only) {
!       if (ref->not_null()) {
!         char* obj = (char*)ref->obj();
!         assert(_ro_region.contains(obj) || _rw_region.contains(obj),
!                "must be relocated to point to CDS archive");
!       }
!       return false; // Do not recurse.
!     }
!   };
! #endif
! 
! public:
!   static void copy_and_compact() {
!     ResourceMark rm;
! 
!     log_info(cds)("Scanning all metaspace objects ... ");
!     {
!       // allocate and shallow-copy RW objects, immediately following the MC region
!       log_info(cds)("Allocating RW objects ... ");
!       _mc_region.pack(&_rw_region);
! 
!       ResourceMark rm;
!       ShallowCopier rw_copier(false);
!       iterate_roots(&rw_copier);
!     }
!     {
!       // allocate and shallow-copy of RO object, immediately following the RW region
!       log_info(cds)("Allocating RO objects ... ");
!       _rw_region.pack(&_ro_region);
! 
!       ResourceMark rm;
!       ShallowCopier ro_copier(true);
!       iterate_roots(&ro_copier);
!     }
!     {
!       log_info(cds)("Relocating embedded pointers ... ");
!       ResourceMark rm;
!       ShallowCopyEmbeddedRefRelocator emb_reloc;
!       iterate_roots(&emb_reloc);
!     }
!     {
!       log_info(cds)("Relocating external roots ... ");
!       ResourceMark rm;
!       RefRelocator ext_reloc;
!       iterate_roots(&ext_reloc);
!     }
!     {
!       log_info(cds)("Fixing symbol identity hash ... ");
!       os::init_random(0x12345678);
!       GrowableArray<Symbol*>* all_symbols = MetaspaceShared::collected_symbols();
!       all_symbols->sort(compare_symbols_by_address);
!       for (int i = 0; i < all_symbols->length(); i++) {
!         assert(all_symbols->at(i)->is_permanent(), "archived symbols must be permanent");
!         all_symbols->at(i)->update_identity_hash();
!       }
!     }
! #ifdef ASSERT
!     {
!       log_info(cds)("Verifying external roots ... ");
!       ResourceMark rm;
!       IsRefInArchiveChecker checker;
!       iterate_roots(&checker);
!     }
! #endif
!   }
! 
!   // We must relocate the System::_well_known_klasses only after we have copied the
!   // java objects in during dump_java_heap_objects(): during the object copy, we operate on
!   // old objects which assert that their klass is the original klass.
!   static void relocate_well_known_klasses() {
!     {
!       log_info(cds)("Relocating SystemDictionary::_well_known_klasses[] ... ");
!       ResourceMark rm;
!       RefRelocator ext_reloc;
!       SystemDictionary::well_known_klasses_do(&ext_reloc);
!     }
!     // NOTE: after this point, we shouldn't have any globals that can reach the old
!     // objects.
! 
!     // We cannot use any of the objects in the heap anymore (except for the
!     // shared strings) because their headers no longer point to valid Klasses.
!   }
! 
!   static void iterate_roots(MetaspaceClosure* it) {
!     // To ensure deterministic contents in the archive, we just need to ensure that
!     // we iterate the MetsapceObjs in a deterministic order. It doesn't matter where
!     // the MetsapceObjs are located originally, as they are copied sequentially into
!     // the archive during the iteration.
!     //
!     // The only issue here is that the symbol table and the system directories may be
!     // randomly ordered, so we copy the symbols and klasses into two arrays and sort
!     // them deterministically.
!     //
!     // During -Xshare:dump, the order of Symbol creation is strictly determined by
!     // the SharedClassListFile (class loading is done in a single thread and the JIT
!     // is disabled). Also, Symbols are allocated in monotonically increasing addresses
!     // (see Symbol::operator new(size_t, int)). So if we iterate the Symbols by
!     // ascending address order, we ensure that all Symbols are copied into deterministic
!     // locations in the archive.
!     GrowableArray<Symbol*>* symbols = _global_symbol_objects;
!     for (int i = 0; i < symbols->length(); i++) {
!       it->push(symbols->adr_at(i));
!     }
!     if (_global_klass_objects != NULL) {
!       // Need to fix up the pointers
!       for (int i = 0; i < _global_klass_objects->length(); i++) {
!         // NOTE -- this requires that the vtable is NOT yet patched, or else we are hosed.
!         it->push(_global_klass_objects->adr_at(i));
!       }
!     }
      FileMapInfo::metaspace_pointers_do(it, false);
      SystemDictionaryShared::dumptime_classes_do(it);
      Universe::metaspace_pointers_do(it);
      SymbolTable::metaspace_pointers_do(it);
      vmSymbols::metaspace_pointers_do(it);
- 
-     it->finish();
-   }
- 
-   static Klass* get_relocated_klass(Klass* orig_klass) {
-     assert(DumpSharedSpaces, "dump time only");
-     address* pp = _new_loc_table->lookup((address)orig_klass);
-     assert(pp != NULL, "must be");
-     Klass* klass = (Klass*)(*pp);
-     assert(klass->is_klass(), "must be");
-     return klass;
    }
  };
  
- DumpAllocStats* ArchiveCompactor::_alloc_stats;
- ArchiveCompactor::RelocationTable* ArchiveCompactor::_new_loc_table;
- 
  void VM_PopulateDumpSharedSpace::dump_symbols() {
    log_info(cds)("Dumping symbol table ...");
  
    NOT_PRODUCT(SymbolTable::verify());
    SymbolTable::write_to_archive();
  }
  
  char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
!   ArchiveCompactor::OtherROAllocMark mark;
  
    log_info(cds)("Removing java_mirror ... ");
    if (!HeapShared::is_heap_object_archiving_allowed()) {
      Universe::clear_basic_type_mirrors();
    }
--- 1032,1064 ----
    VMOp_Type type() const { return VMOp_PopulateDumpSharedSpace; }
    void doit();   // outline because gdb sucks
    bool allow_nested_vm_operations() const { return true; }
  }; // class VM_PopulateDumpSharedSpace
  
! class StaticArchiveBuilder : public ArchiveBuilder {
! public:
!   StaticArchiveBuilder(DumpRegion* rw_region, DumpRegion* ro_region)
!     : ArchiveBuilder(rw_region, ro_region) {}
! 
!   virtual void iterate_roots(MetaspaceClosure* it, bool is_relocating_pointers) {
      FileMapInfo::metaspace_pointers_do(it, false);
      SystemDictionaryShared::dumptime_classes_do(it);
      Universe::metaspace_pointers_do(it);
      SymbolTable::metaspace_pointers_do(it);
      vmSymbols::metaspace_pointers_do(it);
    }
  };
  
  void VM_PopulateDumpSharedSpace::dump_symbols() {
    log_info(cds)("Dumping symbol table ...");
  
    NOT_PRODUCT(SymbolTable::verify());
    SymbolTable::write_to_archive();
  }
  
  char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
!   ArchiveBuilder::OtherROAllocMark mark;
  
    log_info(cds)("Removing java_mirror ... ");
    if (!HeapShared::is_heap_object_archiving_allowed()) {
      Universe::clear_basic_type_mirrors();
    }
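
Note: for readers new to this code, the mechanism that ArchiveCompactor implemented and that ArchiveBuilder now provides is, in outline: shallow-copy each reachable metaspace object into an archive region, record old address -> new address in a relocation table, then walk the copies and rewrite their embedded pointers through that table. The sketch below shows the pattern with simplified, hypothetical Obj/Archive types; it is not the HotSpot implementation.

    #include <deque>
    #include <unordered_map>

    struct Obj {
      Obj* ref;      // one embedded pointer to another Obj (may be nullptr)
      int  payload;
    };

    struct Archive {
      std::deque<Obj> buffer;                          // copies; deque keeps addresses stable
      std::unordered_map<const Obj*, Obj*> new_loc;    // relocation table: original -> copy

      // Phase 1+2: shallow-copy the object and remember where the copy lives.
      void copy(const Obj* src) {
        buffer.push_back(*src);
        new_loc[src] = &buffer.back();   // each object is relocated at most once
      }

      // Phase 3: patch the embedded pointer in every copy so it points at the
      // copy of its target. Assumes every referenced object was itself copied.
      void relocate_pointers() {
        for (Obj& copy : buffer) {
          if (copy.ref != nullptr) {
            copy.ref = new_loc.at(copy.ref);
          }
        }
      }
    };
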
*** 1545,1575 ****
    dump_archive_heap_oopmaps();
  
    return start;
  }
  
- void VM_PopulateDumpSharedSpace::print_class_stats() {
-   log_info(cds)("Number of classes %d", _global_klass_objects->length());
-   {
-     int num_type_array = 0, num_obj_array = 0, num_inst = 0;
-     for (int i = 0; i < _global_klass_objects->length(); i++) {
-       Klass* k = _global_klass_objects->at(i);
-       if (k->is_instance_klass()) {
-         num_inst ++;
-       } else if (k->is_objArray_klass()) {
-         num_obj_array ++;
-       } else {
-         assert(k->is_typeArray_klass(), "sanity");
-         num_type_array ++;
-       }
-     }
-     log_info(cds)("    instance classes   = %5d", num_inst);
-     log_info(cds)("    obj array classes  = %5d", num_obj_array);
-     log_info(cds)("    type array classes = %5d", num_type_array);
-   }
- }
- 
  void VM_PopulateDumpSharedSpace::relocate_to_requested_base_address(CHeapBitMap* ptrmap) {
    intx addr_delta = MetaspaceShared::final_delta();
    if (addr_delta == 0) {
      ArchivePtrMarker::compact((address)SharedBaseAddress, (address)_ro_region.top());
    } else {
--- 1078,1087 ----
*** 1638,1653 ****
    // At this point, many classes have been loaded.
  
    // Gather systemDictionary classes in a global array and do everything to
    // that so we don't have to walk the SystemDictionary again.
    SystemDictionaryShared::check_excluded_classes();
-   _global_klass_objects = new GrowableArray<Klass*>(1000);
-   CollectClassesClosure collect_classes;
-   ClassLoaderDataGraph::loaded_classes_do(&collect_classes);
-   _global_klass_objects->sort(global_klass_compare);
  
!   print_class_stats();
  
    // Ensure the ConstMethods won't be modified at run-time
    log_info(cds)("Updating ConstMethods ... ");
    rewrite_nofast_bytecodes_and_calculate_fingerprints(THREAD);
    log_info(cds)("done. ");
--- 1150,1163 ----
    // At this point, many classes have been loaded.
  
    // Gather systemDictionary classes in a global array and do everything to
    // that so we don't have to walk the SystemDictionary again.
    SystemDictionaryShared::check_excluded_classes();
  
!   StaticArchiveBuilder builder(&_rw_region, &_ro_region);
!   builder.gather_klasses_and_symbols();
!   _global_klass_objects = builder.klasses();
  
    // Ensure the ConstMethods won't be modified at run-time
    log_info(cds)("Updating ConstMethods ... ");
    rewrite_nofast_bytecodes_and_calculate_fingerprints(THREAD);
    log_info(cds)("done. ");
*** 1655,1679 ****
    // Remove all references outside the metadata
    log_info(cds)("Removing unshareable information ... ");
    remove_unshareable_in_classes();
    log_info(cds)("done. ");
  
    MetaspaceShared::allocate_cloned_cpp_vtptrs();
    char* cloned_vtables = _mc_region.top();
    MetaspaceShared::allocate_cpp_vtable_clones();
  
!   ArchiveCompactor::initialize();
!   ArchiveCompactor::copy_and_compact();
  
    dump_symbols();
  
    // Dump supported java heap objects
    _closed_archive_heap_regions = NULL;
    _open_archive_heap_regions = NULL;
    dump_java_heap_objects();
  
!   ArchiveCompactor::relocate_well_known_klasses();
  
    char* serialized_data = dump_read_only_tables();
    _ro_region.pack();
  
    // The vtable clones contain addresses of the current process.
--- 1165,1194 ----
    // Remove all references outside the metadata
    log_info(cds)("Removing unshareable information ... ");
    remove_unshareable_in_classes();
    log_info(cds)("done. ");
  
+   builder.gather_source_objs();
+ 
    MetaspaceShared::allocate_cloned_cpp_vtptrs();
    char* cloned_vtables = _mc_region.top();
    MetaspaceShared::allocate_cpp_vtable_clones();
  
!   _mc_region.pack(&_rw_region);
!   builder.dump_rw_region();
!   _rw_region.pack(&_ro_region);
!   builder.dump_ro_region();
!   builder.relocate_pointers();
  
    dump_symbols();
  
    // Dump supported java heap objects
    _closed_archive_heap_regions = NULL;
    _open_archive_heap_regions = NULL;
    dump_java_heap_objects();
  
!   builder.relocate_well_known_klasses();
  
    char* serialized_data = dump_read_only_tables();
    _ro_region.pack();
  
    // The vtable clones contain addresses of the current process.
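
Note: the rewritten sequence above alternates region packing with the builder's copy phases: _mc_region.pack(&_rw_region) closes the mc region and starts the rw region right after it, builder.dump_rw_region() fills it, and the same hand-off then repeats for ro, which keeps the regions contiguous. A rough standalone sketch of that pack-style hand-off (Region and the fixed 16-byte alignment are assumptions for illustration, not DumpRegion's real interface):

    #include <cstddef>
    #include <cstdint>

    struct Region {
      std::uintptr_t base = 0;
      std::uintptr_t top  = 0;

      // Bump-allocate 'bytes' from this region, keeping 16-byte alignment.
      std::uintptr_t allocate(std::size_t bytes) {
        std::uintptr_t p = top;
        top += (bytes + 15) & ~static_cast<std::uintptr_t>(15);
        return p;
      }

      // Close this region: align its top, then start the next region there,
      // so consecutive regions end up back to back.
      void pack(Region* next) {
        top = (top + 15) & ~static_cast<std::uintptr_t>(15);
        next->base = next->top = top;
      }
    };
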
*** 1712,1723 ****
    mapinfo->write_header();
    print_region_stats(mapinfo);
    mapinfo->close();
  
    if (log_is_enabled(Info, cds)) {
!     ArchiveCompactor::alloc_stats()->print_stats(int(_ro_region.used()), int(_rw_region.used()),
!                                                  int(_mc_region.used()));
    }
  
    if (PrintSystemDictionaryAtExit) {
      SystemDictionary::print();
    }
--- 1227,1237 ----
    mapinfo->write_header();
    print_region_stats(mapinfo);
    mapinfo->close();
  
    if (log_is_enabled(Info, cds)) {
!     builder.print_stats(int(_ro_region.used()), int(_rw_region.used()), int(_mc_region.used()));
    }
  
    if (PrintSystemDictionaryAtExit) {
      SystemDictionary::print();
    }
*** 1799,1815 ****
  
  // Update a Java object to point its Klass* to the new location after
  // shared archive has been compacted.
  void MetaspaceShared::relocate_klass_ptr(oop o) {
    assert(DumpSharedSpaces, "sanity");
!   Klass* k = ArchiveCompactor::get_relocated_klass(o->klass());
    o->set_klass(k);
  }
  
  Klass* MetaspaceShared::get_relocated_klass(Klass *k, bool is_final) {
    assert(DumpSharedSpaces, "sanity");
!   k = ArchiveCompactor::get_relocated_klass(k);
    if (is_final) {
      k = (Klass*)(address(k) + final_delta());
    }
    return k;
  }
--- 1313,1329 ----
  
  // Update a Java object to point its Klass* to the new location after
  // shared archive has been compacted.
  void MetaspaceShared::relocate_klass_ptr(oop o) {
    assert(DumpSharedSpaces, "sanity");
!   Klass* k = ArchiveBuilder::get_relocated_klass(o->klass());
    o->set_klass(k);
  }
  
  Klass* MetaspaceShared::get_relocated_klass(Klass *k, bool is_final) {
    assert(DumpSharedSpaces, "sanity");
!   k = ArchiveBuilder::get_relocated_klass(k);
    if (is_final) {
      k = (Klass*)(address(k) + final_delta());
    }
    return k;
  }
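
Note: get_relocated_klass() above combines two translations: ArchiveBuilder's dump-time relocation (original object -> its copy in the dump buffer) and, when is_final is true, final_delta(), the constant offset to where that copy will sit once the archive is mapped at its requested base. A one-line sketch of the second step (hypothetical helper, assuming final_delta() == requested_base - buffer_base):

    #include <cstdint>

    // Translate a dump-time buffer address to the address it will have when the
    // archive is mapped at its requested base (sketch; names are illustrative).
    inline std::uintptr_t to_requested_address(std::uintptr_t buffer_addr,
                                               std::uintptr_t buffer_base,
                                               std::uintptr_t requested_base) {
      return buffer_addr + (requested_base - buffer_base);
    }
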
*** 2012,2022 ****
    // See FileMapInfo::write_archive_heap_regions() for details.
    _closed_archive_heap_regions = new GrowableArray<MemRegion>(2);
    _open_archive_heap_regions = new GrowableArray<MemRegion>(2);
    HeapShared::archive_java_heap_objects(_closed_archive_heap_regions,
                                          _open_archive_heap_regions);
  
!   ArchiveCompactor::OtherROAllocMark mark;
    HeapShared::write_subgraph_info_table();
  }
  
  void VM_PopulateDumpSharedSpace::dump_archive_heap_oopmaps() {
    if (HeapShared::is_heap_object_archiving_allowed()) {
--- 1526,1536 ----
    // See FileMapInfo::write_archive_heap_regions() for details.
    _closed_archive_heap_regions = new GrowableArray<MemRegion>(2);
    _open_archive_heap_regions = new GrowableArray<MemRegion>(2);
    HeapShared::archive_java_heap_objects(_closed_archive_heap_regions,
                                          _open_archive_heap_regions);
  
!   ArchiveBuilder::OtherROAllocMark mark;
    HeapShared::write_subgraph_info_table();
  }
  
  void VM_PopulateDumpSharedSpace::dump_archive_heap_oopmaps() {
    if (HeapShared::is_heap_object_archiving_allowed()) {