--- old/src/hotspot/share/classfile/classLoaderData.cpp	2018-03-05 09:15:03.840011100 +0100
+++ new/src/hotspot/share/classfile/classLoaderData.cpp	2018-03-05 09:15:03.511007900 +0100
@@ -710,7 +710,7 @@
   }

   // release the metaspace
-  Metaspace *m = _metaspace;
+  ClassLoaderMetaspace *m = _metaspace;
   if (m != NULL) {
     _metaspace = NULL;
     delete m;
@@ -764,32 +764,32 @@
   return is_builtin_class_loader_data() && !is_anonymous();
 }

-Metaspace* ClassLoaderData::metaspace_non_null() {
+ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
   // If the metaspace has not been allocated, create a new one.  Might want
   // to create smaller arena for Reflection class loaders also.
   // The reason for the delayed allocation is because some class loaders are
   // simply for delegating with no metadata of their own.
   // Lock-free access requires load_acquire.
-  Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
+  ClassLoaderMetaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
   if (metaspace == NULL) {
     MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
     // Check if _metaspace got allocated while we were waiting for this lock.
     if ((metaspace = _metaspace) == NULL) {
       if (this == the_null_class_loader_data()) {
         assert (class_loader() == NULL, "Must be");
-        metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
+        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
       } else if (is_anonymous()) {
         if (class_loader() != NULL) {
           log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
         }
-        metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
+        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
       } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
         if (class_loader() != NULL) {
           log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
         }
-        metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
+        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
       } else {
-        metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
+        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
       }
       // Ensure _metaspace is stable, since it is examined without a lock
       OrderAccess::release_store(&_metaspace, metaspace);
@@ -1266,7 +1266,8 @@
 bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
   assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
   for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
-    if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
+    // Todo: this seems wrong. I opened JDK-8199007.
+    if (cld->metaspace_or_null() != NULL && Metaspace::contains(x)) {
       return true;
     }
   }
--- old/src/hotspot/share/classfile/classLoaderData.hpp	2018-03-05 09:15:05.771022800 +0100
+++ new/src/hotspot/share/classfile/classLoaderData.hpp	2018-03-05 09:15:05.432020200 +0100
@@ -240,7 +240,7 @@
   Dependencies _dependencies; // holds dependencies from this class loader
                               // data to others.

-  Metaspace * volatile _metaspace;  // Meta-space where meta-data defined by the
+  ClassLoaderMetaspace * volatile _metaspace;  // Meta-space where meta-data defined by the
                                     // classes in the class loader are allocated.
   Mutex* _metaspace_lock;  // Locks the metaspace for allocations and setup.
   bool _unloading;         // true if this class loader goes away
@@ -281,8 +281,8 @@

   // ReadOnly and ReadWrite metaspaces (static because only on the null
   // class loader for now).
-  static Metaspace* _ro_metaspace;
-  static Metaspace* _rw_metaspace;
+  static ClassLoaderMetaspace* _ro_metaspace;
+  static ClassLoaderMetaspace* _rw_metaspace;

   TRACE_DEFINE_TRACE_ID_FIELD;

@@ -331,7 +331,7 @@
   bool is_alive(BoolObjectClosure* is_alive_closure) const;

   // Accessors
-  Metaspace* metaspace_or_null() const { return _metaspace; }
+  ClassLoaderMetaspace* metaspace_or_null() const { return _metaspace; }
   static ClassLoaderData* the_null_class_loader_data() {
     return _the_null_class_loader_data;
   }
@@ -368,7 +368,7 @@

   // The Metaspace is created lazily so may be NULL.  This
   // method will allocate a Metaspace if needed.
-  Metaspace* metaspace_non_null();
+  ClassLoaderMetaspace* metaspace_non_null();

   oop class_loader() const { return _class_loader; }

@@ -446,9 +446,9 @@
   ClassLoaderDataGraphMetaspaceIterator();
   ~ClassLoaderDataGraphMetaspaceIterator();
   bool repeat() { return _data != NULL; }
-  Metaspace* get_next() {
+  ClassLoaderMetaspace* get_next() {
     assert(_data != NULL, "Should not be NULL in call to the iterator");
-    Metaspace* result = _data->metaspace_or_null();
+    ClassLoaderMetaspace* result = _data->metaspace_or_null();
     _data = _data->next();
     // This result might be NULL for class loaders without metaspace
     // yet. It would be nice to return only non-null results but
--- old/src/hotspot/share/classfile/classLoaderStats.cpp	2018-03-05 09:15:07.664040700 +0100
+++ new/src/hotspot/share/classfile/classLoaderStats.cpp	2018-03-05 09:15:07.318038800 +0100
@@ -76,7 +76,7 @@
   }
   _total_classes += csc._num_classes;

-  Metaspace* ms = cld->metaspace_or_null();
+  ClassLoaderMetaspace* ms = cld->metaspace_or_null();
   if (ms != NULL) {
     if(cld->is_anonymous()) {
       cls->_anon_chunk_sz += ms->allocated_chunks_bytes();
--- old/src/hotspot/share/memory/metaspace.cpp	2018-03-05 09:15:09.514056700 +0100
+++ new/src/hotspot/share/memory/metaspace.cpp	2018-03-05 09:15:09.176054100 +0100
@@ -1226,6 +1226,7 @@
 // SpaceManager - used by Metaspace to handle allocations
 class SpaceManager : public CHeapObj<mtClass> {
   friend class Metaspace;
+  friend class ClassLoaderMetaspace;
   friend class Metadebug;

  private:
@@ -3831,7 +3832,7 @@
   size_t used = 0;
   ClassLoaderDataGraphMetaspaceIterator iter;
   while (iter.repeat()) {
-    Metaspace* msp = iter.get_next();
+    ClassLoaderMetaspace* msp = iter.get_next();
     // Sum allocated_blocks_words for each metaspace
     if (msp != NULL) {
       used += msp->used_words_slow(mdtype);
@@ -3844,7 +3845,7 @@
   size_t free = 0;
   ClassLoaderDataGraphMetaspaceIterator iter;
   while (iter.repeat()) {
-    Metaspace* msp = iter.get_next();
+    ClassLoaderMetaspace* msp = iter.get_next();
     if (msp != NULL) {
       free += msp->free_words_slow(mdtype);
     }
@@ -3861,7 +3862,7 @@
   size_t capacity = 0;
   ClassLoaderDataGraphMetaspaceIterator iter;
   while (iter.repeat()) {
-    Metaspace* msp = iter.get_next();
+    ClassLoaderMetaspace* msp = iter.get_next();
     if (msp != NULL) {
       capacity += msp->capacity_words_slow(mdtype);
     }
@@ -3994,7 +3995,7 @@
   size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
   ClassLoaderDataGraphMetaspaceIterator iter;
   while (iter.repeat()) {
-    Metaspace* msp = iter.get_next();
+    ClassLoaderMetaspace* msp = iter.get_next();
     if (msp != NULL) {
       cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
       cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
@@ -4021,7 +4022,7 @@

   ClassLoaderDataGraphMetaspaceIterator iter;
   while (iter.repeat()) {
-    Metaspace* msp = iter.get_next();
+    ClassLoaderMetaspace* msp = iter.get_next();
     if (msp != NULL) {
       specialized_waste += msp->vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
       specialized_count += msp->vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
@@ -4106,7 +4107,7 @@
     assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
     if (cld->is_unloading()) return;

-    Metaspace* msp = cld->metaspace_or_null();
+    ClassLoaderMetaspace* msp = cld->metaspace_or_null();
     if (msp == NULL) {
       return;
     }
@@ -4125,11 +4126,11 @@
   }

 private:
-  void print_metaspace(Metaspace* msp, bool anonymous);
+  void print_metaspace(ClassLoaderMetaspace* msp, bool anonymous);
   void print_summary() const;
 };

-void PrintCLDMetaspaceInfoClosure::print_metaspace(Metaspace* msp, bool anonymous){
+void PrintCLDMetaspaceInfoClosure::print_metaspace(ClassLoaderMetaspace* msp, bool anonymous){
   assert(msp != NULL, "Sanity");
   SpaceManager* vsm = msp->vsm();
   const char* unit = scale_unit(_scale);
@@ -4328,13 +4329,13 @@
 size_t Metaspace::_commit_alignment = 0;
 size_t Metaspace::_reserve_alignment = 0;

-Metaspace::Metaspace(Mutex* lock, MetaspaceType type) {
+ClassLoaderMetaspace::ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type) {
   initialize(lock, type);
 }

-Metaspace::~Metaspace() {
+ClassLoaderMetaspace::~ClassLoaderMetaspace() {
   delete _vsm;
-  if (using_class_space()) {
+  if (Metaspace::using_class_space()) {
     delete _class_vsm;
   }
 }
@@ -4673,7 +4674,7 @@
   MetaspaceGC::post_initialize();
 }

-void Metaspace::initialize_first_chunk(MetaspaceType type, MetadataType mdtype) {
+void ClassLoaderMetaspace::initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
   Metachunk* chunk = get_initialization_chunk(type, mdtype);
   if (chunk != NULL) {
     // Add to this manager's list of chunks in use and current_chunk().
@@ -4681,14 +4682,14 @@
   }
 }

-Metachunk* Metaspace::get_initialization_chunk(MetaspaceType type, MetadataType mdtype) {
+Metachunk* ClassLoaderMetaspace::get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);

   // Get a chunk from the chunk freelist
-  Metachunk* chunk = get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
+  Metachunk* chunk = Metaspace::get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);

   if (chunk == NULL) {
-    chunk = get_space_list(mdtype)->get_new_chunk(chunk_word_size,
+    chunk = Metaspace::get_space_list(mdtype)->get_new_chunk(chunk_word_size,
                                                   get_space_manager(mdtype)->medium_chunk_bunch());
   }
@@ -4705,25 +4706,25 @@
   }
 }

-void Metaspace::initialize(Mutex* lock, MetaspaceType type) {
-  verify_global_initialization();
+void ClassLoaderMetaspace::initialize(Mutex* lock, Metaspace::MetaspaceType type) {
+  Metaspace::verify_global_initialization();

   // Allocate SpaceManager for metadata objects.
-  _vsm = new SpaceManager(NonClassType, type, lock);
+  _vsm = new SpaceManager(Metaspace::NonClassType, type, lock);

-  if (using_class_space()) {
+  if (Metaspace::using_class_space()) {
     // Allocate SpaceManager for classes.
-    _class_vsm = new SpaceManager(ClassType, type, lock);
+    _class_vsm = new SpaceManager(Metaspace::ClassType, type, lock);
   }

   MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);

   // Allocate chunk for metadata objects
-  initialize_first_chunk(type, NonClassType);
+  initialize_first_chunk(type, Metaspace::NonClassType);

   // Allocate chunk for class metadata objects
-  if (using_class_space()) {
-    initialize_first_chunk(type, ClassType);
+  if (Metaspace::using_class_space()) {
+    initialize_first_chunk(type, Metaspace::ClassType);
   }
 }
@@ -4732,18 +4733,18 @@
   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
 }

-MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
-  assert(!_frozen, "sanity");
+MetaWord* ClassLoaderMetaspace::allocate(size_t word_size, Metaspace::MetadataType mdtype) {
+  Metaspace::assert_not_frozen();
   // Don't use class_vsm() unless UseCompressedClassPointers is true.
-  if (is_class_space_allocation(mdtype)) {
+  if (Metaspace::is_class_space_allocation(mdtype)) {
     return class_vsm()->allocate(word_size);
   } else {
     return vsm()->allocate(word_size);
   }
 }

-MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
-  assert(!_frozen, "sanity");
+MetaWord* ClassLoaderMetaspace::expand_and_allocate(size_t word_size, Metaspace::MetadataType mdtype) {
+  Metaspace::assert_not_frozen();
   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
   assert(delta_bytes > 0, "Must be");
@@ -4761,7 +4762,7 @@
   } while (!incremented && res == NULL);

   if (incremented) {
-    tracer()->report_gc_threshold(before, after,
+    Metaspace::tracer()->report_gc_threshold(before, after,
                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
   }
@@ -4769,18 +4770,18 @@
   return res;
 }

-size_t Metaspace::used_words_slow(MetadataType mdtype) const {
-  if (mdtype == ClassType) {
-    return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
+size_t ClassLoaderMetaspace::used_words_slow(Metaspace::MetadataType mdtype) const {
+  if (mdtype == Metaspace::ClassType) {
+    return Metaspace::using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
   } else {
     return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
   }
 }

-size_t Metaspace::free_words_slow(MetadataType mdtype) const {
-  assert(!_frozen, "sanity");
-  if (mdtype == ClassType) {
-    return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
+size_t ClassLoaderMetaspace::free_words_slow(Metaspace::MetadataType mdtype) const {
+  Metaspace::assert_not_frozen();
+  if (mdtype == Metaspace::ClassType) {
+    return Metaspace::using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
   } else {
     return vsm()->sum_free_in_chunks_in_use();
   }
@@ -4791,40 +4792,40 @@
 // have been made. Don't include space in the global freelist and
 // in the space available in the dictionary which
 // is already counted in some chunk.
-size_t Metaspace::capacity_words_slow(MetadataType mdtype) const {
-  if (mdtype == ClassType) {
-    return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
+size_t ClassLoaderMetaspace::capacity_words_slow(Metaspace::MetadataType mdtype) const {
+  if (mdtype == Metaspace::ClassType) {
+    return Metaspace::using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
   } else {
     return vsm()->sum_capacity_in_chunks_in_use();
   }
 }

-size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {
+size_t ClassLoaderMetaspace::used_bytes_slow(Metaspace::MetadataType mdtype) const {
   return used_words_slow(mdtype) * BytesPerWord;
 }

-size_t Metaspace::capacity_bytes_slow(MetadataType mdtype) const {
+size_t ClassLoaderMetaspace::capacity_bytes_slow(Metaspace::MetadataType mdtype) const {
   return capacity_words_slow(mdtype) * BytesPerWord;
 }

-size_t Metaspace::allocated_blocks_bytes() const {
+size_t ClassLoaderMetaspace::allocated_blocks_bytes() const {
   return vsm()->allocated_blocks_bytes() +
-    (using_class_space() ? class_vsm()->allocated_blocks_bytes() : 0);
+    (Metaspace::using_class_space() ? class_vsm()->allocated_blocks_bytes() : 0);
 }

-size_t Metaspace::allocated_chunks_bytes() const {
+size_t ClassLoaderMetaspace::allocated_chunks_bytes() const {
   return vsm()->allocated_chunks_bytes() +
-    (using_class_space() ? class_vsm()->allocated_chunks_bytes() : 0);
+    (Metaspace::using_class_space() ? class_vsm()->allocated_chunks_bytes() : 0);
 }

-void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
-  assert(!_frozen, "sanity");
+void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
+  Metaspace::assert_not_frozen();
   assert(!SafepointSynchronize::is_at_safepoint()
          || Thread::current()->is_VM_thread(), "should be the VM thread");

   MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);

-  if (is_class && using_class_space()) {
+  if (is_class && Metaspace::using_class_space()) {
     class_vsm()->deallocate(ptr, word_size);
   } else {
     vsm()->deallocate(ptr, word_size);
@@ -4869,8 +4870,8 @@
   return result;
 }

-size_t Metaspace::class_chunk_size(size_t word_size) {
-  assert(using_class_space(), "Has to use class space");
+size_t ClassLoaderMetaspace::class_chunk_size(size_t word_size) {
+  assert(Metaspace::using_class_space(), "Has to use class space");
   return class_vsm()->calc_chunk_size(word_size);
 }

@@ -4897,7 +4898,7 @@

   bool out_of_compressed_class_space = false;
   if (is_class_space_allocation(mdtype)) {
-    Metaspace* metaspace = loader_data->metaspace_non_null();
+    ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
     out_of_compressed_class_space =
       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
@@ -4950,11 +4951,11 @@
   }
 }

-void Metaspace::print_on(outputStream* out) const {
+void ClassLoaderMetaspace::print_on(outputStream* out) const {
   // Print both class virtual space counts and metaspace.
   if (Verbose) {
     vsm()->print_on(out);
-    if (using_class_space()) {
+    if (Metaspace::using_class_space()) {
       class_vsm()->print_on(out);
     }
   }
@@ -4975,17 +4976,17 @@
   return get_space_list(NonClassType)->contains(ptr);
 }

-void Metaspace::verify() {
+void ClassLoaderMetaspace::verify() {
   vsm()->verify();
-  if (using_class_space()) {
+  if (Metaspace::using_class_space()) {
     class_vsm()->verify();
   }
 }

-void Metaspace::dump(outputStream* const out) const {
+void ClassLoaderMetaspace::dump(outputStream* const out) const {
   out->print_cr("\nVirtual space manager: " INTPTR_FORMAT, p2i(vsm()));
   vsm()->dump(out);
-  if (using_class_space()) {
+  if (Metaspace::using_class_space()) {
     out->print_cr("\nClass space manager: " INTPTR_FORMAT, p2i(class_vsm()));
     class_vsm()->dump(out);
   }
--- old/src/hotspot/share/memory/metaspace.hpp	2018-03-05 09:15:11.500072100 +0100
+++ new/src/hotspot/share/memory/metaspace.hpp	2018-03-05 09:15:11.167070900 +0100
@@ -80,16 +80,21 @@
 //                    allocate() method returns a block for use as a
 //                    quantum of metadata.

-class Metaspace : public CHeapObj<mtClass> {
-  friend class VMStructs;
+// Namespace for important central static functions
+// (auxiliary stuff goes into MetaspaceUtils)
+class Metaspace : public AllStatic {
+/*  friend class VMStructs;
   friend class SpaceManager;
   friend class VM_CollectForMetadataAllocation;
   friend class MetaspaceGC;
   friend class MetaspaceUtils;
-  friend class MetaspaceShared;
+  friend class CollectedHeap;
   friend class PrintCLDMetaspaceInfoClosure;
   friend class MetaspaceAllocationTest;
+*/
+  friend class MetaspaceShared;
+
  public:

   enum MetadataType {
@@ -105,15 +110,6 @@
   };

  private:
-  static void verify_global_initialization();
-
-  void initialize(Mutex* lock, MetaspaceType type);
-
-  // Initialize the first chunk for a Metaspace.  Used for
-  // special cases such as the boot class loader, reflection
-  // class loader and anonymous class loader.
-  void initialize_first_chunk(MetaspaceType type, MetadataType mdtype);
-  Metachunk* get_initialization_chunk(MetaspaceType type, MetadataType mdtype);

   // Align up the word size to the allocation word size
   static size_t align_word_size_up(size_t);
@@ -136,23 +132,6 @@
   static size_t _reserve_alignment;
   DEBUG_ONLY(static bool _frozen;)

-  SpaceManager* _vsm;
-  SpaceManager* vsm() const { return _vsm; }
-
-  SpaceManager* _class_vsm;
-  SpaceManager* class_vsm() const { return _class_vsm; }
-  SpaceManager* get_space_manager(MetadataType mdtype) {
-    assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
-    return mdtype == ClassType ? class_vsm() : vsm();
-  }
-
-  // Allocate space for metadata of type mdtype.  This is space
-  // within a Metachunk and is used by
-  //   allocate(ClassLoaderData*, size_t, bool, MetadataType, TRAPS)
-  MetaWord* allocate(size_t word_size, MetadataType mdtype);
-
-  MetaWord* expand_and_allocate(size_t size, MetadataType mdtype);
-
   // Virtual Space lists for both classes and other metadata
   static VirtualSpaceList* _space_list;
   static VirtualSpaceList* _class_space_list;
@@ -187,6 +166,9 @@
     assert(DumpSharedSpaces, "sanity");
     DEBUG_ONLY(_frozen = true;)
   }
+  static void assert_not_frozen() {
+    assert(!_frozen, "sanity");
+  }
 #ifdef _LP64
   static void allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base);
 #endif
@@ -201,17 +183,15 @@
   static void initialize_class_space(ReservedSpace rs);
 #endif

-  size_t class_chunk_size(size_t word_size);

  public:

-  Metaspace(Mutex* lock, MetaspaceType type);
-  ~Metaspace();
-
   static void ergo_initialize();
   static void global_initialize();
   static void post_initialize();

+  static void verify_global_initialization();
+
   static size_t first_chunk_word_size() { return _first_chunk_word_size; }
   static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }

@@ -220,16 +200,6 @@
   static size_t commit_alignment()       { return _commit_alignment; }
   static size_t commit_alignment_words() { return _commit_alignment / BytesPerWord; }

-  size_t used_words_slow(MetadataType mdtype) const;
-  size_t free_words_slow(MetadataType mdtype) const;
-  size_t capacity_words_slow(MetadataType mdtype) const;
-
-  size_t used_bytes_slow(MetadataType mdtype) const;
-  size_t capacity_bytes_slow(MetadataType mdtype) const;
-
-  size_t allocated_blocks_bytes() const;
-  size_t allocated_chunks_bytes() const;
-
   static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
                             MetaspaceObj::Type type, TRAPS);
   void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
@@ -237,8 +207,6 @@
   static bool contains(const void* ptr);
   static bool contains_non_shared(const void* ptr);

-  void dump(outputStream* const out) const;
-
   // Free empty virtualspaces
   static void purge(MetadataType mdtype);
   static void purge();
@@ -248,10 +216,6 @@

   static const char* metadata_type_name(Metaspace::MetadataType mdtype);

-  void print_on(outputStream* st) const;
-  // Debugging support
-  void verify();
-
   static void print_compressed_class_space(outputStream* st, const char* requested_addr = 0) NOT_LP64({});

   // Return TRUE only if UseCompressedClassPointers is True.
@@ -265,6 +229,80 @@

 };

+// Manages the metaspace portion belonging to a class loader
+class ClassLoaderMetaspace : public CHeapObj<mtClass> {
+/*  friend class VMStructs;
+  friend class SpaceManager;
+  friend class VM_CollectForMetadataAllocation;
+  friend class MetaspaceGC;
+
+  friend class MetaspaceShared;
+
+  friend class PrintCLDMetaspaceInfoClosure;
+  friend class MetaspaceAllocationTest;
+*/
+
+  friend class CollectedHeap; // For expand_and_allocate()
+  friend class Metaspace;
+  friend class MetaspaceUtils;
+  friend class PrintCLDMetaspaceInfoClosure;
+  friend class VM_CollectForMetadataAllocation; // For expand_and_allocate()
+
+ private:
+
+  void initialize(Mutex* lock, Metaspace::MetaspaceType type);
+
+  // Initialize the first chunk for a Metaspace.  Used for
+  // special cases such as the boot class loader, reflection
+  // class loader and anonymous class loader.
+  void initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype);
+  Metachunk* get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype);
+
+  SpaceManager* _vsm;
+  SpaceManager* vsm() const { return _vsm; }
+
+  SpaceManager* _class_vsm;
+  SpaceManager* class_vsm() const { return _class_vsm; }
+  SpaceManager* get_space_manager(Metaspace::MetadataType mdtype) {
+    assert(mdtype != Metaspace::MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
+    return mdtype == Metaspace::ClassType ? class_vsm() : vsm();
+  }
+
+  MetaWord* expand_and_allocate(size_t size, Metaspace::MetadataType mdtype);
+
+  size_t class_chunk_size(size_t word_size);
+
+ public:
+
+  ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type);
+  ~ClassLoaderMetaspace();
+
+  // Allocate space for metadata of type mdtype.  This is space
+  // within a Metachunk and is used by
+  //   allocate(ClassLoaderData*, size_t, bool, MetadataType, TRAPS)
+  MetaWord* allocate(size_t word_size, Metaspace::MetadataType mdtype);
+
+  size_t used_words_slow(Metaspace::MetadataType mdtype) const;
+  size_t free_words_slow(Metaspace::MetadataType mdtype) const;
+  size_t capacity_words_slow(Metaspace::MetadataType mdtype) const;
+
+  size_t used_bytes_slow(Metaspace::MetadataType mdtype) const;
+  size_t capacity_bytes_slow(Metaspace::MetadataType mdtype) const;
+
+  size_t allocated_blocks_bytes() const;
+  size_t allocated_chunks_bytes() const;
+
+  void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
+
+  void dump(outputStream* const out) const;
+
+  void print_on(outputStream* st) const;
+  // Debugging support
+  void verify();
+
+}; // ClassLoaderMetaspace
+
+
 class MetaspaceUtils : AllStatic {

   static size_t free_chunks_total_words(Metaspace::MetadataType mdtype);
--- old/test/hotspot/gtest/memory/test_metaspace_allocation.cpp	2018-03-05 09:15:13.402084900 +0100
+++ new/test/hotspot/gtest/memory/test_metaspace_allocation.cpp	2018-03-05 09:15:13.064083400 +0100
@@ -73,7 +73,7 @@
   struct {
     size_t allocated;
     Mutex* lock;
-    Metaspace* space;
+    ClassLoaderMetaspace* space;
     bool is_empty() const { return allocated == 0; }
    bool is_full() const { return allocated >= MAX_PER_METASPACE_ALLOCATION_WORDSIZE; }
   } _spaces[NUM_PARALLEL_METASPACES];
@@ -104,7 +104,7 @@
       // Let every ~10th space be an anonymous one to test different allocation patterns.
       const Metaspace::MetaspaceType msType = (os::random() % 100 < 10) ?
         Metaspace::AnonymousMetaspaceType : Metaspace::StandardMetaspaceType;
-      _spaces[i].space = new Metaspace(_spaces[i].lock, msType);
+      _spaces[i].space = new ClassLoaderMetaspace(_spaces[i].lock, msType);
       _spaces[i].allocated = 0;
       ASSERT_TRUE(_spaces[i].space != NULL);
     }