src/share/vm/memory/metaspace.cpp

rev 5190 : 8015107: NPG: Use consistent naming for metaspace concepts

*** 421,431 ****
    ChunkManager _chunk_manager;

    // Can this virtual list allocate >1 spaces? Also, used to determine
    // whether to allocate unlimited small chunks in this virtual space
    bool _is_class;
!   bool can_grow() const { return !is_class() || !UseCompressedKlassPointers; }

    // Sum of space in all virtual spaces and number of virtual spaces
    size_t _virtual_space_total;
    size_t _virtual_space_count;
--- 421,431 ----
    ChunkManager _chunk_manager;

    // Can this virtual list allocate >1 spaces? Also, used to determine
    // whether to allocate unlimited small chunks in this virtual space
    bool _is_class;
!   bool can_grow() const { return !is_class() || !UseCompressedClassPointers; }

    // Sum of space in all virtual spaces and number of virtual spaces
    size_t _virtual_space_total;
    size_t _virtual_space_count;
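A note on the hunk above: can_grow() says the compressed class space may never grow, because narrow class pointers are 32-bit offsets from a single fixed base reserved at startup. A minimal standalone model of that rule (not HotSpot code; the flag variable and struct name are invented for illustration):

    #include <cstdio>

    static bool kUseCompressedClassPointers = true;  // stands in for the VM flag

    struct VirtualSpaceListModel {
      bool is_class;  // true for the compressed class space list

      // The class-space list must not grow when compressed class pointers
      // are in use: narrow class pointers are 32-bit offsets from one fixed
      // base, so the reservation made at startup can never be extended.
      bool can_grow() const { return !is_class || !kUseCompressedClassPointers; }
    };

    int main() {
      VirtualSpaceListModel metadata    = { false };
      VirtualSpaceListModel class_space = { true };
      std::printf("metadata list can grow:    %d\n", metadata.can_grow());
      std::printf("class space list can grow: %d\n", class_space.can_grow());
      return 0;
    }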
*** 2834,2854 ****
  // Return TRUE if the specified metaspace_base and cds_base are close enough
  // to work with compressed klass pointers.
  bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base,
                                                  address cds_base) {
    assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
!   assert(UseCompressedKlassPointers, "Only use with CompressedKlassPtrs");
    address lower_base = MIN2((address)metaspace_base, cds_base);
    address higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
                                  (address)(metaspace_base + class_metaspace_size()));
    return ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint);
  }

  // Try to allocate the metaspace at the requested addr.
  void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr,
                                                           address cds_base) {
    assert(using_class_space(), "called improperly");
!   assert(UseCompressedKlassPointers, "Only use with CompressedKlassPtrs");
    assert(class_metaspace_size() < KlassEncodingMetaspaceMax,
           "Metaspace size is too big");
    ReservedSpace metaspace_rs = ReservedSpace(class_metaspace_size(),
                                               os::vm_allocation_granularity(),
--- 2834,2854 ----
  // Return TRUE if the specified metaspace_base and cds_base are close enough
  // to work with compressed klass pointers.
  bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base,
                                                  address cds_base) {
    assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
!   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
    address lower_base = MIN2((address)metaspace_base, cds_base);
    address higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
                                  (address)(metaspace_base + class_metaspace_size()));
    return ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint);
  }

  // Try to allocate the metaspace at the requested addr.
  void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr,
                                                           address cds_base) {
    assert(using_class_space(), "called improperly");
!   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
    assert(class_metaspace_size() < KlassEncodingMetaspaceMax,
           "Metaspace size is too big");
    ReservedSpace metaspace_rs = ReservedSpace(class_metaspace_size(),
                                               os::vm_allocation_granularity(),
--- 2867,2879 ----
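The range check in can_use_cds_with_metaspace_addr() above requires that the CDS archive and the class metaspace together span less than 4GB, so a 32-bit narrow klass pointer can reach both from one base. A standalone sketch of the same arithmetic, with made-up addresses and sizes:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t cds_base       = 0x800000000ULL;            // hypothetical CDS base
      uint64_t cds_size       = 256ULL * 1024 * 1024;      // hypothetical 256M archive
      uint64_t metaspace_base = cds_base + cds_size;       // class space placed above it
      uint64_t metaspace_size = 1ULL * 1024 * 1024 * 1024; // 1G class space

      uint64_t lower_base     = std::min(metaspace_base, cds_base);
      uint64_t higher_address = std::max(cds_base + cds_size,
                                         metaspace_base + metaspace_size);

      // Same test as the code above: max_juint is 0xFFFFFFFF, i.e. the span
      // must fit in the range a 32-bit narrow klass pointer can address.
      bool fits = (higher_address - lower_base) < (uint64_t)UINT32_MAX;
      std::printf("span = %llu bytes, usable with compressed class pointers: %d\n",
                  (unsigned long long)(higher_address - lower_base), fits);
      return 0;
    }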
*** 2867,2879 ****
      }
    }

    // If no successful allocation then try to allocate the space anywhere. If
    // that fails then OOM doom. At this point we cannot try allocating the
!   // metaspace as if UseCompressedKlassPointers is off because too much
!   // initialization has happened that depends on UseCompressedKlassPointers.
!   // So, UseCompressedKlassPointers cannot be turned off at this point.
    if (!metaspace_rs.is_reserved()) {
      metaspace_rs = ReservedSpace(class_metaspace_size(),
                                   os::vm_allocation_granularity(), false);
      if (!metaspace_rs.is_reserved()) {
        vm_exit_during_initialization(err_msg("Could not allocate metaspace: %d bytes",
--- 2867,2879 ----
      }
    }

    // If no successful allocation then try to allocate the space anywhere. If
    // that fails then OOM doom. At this point we cannot try allocating the
!   // metaspace as if UseCompressedClassPointers is off because too much
!   // initialization has happened that depends on UseCompressedClassPointers.
!   // So, UseCompressedClassPointers cannot be turned off at this point.
    if (!metaspace_rs.is_reserved()) {
      metaspace_rs = ReservedSpace(class_metaspace_size(),
                                   os::vm_allocation_granularity(), false);
      if (!metaspace_rs.is_reserved()) {
        vm_exit_during_initialization(err_msg("Could not allocate metaspace: %d bytes",
*** 2902,2917 ****
      gclog_or_tty->print_cr("Metaspace Size: " SIZE_FORMAT " Address: " PTR_FORMAT
                             " Req Addr: " PTR_FORMAT, class_metaspace_size(),
                             metaspace_rs.base(), requested_addr);
    }
  }

! // For UseCompressedKlassPointers the class space is reserved above the top of
  // the Java heap. The argument passed in is at the base of the compressed space.
  void Metaspace::initialize_class_space(ReservedSpace rs) {
    // The reserved space size may be bigger because of alignment, esp with UseLargePages
!   assert(rs.size() >= ClassMetaspaceSize,
!          err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), ClassMetaspaceSize));
    assert(using_class_space(), "Must be using class space");
    _class_space_list = new VirtualSpaceList(rs);
  }
  #endif
--- 2902,2917 ----
      gclog_or_tty->print_cr("Metaspace Size: " SIZE_FORMAT " Address: " PTR_FORMAT
                             " Req Addr: " PTR_FORMAT, class_metaspace_size(),
                             metaspace_rs.base(), requested_addr);
    }
  }

! // For UseCompressedClassPointers the class space is reserved above the top of
  // the Java heap. The argument passed in is at the base of the compressed space.
  void Metaspace::initialize_class_space(ReservedSpace rs) {
    // The reserved space size may be bigger because of alignment, esp with UseLargePages
!   assert(rs.size() >= CompressedClassSpaceSize,
!          err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), CompressedClassSpaceSize));
    assert(using_class_space(), "Must be using class space");
    _class_space_list = new VirtualSpaceList(rs);
  }
  #endif
*** 2919,2929 ****
  void Metaspace::global_initialize() {
    // Initialize the alignment for shared spaces.
    int max_alignment = os::vm_page_size();
    size_t cds_total = 0;

!   set_class_metaspace_size(align_size_up(ClassMetaspaceSize,
                                           os::vm_allocation_granularity()));

    MetaspaceShared::set_max_alignment(max_alignment);

    if (DumpSharedSpaces) {
--- 2919,2929 ----
  void Metaspace::global_initialize() {
    // Initialize the alignment for shared spaces.
    int max_alignment = os::vm_page_size();
    size_t cds_total = 0;

!   set_class_metaspace_size(align_size_up(CompressedClassSpaceSize,
                                           os::vm_allocation_granularity()));

    MetaspaceShared::set_max_alignment(max_alignment);

    if (DumpSharedSpaces) {
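The hunk above rounds the user-requested class space size up to the platform's allocation granularity before reserving it. A standalone sketch of that rounding; align_size_up() here mirrors the HotSpot idiom (alignment must be a power of two), and the values are examples only:

    #include <cstddef>
    #include <cstdio>

    static size_t align_size_up(size_t size, size_t alignment) {
      // Round size up to the next multiple of a power-of-two alignment.
      return (size + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      size_t granularity = 64 * 1024;          // e.g. Windows allocation granularity
      size_t requested   = 1000 * 1000 * 1000; // -XX:CompressedClassSpaceSize=1000000000
      std::printf("aligned class space size: %zu\n",
                  align_size_up(requested, granularity));  // prints 1000013824
      return 0;
    }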
*** 2939,2950 ****
      _space_list = new VirtualSpaceList(cds_total/wordSize);

  #ifdef _LP64
      // Set the compressed klass pointer base so that decoding of these pointers works
      // properly when creating the shared archive.
!     assert(UseCompressedOops && UseCompressedKlassPointers,
!            "UseCompressedOops and UseCompressedKlassPointers must be set");
      Universe::set_narrow_klass_base((address)_space_list->current_virtual_space()->bottom());
      if (TraceMetavirtualspaceAllocation && Verbose) {
        gclog_or_tty->print_cr("Setting_narrow_klass_base to Address: " PTR_FORMAT,
                               _space_list->current_virtual_space()->bottom());
      }
--- 2939,2950 ----
      _space_list = new VirtualSpaceList(cds_total/wordSize);

  #ifdef _LP64
      // Set the compressed klass pointer base so that decoding of these pointers works
      // properly when creating the shared archive.
!     assert(UseCompressedOops && UseCompressedClassPointers,
!            "UseCompressedOops and UseCompressedClassPointers must be set");
      Universe::set_narrow_klass_base((address)_space_list->current_virtual_space()->bottom());
      if (TraceMetavirtualspaceAllocation && Verbose) {
        gclog_or_tty->print_cr("Setting_narrow_klass_base to Address: " PTR_FORMAT,
                               _space_list->current_virtual_space()->bottom());
      }
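For context on why the narrow klass base matters in the hunk above: a compressed class pointer stored in an object header is decoded as base plus a (possibly shifted) 32-bit offset, so the base must point at the bottom of the space the pointers refer to. A standalone sketch of that decoding, with invented values:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t narrow_klass_base  = 0x800000000ULL; // bottom of class space (made up)
      unsigned narrow_klass_shift = 0;              // HotSpot uses a small shift or 0

      uint32_t narrow = 0x00012340;                 // 32-bit value from an object header
      uint64_t klass  = narrow_klass_base + ((uint64_t)narrow << narrow_klass_shift);
      std::printf("decoded Klass*: 0x%llx\n", (unsigned long long)klass);
      return 0;
    }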
*** 2977,2987 ****
        cds_total = FileMapInfo::shared_spaces_size();
        cds_address = (address)mapinfo->region_base(0);
      }

  #ifdef _LP64
!     // If UseCompressedKlassPointers is set then allocate the metaspace area
      // above the heap and above the CDS area (if it exists).
      if (using_class_space()) {
        if (UseSharedSpaces) {
          allocate_metaspace_compressed_klass_ptrs((char *)(cds_address + cds_total), cds_address);
        } else {
--- 2977,2987 ----
        cds_total = FileMapInfo::shared_spaces_size();
        cds_address = (address)mapinfo->region_base(0);
      }

  #ifdef _LP64
!     // If UseCompressedClassPointers is set then allocate the metaspace area
      // above the heap and above the CDS area (if it exists).
      if (using_class_space()) {
        if (UseSharedSpaces) {
          allocate_metaspace_compressed_klass_ptrs((char *)(cds_address + cds_total), cds_address);
        } else {
*** 2995,3005 ****
    _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
    // Make the first class chunk bigger than a medium chunk so it's not put
    // on the medium chunk list. The next chunk will be small and progress
    // from there. This size calculated by -version.
    _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
!                                       (ClassMetaspaceSize/BytesPerWord)*2);
    _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
    // Arbitrarily set the initial virtual space to a multiple
    // of the boot class loader size.
    size_t word_size = VIRTUALSPACEMULTIPLIER * first_chunk_word_size();
    // Initialize the list of virtual spaces.
--- 2995,3005 ----
    _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
    // Make the first class chunk bigger than a medium chunk so it's not put
    // on the medium chunk list. The next chunk will be small and progress
    // from there. This size calculated by -version.
    _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
!                                       (CompressedClassSpaceSize/BytesPerWord)*2);
    _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
    // Arbitrarily set the initial virtual space to a multiple
    // of the boot class loader size.
    size_t word_size = VIRTUALSPACEMULTIPLIER * first_chunk_word_size();
    // Initialize the list of virtual spaces.
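The MIN2 in the hunk above picks six medium chunks as the first class chunk in the common case; the second term only wins when the whole class space is configured very small. A standalone sketch of that sizing; the MediumChunk value is a stand-in, not the real HotSpot constant:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    int main() {
      const size_t BytesPerWord = 8;         // 64-bit words
      const size_t MediumChunk  = 4 * 1024;  // hypothetical medium chunk size in words
      size_t CompressedClassSpaceSize = 1024ULL * 1024 * 1024;  // 1G

      // Normally six medium chunks; the second term only clamps the chunk
      // when CompressedClassSpaceSize is set very low on the command line.
      size_t first_class_chunk_words =
          std::min(MediumChunk * 6, (CompressedClassSpaceSize / BytesPerWord) * 2);
      std::printf("first class chunk: %zu words\n", first_class_chunk_words);
      return 0;
    }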
*** 3062,3072 ****
    return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
  }

  MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
    // DumpSharedSpaces doesn't use class metadata area (yet)
!   // Also, don't use class_vsm() unless UseCompressedKlassPointers is true.
    if (mdtype == ClassType && using_class_space()) {
      return class_vsm()->allocate(word_size);
    } else {
      return vsm()->allocate(word_size);
    }
--- 3062,3072 ----
    return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
  }

  MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
    // DumpSharedSpaces doesn't use class metadata area (yet)
!   // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
    if (mdtype == ClassType && using_class_space()) {
      return class_vsm()->allocate(word_size);
    } else {
      return vsm()->allocate(word_size);
    }
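A standalone model of the two-arena dispatch in Metaspace::allocate() above (not HotSpot code; names are invented for illustration): class metadata goes to the compressed class space only when that space exists, everything else uses the main allocator.

    #include <cstdio>

    enum MetadataType { ClassType, NonClassType };

    static bool using_class_space = true;  // models a 64-bit VM with compressed class pointers

    static const char* pick_arena(MetadataType mdtype) {
      // Class metadata uses the class-space allocator only when a separate
      // compressed class space exists; otherwise everything shares one arena.
      if (mdtype == ClassType && using_class_space) {
        return "class space allocator (class_vsm)";
      } else {
        return "main metaspace allocator (vsm)";
      }
    }

    int main() {
      std::printf("ClassType    -> %s\n", pick_arena(ClassType));
      std::printf("NonClassType -> %s\n", pick_arena(NonClassType));
      return 0;
    }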
*** 3211,3221 ****
                SIZE_FORMAT, word_size);
      if (loader_data->metaspace_or_null() != NULL) loader_data->dump(gclog_or_tty);
      MetaspaceAux::dump(gclog_or_tty);
    }

    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
!   const char* space_string = (mdtype == ClassType) ? "Class Metadata space" : "Metadata space";
    report_java_out_of_memory(space_string);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(
--- 3211,3221 ----
                SIZE_FORMAT, word_size);
      if (loader_data->metaspace_or_null() != NULL) loader_data->dump(gclog_or_tty);
      MetaspaceAux::dump(gclog_or_tty);
    }

    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
!   const char* space_string = (mdtype == ClassType) ? "Compressed class space" : "Metadata space";
    report_java_out_of_memory(space_string);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(