src/share/vm/memory/metaspace.cpp

Print this page
rev 5190 : 8015107: NPG: Use consistent naming for metaspace concepts


 406 // It has a  _next link for singly linked list and a MemRegion
 407 // for total space in the VirtualSpace.
 408 class VirtualSpaceList : public CHeapObj<mtClass> {
 409   friend class VirtualSpaceNode;
 410 
 411   enum VirtualSpaceSizes {
 412     VirtualSpaceSize = 256 * K
 413   };
 414 
 415   // Global list of virtual spaces
 416   // Head of the list
 417   VirtualSpaceNode* _virtual_space_list;
 418   // virtual space currently being used for allocations
 419   VirtualSpaceNode* _current_virtual_space;
 420   // Free chunk list for all other metadata
 421   ChunkManager      _chunk_manager;
 422 
 423   // Can this virtual list allocate >1 spaces?  Also, used to determine
 424   // whether to allocate unlimited small chunks in this virtual space
 425   bool _is_class;
 426   bool can_grow() const { return !is_class() || !UseCompressedKlassPointers; }
 427 
 428   // Sum of space in all virtual spaces and number of virtual spaces
 429   size_t _virtual_space_total;
 430   size_t _virtual_space_count;
 431 
 432   ~VirtualSpaceList();
 433 
 434   VirtualSpaceNode* virtual_space_list() const { return _virtual_space_list; }
 435 
 436   void set_virtual_space_list(VirtualSpaceNode* v) {
 437     _virtual_space_list = v;
 438   }
 439   void set_current_virtual_space(VirtualSpaceNode* v) {
 440     _current_virtual_space = v;
 441   }
 442 
 443   void link_vs(VirtualSpaceNode* new_entry, size_t vs_word_size);
 444 
 445   // Get another virtual space and add it to the list.  This
 446   // is typically prompted by a failed attempt to allocate a chunk


2819     higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
2820                           (address)(metaspace_base + class_metaspace_size()));
2821     lower_base = MIN2(metaspace_base, cds_base);
2822   } else {
2823     higher_address = metaspace_base + class_metaspace_size();
2824     lower_base = metaspace_base;
2825   }
2826   Universe::set_narrow_klass_base(lower_base);
2827   if ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint) {
2828     Universe::set_narrow_klass_shift(0);
2829   } else {
2830     assert(!UseSharedSpaces, "Cannot shift with UseSharedSpaces");
2831     Universe::set_narrow_klass_shift(LogKlassAlignmentInBytes);
2832   }
2833 }
2834 
2835 // Return TRUE if the specified metaspace_base and cds_base are close enough
2836 // to work with compressed klass pointers.
2837 bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
2838   assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
2839   assert(UseCompressedKlassPointers, "Only use with CompressedKlassPtrs");
2840   address lower_base = MIN2((address)metaspace_base, cds_base);
2841   address higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
2842                                 (address)(metaspace_base + class_metaspace_size()));
2843   return ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint);
2844 }
2845 
2846 // Try to allocate the metaspace at the requested addr.
2847 void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
2848   assert(using_class_space(), "called improperly");
2849   assert(UseCompressedKlassPointers, "Only use with CompressedKlassPtrs");
2850   assert(class_metaspace_size() < KlassEncodingMetaspaceMax,
2851          "Metaspace size is too big");
2852 
2853   ReservedSpace metaspace_rs = ReservedSpace(class_metaspace_size(),
2854                                              os::vm_allocation_granularity(),
2855                                              false, requested_addr, 0);
2856   if (!metaspace_rs.is_reserved()) {
2857     if (UseSharedSpaces) {
2858       // Keep trying to allocate the metaspace, increasing the requested_addr
2859       // by 1GB each time, until we reach an address that will no longer allow
2860       // use of CDS with compressed klass pointers.
2861       char *addr = requested_addr;
2862       while (!metaspace_rs.is_reserved() && (addr + 1*G > addr) &&
2863              can_use_cds_with_metaspace_addr(addr + 1*G, cds_base)) {
2864         addr = addr + 1*G;
2865         metaspace_rs = ReservedSpace(class_metaspace_size(),
2866                                      os::vm_allocation_granularity(), false, addr, 0);
2867       }
2868     }
2869 
2870     // If no successful allocation then try to allocate the space anywhere.  If
2871     // that fails then OOM doom.  At this point we cannot try allocating the
2872     // metaspace as if UseCompressedKlassPointers is off because too much
2873     // initialization has happened that depends on UseCompressedKlassPointers.
2874     // So, UseCompressedKlassPointers cannot be turned off at this point.
2875     if (!metaspace_rs.is_reserved()) {
2876       metaspace_rs = ReservedSpace(class_metaspace_size(),
2877                                    os::vm_allocation_granularity(), false);
2878       if (!metaspace_rs.is_reserved()) {
2879         vm_exit_during_initialization(err_msg("Could not allocate metaspace: %d bytes",
2880                                               class_metaspace_size()));
2881       }
2882     }
2883   }
2884 
2885   // If we got here then the metaspace got allocated.
2886   MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);
2887 
2888   // Verify that we can use shared spaces.  Otherwise, turn off CDS.
2889   if (UseSharedSpaces && !can_use_cds_with_metaspace_addr(metaspace_rs.base(), cds_base)) {
2890     FileMapInfo::stop_sharing_and_unmap(
2891         "Could not allocate metaspace at a compatible address");
2892   }
2893 
2894   set_narrow_klass_base_and_shift((address)metaspace_rs.base(),
2895                                   UseSharedSpaces ? (address)cds_base : 0);
2896 
2897   initialize_class_space(metaspace_rs);
2898 
2899   if (PrintCompressedOopsMode || (PrintMiscellaneous && Verbose)) {
2900     gclog_or_tty->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: " SIZE_FORMAT,
2901                             Universe::narrow_klass_base(), Universe::narrow_klass_shift());
2902     gclog_or_tty->print_cr("Metaspace Size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
2903                            class_metaspace_size(), metaspace_rs.base(), requested_addr);
2904   }
2905 }
2906 
2907 // For UseCompressedKlassPointers the class space is reserved above the top of
2908 // the Java heap.  The argument passed in is at the base of the compressed space.
2909 void Metaspace::initialize_class_space(ReservedSpace rs) {
2910   // The reserved space size may be bigger because of alignment, esp with UseLargePages
2911   assert(rs.size() >= ClassMetaspaceSize,
2912          err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), ClassMetaspaceSize));
2913   assert(using_class_space(), "Must be using class space");
2914   _class_space_list = new VirtualSpaceList(rs);
2915 }
2916 
2917 #endif
2918 
// One-time, VM-global metaspace setup.  Two modes:
//  - DumpSharedSpaces: size the metaspace from the shared-region sizes and
//    pin the narrow-klass encoding (base = bottom of the space, shift = 0)
//    so klass pointers can be written into the archive being dumped.
//  - Normal startup: optionally map an existing CDS archive first, then (on
//    64-bit, when using a class space) reserve the compressed class space
//    above it, and finally size and create the global VirtualSpaceList.
void Metaspace::global_initialize() {
  // Initialize the alignment for shared spaces.
  int max_alignment = os::vm_page_size();
  size_t cds_total = 0;

  // Cache the granularity-aligned class metaspace size for later use.
  set_class_metaspace_size(align_size_up(ClassMetaspaceSize,
                                         os::vm_allocation_granularity()));

  MetaspaceShared::set_max_alignment(max_alignment);

  if (DumpSharedSpaces) {
    // Page-align each shared region size before summing them below.
    SharedReadOnlySize = align_size_up(SharedReadOnlySize, max_alignment);
    SharedReadWriteSize = align_size_up(SharedReadWriteSize, max_alignment);
    SharedMiscDataSize  = align_size_up(SharedMiscDataSize, max_alignment);
    SharedMiscCodeSize  = align_size_up(SharedMiscCodeSize, max_alignment);

    // Initialize with the sum of the shared space sizes.  The read-only
    // and read write metaspace chunks will be allocated out of this and the
    // remainder is the misc code and data chunks.
    cds_total = FileMapInfo::shared_spaces_size();
    _space_list = new VirtualSpaceList(cds_total/wordSize);

#ifdef _LP64
    // Set the compressed klass pointer base so that decoding of these pointers works
    // properly when creating the shared archive.
    assert(UseCompressedOops && UseCompressedKlassPointers,
      "UseCompressedOops and UseCompressedKlassPointers must be set");
    Universe::set_narrow_klass_base((address)_space_list->current_virtual_space()->bottom());
    if (TraceMetavirtualspaceAllocation && Verbose) {
      gclog_or_tty->print_cr("Setting_narrow_klass_base to Address: " PTR_FORMAT,
                             _space_list->current_virtual_space()->bottom());
    }

    // Set the shift to zero.  This only works if everything reachable by a
    // narrow klass pointer (class space plus CDS region) fits in 32 bits.
    assert(class_metaspace_size() < (uint64_t)(max_juint) - cds_total,
           "CDS region is too large");
    Universe::set_narrow_klass_shift(0);
#endif

  } else {
    // If using shared space, open the file that contains the shared space
    // and map in the memory before initializing the rest of metaspace (so
    // the addresses don't conflict)
    address cds_address = NULL;
    if (UseSharedSpaces) {
      FileMapInfo* mapinfo = new FileMapInfo();
      memset(mapinfo, 0, sizeof(FileMapInfo));

      // Open the shared archive file, read and validate the header. If
      // initialization fails, shared spaces [UseSharedSpaces] are
      // disabled and the file is closed.
      // Map in spaces now also
      if (mapinfo->initialize() && MetaspaceShared::map_shared_spaces(mapinfo)) {
        FileMapInfo::set_current_info(mapinfo);
      } else {
        assert(!mapinfo->is_open() && !UseSharedSpaces,
               "archive file not closed or shared spaces not disabled.");
      }
      // NOTE(review): cds_total/cds_address are assigned even when mapping
      // failed above — presumably benign because UseSharedSpaces is then
      // false and both are only consumed under UseSharedSpaces; verify.
      cds_total = FileMapInfo::shared_spaces_size();
      cds_address = (address)mapinfo->region_base(0);
    }

#ifdef _LP64
    // If UseCompressedKlassPointers is set then allocate the metaspace area
    // above the heap and above the CDS area (if it exists).
    if (using_class_space()) {
      if (UseSharedSpaces) {
        // Place the class space directly above the mapped archive so both
        // fit in a single narrow-klass encoding range.
        allocate_metaspace_compressed_klass_ptrs((char *)(cds_address + cds_total), cds_address);
      } else {
        allocate_metaspace_compressed_klass_ptrs((char *)CompressedKlassPointersBase, 0);
      }
    }
#endif

    // Initialize these before initializing the VirtualSpaceList
    _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
    _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
    // Make the first class chunk bigger than a medium chunk so it's not put
    // on the medium chunk list.   The next chunk will be small and progress
    // from there.  This size calculated by -version.
    _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
                                       (ClassMetaspaceSize/BytesPerWord)*2);
    _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
    // Arbitrarily set the initial virtual space to a multiple
    // of the boot class loader size.
    size_t word_size = VIRTUALSPACEMULTIPLIER * first_chunk_word_size();
    // Initialize the list of virtual spaces.
    _space_list = new VirtualSpaceList(word_size);
  }
}
3009 
3010 void Metaspace::initialize(Mutex* lock, MetaspaceType type) {
3011 
3012   assert(space_list() != NULL,
3013     "Metadata VirtualSpaceList has not been initialized");
3014 
3015   _vsm = new SpaceManager(NonClassType, lock, space_list());
3016   if (_vsm == NULL) {
3017     return;
3018   }
3019   size_t word_size;
3020   size_t class_word_size;


3047   if (using_class_space()) {
3048     Metachunk* class_chunk =
3049        class_space_list()->get_initialization_chunk(class_word_size,
3050                                                     class_vsm()->medium_chunk_bunch());
3051     if (class_chunk != NULL) {
3052       class_vsm()->add_chunk(class_chunk, true);
3053     }
3054   }
3055 
3056   _alloc_record_head = NULL;
3057   _alloc_record_tail = NULL;
3058 }
3059 
3060 size_t Metaspace::align_word_size_up(size_t word_size) {
3061   size_t byte_size = word_size * wordSize;
3062   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3063 }
3064 
3065 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3066   // DumpSharedSpaces doesn't use class metadata area (yet)
3067   // Also, don't use class_vsm() unless UseCompressedKlassPointers is true.
3068   if (mdtype == ClassType && using_class_space()) {
3069     return  class_vsm()->allocate(word_size);
3070   } else {
3071     return  vsm()->allocate(word_size);
3072   }
3073 }
3074 
3075 MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
3076   MetaWord* result;
3077   MetaspaceGC::set_expand_after_GC(true);
3078   size_t before_inc = MetaspaceGC::capacity_until_GC();
3079   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size) * BytesPerWord;
3080   MetaspaceGC::inc_capacity_until_GC(delta_bytes);
3081   if (PrintGCDetails && Verbose) {
3082     gclog_or_tty->print_cr("Increase capacity to GC from " SIZE_FORMAT
3083       " to " SIZE_FORMAT, before_inc, MetaspaceGC::capacity_until_GC());
3084   }
3085 
3086   result = allocate(word_size, mdtype);
3087 


3196     return Metablock::initialize(result, word_size);
3197   }
3198 
3199   result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
3200 
3201   if (result == NULL) {
3202     // Try to clean out some memory and retry.
3203     result =
3204       Universe::heap()->collector_policy()->satisfy_failed_metadata_allocation(
3205         loader_data, word_size, mdtype);
3206 
3207     // If result is still null, we are out of memory.
3208     if (result == NULL) {
3209       if (Verbose && TraceMetadataChunkAllocation) {
3210         gclog_or_tty->print_cr("Metaspace allocation failed for size "
3211           SIZE_FORMAT, word_size);
3212         if (loader_data->metaspace_or_null() != NULL) loader_data->dump(gclog_or_tty);
3213         MetaspaceAux::dump(gclog_or_tty);
3214       }
3215       // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
3216       const char* space_string = (mdtype == ClassType) ? "Class Metadata space" :
3217                                                          "Metadata space";
3218       report_java_out_of_memory(space_string);
3219 
3220       if (JvmtiExport::should_post_resource_exhausted()) {
3221         JvmtiExport::post_resource_exhausted(
3222             JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
3223             space_string);
3224       }
3225       if (mdtype == ClassType) {
3226         THROW_OOP_0(Universe::out_of_memory_error_class_metaspace());
3227       } else {
3228         THROW_OOP_0(Universe::out_of_memory_error_metaspace());
3229       }
3230     }
3231   }
3232   return Metablock::initialize(result, word_size);
3233 }
3234 
3235 void Metaspace::record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size) {
3236   assert(DumpSharedSpaces, "sanity");




 406 // It has a  _next link for singly linked list and a MemRegion
 407 // for total space in the VirtualSpace.
 408 class VirtualSpaceList : public CHeapObj<mtClass> {
 409   friend class VirtualSpaceNode;
 410 
 411   enum VirtualSpaceSizes {
 412     VirtualSpaceSize = 256 * K
 413   };
 414 
 415   // Global list of virtual spaces
 416   // Head of the list
 417   VirtualSpaceNode* _virtual_space_list;
 418   // virtual space currently being used for allocations
 419   VirtualSpaceNode* _current_virtual_space;
 420   // Free chunk list for all other metadata
 421   ChunkManager      _chunk_manager;
 422 
 423   // Can this virtual list allocate >1 spaces?  Also, used to determine
 424   // whether to allocate unlimited small chunks in this virtual space
 425   bool _is_class;
 426   bool can_grow() const { return !is_class() || !UseCompressedClassPointers; }
 427 
 428   // Sum of space in all virtual spaces and number of virtual spaces
 429   size_t _virtual_space_total;
 430   size_t _virtual_space_count;
 431 
 432   ~VirtualSpaceList();
 433 
 434   VirtualSpaceNode* virtual_space_list() const { return _virtual_space_list; }
 435 
 436   void set_virtual_space_list(VirtualSpaceNode* v) {
 437     _virtual_space_list = v;
 438   }
 439   void set_current_virtual_space(VirtualSpaceNode* v) {
 440     _current_virtual_space = v;
 441   }
 442 
 443   void link_vs(VirtualSpaceNode* new_entry, size_t vs_word_size);
 444 
 445   // Get another virtual space and add it to the list.  This
 446   // is typically prompted by a failed attempt to allocate a chunk


2819     higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
2820                           (address)(metaspace_base + class_metaspace_size()));
2821     lower_base = MIN2(metaspace_base, cds_base);
2822   } else {
2823     higher_address = metaspace_base + class_metaspace_size();
2824     lower_base = metaspace_base;
2825   }
2826   Universe::set_narrow_klass_base(lower_base);
2827   if ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint) {
2828     Universe::set_narrow_klass_shift(0);
2829   } else {
2830     assert(!UseSharedSpaces, "Cannot shift with UseSharedSpaces");
2831     Universe::set_narrow_klass_shift(LogKlassAlignmentInBytes);
2832   }
2833 }
2834 
2835 // Return TRUE if the specified metaspace_base and cds_base are close enough
2836 // to work with compressed klass pointers.
2837 bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
2838   assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
2839   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
2840   address lower_base = MIN2((address)metaspace_base, cds_base);
2841   address higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
2842                                 (address)(metaspace_base + class_metaspace_size()));
2843   return ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint);
2844 }
2845 
2846 // Try to allocate the metaspace at the requested addr.
2847 void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
2848   assert(using_class_space(), "called improperly");
2849   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
2850   assert(class_metaspace_size() < KlassEncodingMetaspaceMax,
2851          "Metaspace size is too big");
2852 
2853   ReservedSpace metaspace_rs = ReservedSpace(class_metaspace_size(),
2854                                              os::vm_allocation_granularity(),
2855                                              false, requested_addr, 0);
2856   if (!metaspace_rs.is_reserved()) {
2857     if (UseSharedSpaces) {
2858       // Keep trying to allocate the metaspace, increasing the requested_addr
2859       // by 1GB each time, until we reach an address that will no longer allow
2860       // use of CDS with compressed klass pointers.
2861       char *addr = requested_addr;
2862       while (!metaspace_rs.is_reserved() && (addr + 1*G > addr) &&
2863              can_use_cds_with_metaspace_addr(addr + 1*G, cds_base)) {
2864         addr = addr + 1*G;
2865         metaspace_rs = ReservedSpace(class_metaspace_size(),
2866                                      os::vm_allocation_granularity(), false, addr, 0);
2867       }
2868     }
2869 
2870     // If no successful allocation then try to allocate the space anywhere.  If
2871     // that fails then OOM doom.  At this point we cannot try allocating the
2872     // metaspace as if UseCompressedClassPointers is off because too much
2873     // initialization has happened that depends on UseCompressedClassPointers.
2874     // So, UseCompressedClassPointers cannot be turned off at this point.
2875     if (!metaspace_rs.is_reserved()) {
2876       metaspace_rs = ReservedSpace(class_metaspace_size(),
2877                                    os::vm_allocation_granularity(), false);
2878       if (!metaspace_rs.is_reserved()) {
2879         vm_exit_during_initialization(err_msg("Could not allocate metaspace: %d bytes",
2880                                               class_metaspace_size()));
2881       }
2882     }
2883   }
2884 
2885   // If we got here then the metaspace got allocated.
2886   MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);
2887 
2888   // Verify that we can use shared spaces.  Otherwise, turn off CDS.
2889   if (UseSharedSpaces && !can_use_cds_with_metaspace_addr(metaspace_rs.base(), cds_base)) {
2890     FileMapInfo::stop_sharing_and_unmap(
2891         "Could not allocate metaspace at a compatible address");
2892   }
2893 
2894   set_narrow_klass_base_and_shift((address)metaspace_rs.base(),
2895                                   UseSharedSpaces ? (address)cds_base : 0);
2896 
2897   initialize_class_space(metaspace_rs);
2898 
2899   if (PrintCompressedOopsMode || (PrintMiscellaneous && Verbose)) {
2900     gclog_or_tty->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: " SIZE_FORMAT,
2901                             Universe::narrow_klass_base(), Universe::narrow_klass_shift());
2902     gclog_or_tty->print_cr("Metaspace Size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
2903                            class_metaspace_size(), metaspace_rs.base(), requested_addr);
2904   }
2905 }
2906 
2907 // For UseCompressedClassPointers the class space is reserved above the top of
2908 // the Java heap.  The argument passed in is at the base of the compressed space.
2909 void Metaspace::initialize_class_space(ReservedSpace rs) {
2910   // The reserved space size may be bigger because of alignment, esp with UseLargePages
2911   assert(rs.size() >= CompressedClassSpaceSize,
2912          err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), CompressedClassSpaceSize));
2913   assert(using_class_space(), "Must be using class space");
2914   _class_space_list = new VirtualSpaceList(rs);
2915 }
2916 
2917 #endif
2918 
// One-time, VM-global metaspace setup.  Two modes:
//  - DumpSharedSpaces: size the metaspace from the shared-region sizes and
//    pin the narrow-klass encoding (base = bottom of the space, shift = 0)
//    so klass pointers can be written into the archive being dumped.
//  - Normal startup: optionally map an existing CDS archive first, then (on
//    64-bit, when using a class space) reserve the compressed class space
//    above it, and finally size and create the global VirtualSpaceList.
void Metaspace::global_initialize() {
  // Initialize the alignment for shared spaces.
  int max_alignment = os::vm_page_size();
  size_t cds_total = 0;

  // Cache the granularity-aligned class metaspace size for later use.
  set_class_metaspace_size(align_size_up(CompressedClassSpaceSize,
                                         os::vm_allocation_granularity()));

  MetaspaceShared::set_max_alignment(max_alignment);

  if (DumpSharedSpaces) {
    // Page-align each shared region size before summing them below.
    SharedReadOnlySize = align_size_up(SharedReadOnlySize, max_alignment);
    SharedReadWriteSize = align_size_up(SharedReadWriteSize, max_alignment);
    SharedMiscDataSize  = align_size_up(SharedMiscDataSize, max_alignment);
    SharedMiscCodeSize  = align_size_up(SharedMiscCodeSize, max_alignment);

    // Initialize with the sum of the shared space sizes.  The read-only
    // and read write metaspace chunks will be allocated out of this and the
    // remainder is the misc code and data chunks.
    cds_total = FileMapInfo::shared_spaces_size();
    _space_list = new VirtualSpaceList(cds_total/wordSize);

#ifdef _LP64
    // Set the compressed klass pointer base so that decoding of these pointers works
    // properly when creating the shared archive.
    assert(UseCompressedOops && UseCompressedClassPointers,
      "UseCompressedOops and UseCompressedClassPointers must be set");
    Universe::set_narrow_klass_base((address)_space_list->current_virtual_space()->bottom());
    if (TraceMetavirtualspaceAllocation && Verbose) {
      gclog_or_tty->print_cr("Setting_narrow_klass_base to Address: " PTR_FORMAT,
                             _space_list->current_virtual_space()->bottom());
    }

    // Set the shift to zero.  This only works if everything reachable by a
    // narrow klass pointer (class space plus CDS region) fits in 32 bits.
    assert(class_metaspace_size() < (uint64_t)(max_juint) - cds_total,
           "CDS region is too large");
    Universe::set_narrow_klass_shift(0);
#endif

  } else {
    // If using shared space, open the file that contains the shared space
    // and map in the memory before initializing the rest of metaspace (so
    // the addresses don't conflict)
    address cds_address = NULL;
    if (UseSharedSpaces) {
      FileMapInfo* mapinfo = new FileMapInfo();
      memset(mapinfo, 0, sizeof(FileMapInfo));

      // Open the shared archive file, read and validate the header. If
      // initialization fails, shared spaces [UseSharedSpaces] are
      // disabled and the file is closed.
      // Map in spaces now also
      if (mapinfo->initialize() && MetaspaceShared::map_shared_spaces(mapinfo)) {
        FileMapInfo::set_current_info(mapinfo);
      } else {
        assert(!mapinfo->is_open() && !UseSharedSpaces,
               "archive file not closed or shared spaces not disabled.");
      }
      // NOTE(review): cds_total/cds_address are assigned even when mapping
      // failed above — presumably benign because UseSharedSpaces is then
      // false and both are only consumed under UseSharedSpaces; verify.
      cds_total = FileMapInfo::shared_spaces_size();
      cds_address = (address)mapinfo->region_base(0);
    }

#ifdef _LP64
    // If UseCompressedClassPointers is set then allocate the metaspace area
    // above the heap and above the CDS area (if it exists).
    if (using_class_space()) {
      if (UseSharedSpaces) {
        // Place the class space directly above the mapped archive so both
        // fit in a single narrow-klass encoding range.
        allocate_metaspace_compressed_klass_ptrs((char *)(cds_address + cds_total), cds_address);
      } else {
        allocate_metaspace_compressed_klass_ptrs((char *)CompressedKlassPointersBase, 0);
      }
    }
#endif

    // Initialize these before initializing the VirtualSpaceList
    _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
    _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
    // Make the first class chunk bigger than a medium chunk so it's not put
    // on the medium chunk list.   The next chunk will be small and progress
    // from there.  This size calculated by -version.
    _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
                                       (CompressedClassSpaceSize/BytesPerWord)*2);
    _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
    // Arbitrarily set the initial virtual space to a multiple
    // of the boot class loader size.
    size_t word_size = VIRTUALSPACEMULTIPLIER * first_chunk_word_size();
    // Initialize the list of virtual spaces.
    _space_list = new VirtualSpaceList(word_size);
  }
}
3009 
3010 void Metaspace::initialize(Mutex* lock, MetaspaceType type) {
3011 
3012   assert(space_list() != NULL,
3013     "Metadata VirtualSpaceList has not been initialized");
3014 
3015   _vsm = new SpaceManager(NonClassType, lock, space_list());
3016   if (_vsm == NULL) {
3017     return;
3018   }
3019   size_t word_size;
3020   size_t class_word_size;


3047   if (using_class_space()) {
3048     Metachunk* class_chunk =
3049        class_space_list()->get_initialization_chunk(class_word_size,
3050                                                     class_vsm()->medium_chunk_bunch());
3051     if (class_chunk != NULL) {
3052       class_vsm()->add_chunk(class_chunk, true);
3053     }
3054   }
3055 
3056   _alloc_record_head = NULL;
3057   _alloc_record_tail = NULL;
3058 }
3059 
3060 size_t Metaspace::align_word_size_up(size_t word_size) {
3061   size_t byte_size = word_size * wordSize;
3062   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3063 }
3064 
3065 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3066   // DumpSharedSpaces doesn't use class metadata area (yet)
3067   // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3068   if (mdtype == ClassType && using_class_space()) {
3069     return  class_vsm()->allocate(word_size);
3070   } else {
3071     return  vsm()->allocate(word_size);
3072   }
3073 }
3074 
3075 MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
3076   MetaWord* result;
3077   MetaspaceGC::set_expand_after_GC(true);
3078   size_t before_inc = MetaspaceGC::capacity_until_GC();
3079   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size) * BytesPerWord;
3080   MetaspaceGC::inc_capacity_until_GC(delta_bytes);
3081   if (PrintGCDetails && Verbose) {
3082     gclog_or_tty->print_cr("Increase capacity to GC from " SIZE_FORMAT
3083       " to " SIZE_FORMAT, before_inc, MetaspaceGC::capacity_until_GC());
3084   }
3085 
3086   result = allocate(word_size, mdtype);
3087 


3196     return Metablock::initialize(result, word_size);
3197   }
3198 
3199   result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
3200 
3201   if (result == NULL) {
3202     // Try to clean out some memory and retry.
3203     result =
3204       Universe::heap()->collector_policy()->satisfy_failed_metadata_allocation(
3205         loader_data, word_size, mdtype);
3206 
3207     // If result is still null, we are out of memory.
3208     if (result == NULL) {
3209       if (Verbose && TraceMetadataChunkAllocation) {
3210         gclog_or_tty->print_cr("Metaspace allocation failed for size "
3211           SIZE_FORMAT, word_size);
3212         if (loader_data->metaspace_or_null() != NULL) loader_data->dump(gclog_or_tty);
3213         MetaspaceAux::dump(gclog_or_tty);
3214       }
3215       // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
3216       const char* space_string = (mdtype == ClassType) ? "Compressed class space" :
3217                                                          "Metadata space";
3218       report_java_out_of_memory(space_string);
3219 
3220       if (JvmtiExport::should_post_resource_exhausted()) {
3221         JvmtiExport::post_resource_exhausted(
3222             JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
3223             space_string);
3224       }
3225       if (mdtype == ClassType) {
3226         THROW_OOP_0(Universe::out_of_memory_error_class_metaspace());
3227       } else {
3228         THROW_OOP_0(Universe::out_of_memory_error_metaspace());
3229       }
3230     }
3231   }
3232   return Metablock::initialize(result, word_size);
3233 }
3234 
3235 void Metaspace::record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size) {
3236   assert(DumpSharedSpaces, "sanity");