rev 57380 : [mq]: metaspace-improvement

/*
 * Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "aot/aotLoader.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/filemap.hpp"
#include "memory/metaspace.hpp"
#include "memory/metaspace/chunkLevel.hpp"
#include "memory/metaspace/chunkManager.hpp"
#include "memory/metaspace/metachunk.hpp"
#include "memory/metaspace/printCLDMetaspaceInfoClosure.hpp"
#include "memory/metaspace/spaceManager.hpp"
#include "memory/metaspace/virtualSpaceList.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/metaspaceTracer.hpp"
#include "memory/universe.hpp"
#include "oops/compressedOops.hpp"
#include "runtime/init.hpp"
#include "runtime/orderAccess.hpp"
#include "services/memTracker.hpp"
#include "utilities/copy.hpp"
#include "utilities/debug.hpp"
#include "utilities/formatBuffer.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/vmError.hpp"


using namespace metaspace;

MetaWord* last_allocated = 0;

size_t Metaspace::_compressed_class_space_size;
const MetaspaceTracer* Metaspace::_tracer = NULL;

DEBUG_ONLY(bool Metaspace::_frozen = false;)

// MetaspaceUtils
size_t MetaspaceUtils::_capacity_words[Metaspace::MetadataTypeCount] = {0, 0};
size_t MetaspaceUtils::_overhead_words[Metaspace::MetadataTypeCount] = {0, 0};
volatile size_t MetaspaceUtils::_used_words[Metaspace::MetadataTypeCount] = {0, 0};

// Collect used metaspace statistics. This involves walking the CLDG. The resulting
// output will be the accumulated values for all live metaspaces.
// Note: this method does no locking.
void MetaspaceUtils::collect_statistics(ClassLoaderMetaspaceStatistics* out) {
  out->reset();
  ClassLoaderDataGraphMetaspaceIterator iter;
  while (iter.repeat()) {
    ClassLoaderMetaspace* msp = iter.get_next();
    if (msp != NULL) {
      msp->add_to_statistics(out);
    }
  }
}

size_t MetaspaceUtils::free_in_vs_bytes(Metaspace::MetadataType mdtype) {
  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
  return list == NULL ? 0 : list->free_bytes();
}

size_t MetaspaceUtils::free_in_vs_bytes() {
  return free_in_vs_bytes(Metaspace::ClassType) + free_in_vs_bytes(Metaspace::NonClassType);
}

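// Note on the helpers below: _capacity_words and _overhead_words are only ever
// modified under the MetaspaceExpand_lock, so plain updates suffice; the
// _used_words counter is updated from allocation paths that do not hold that
// lock, hence the atomic variants.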
static void inc_stat_nonatomically(size_t* pstat, size_t words) {
  assert_lock_strong(MetaspaceExpand_lock);
  (*pstat) += words;
}

static void dec_stat_nonatomically(size_t* pstat, size_t words) {
  assert_lock_strong(MetaspaceExpand_lock);
  const size_t size_now = *pstat;
  assert(size_now >= words, "About to decrement counter below zero "
         "(current value: " SIZE_FORMAT ", decrement value: " SIZE_FORMAT ").",
         size_now, words);
  *pstat = size_now - words;
}

static void inc_stat_atomically(volatile size_t* pstat, size_t words) {
  Atomic::add(words, pstat);
}

static void dec_stat_atomically(volatile size_t* pstat, size_t words) {
  const size_t size_now = *pstat;
  assert(size_now >= words, "About to decrement counter below zero "
         "(current value: " SIZE_FORMAT ", decrement value: " SIZE_FORMAT ").",
         size_now, words);
  Atomic::sub(words, pstat);
}

void MetaspaceUtils::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
  dec_stat_nonatomically(&_capacity_words[mdtype], words);
}
void MetaspaceUtils::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
  inc_stat_nonatomically(&_capacity_words[mdtype], words);
}
void MetaspaceUtils::dec_used(Metaspace::MetadataType mdtype, size_t words) {
  dec_stat_atomically(&_used_words[mdtype], words);
}
void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
  inc_stat_atomically(&_used_words[mdtype], words);
}
void MetaspaceUtils::dec_overhead(Metaspace::MetadataType mdtype, size_t words) {
  dec_stat_nonatomically(&_overhead_words[mdtype], words);
}
void MetaspaceUtils::inc_overhead(Metaspace::MetadataType mdtype, size_t words) {
  inc_stat_nonatomically(&_overhead_words[mdtype], words);
}

size_t MetaspaceUtils::reserved_bytes(Metaspace::MetadataType mdtype) {
  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
  return list == NULL ? 0 : list->reserved_bytes();
}

size_t MetaspaceUtils::committed_bytes(Metaspace::MetadataType mdtype) {
  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
  return list == NULL ? 0 : list->committed_bytes();
}

size_t MetaspaceUtils::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }

size_t MetaspaceUtils::free_chunks_total_words(Metaspace::MetadataType mdtype) {
  ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
  if (chunk_manager == NULL) {
    return 0;
  }
  return chunk_manager->free_chunks_total_words();
}

size_t MetaspaceUtils::free_chunks_total_bytes(Metaspace::MetadataType mdtype) {
  return free_chunks_total_words(mdtype) * BytesPerWord;
}

size_t MetaspaceUtils::free_chunks_total_words() {
  return free_chunks_total_words(Metaspace::ClassType) +
         free_chunks_total_words(Metaspace::NonClassType);
}

size_t MetaspaceUtils::free_chunks_total_bytes() {
  return free_chunks_total_words() * BytesPerWord;
}

bool MetaspaceUtils::has_chunk_free_list(Metaspace::MetadataType mdtype) {
  return Metaspace::get_chunk_manager(mdtype) != NULL;
}

MetaspaceChunkFreeListSummary MetaspaceUtils::chunk_free_list_summary(Metaspace::MetadataType mdtype) {
  if (!has_chunk_free_list(mdtype)) {
    return MetaspaceChunkFreeListSummary();
  }

  const ChunkManager* cm = Metaspace::get_chunk_manager(mdtype);
  return cm->chunk_free_list_summary();
}

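// Logs one used(committed) transition per space; the resulting line has the
// shape (values invented for illustration):
//   Metaspace: 10M(12M)->11M(13M) NonClass: 8M(9M)->9M(10M) Class: 2M(3M)->2M(3M)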
void MetaspaceUtils::print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values) {
  const metaspace::MetaspaceSizesSnapshot meta_values;

  if (Metaspace::using_class_space()) {
    log_info(gc, metaspace)(HEAP_CHANGE_FORMAT" "
                            HEAP_CHANGE_FORMAT" "
                            HEAP_CHANGE_FORMAT,
                            HEAP_CHANGE_FORMAT_ARGS("Metaspace",
                                                    pre_meta_values.used(),
                                                    pre_meta_values.committed(),
                                                    meta_values.used(),
                                                    meta_values.committed()),
                            HEAP_CHANGE_FORMAT_ARGS("NonClass",
                                                    pre_meta_values.non_class_used(),
                                                    pre_meta_values.non_class_committed(),
                                                    meta_values.non_class_used(),
                                                    meta_values.non_class_committed()),
                            HEAP_CHANGE_FORMAT_ARGS("Class",
                                                    pre_meta_values.class_used(),
                                                    pre_meta_values.class_committed(),
                                                    meta_values.class_used(),
                                                    meta_values.class_committed()));
  } else {
    log_info(gc, metaspace)(HEAP_CHANGE_FORMAT,
                            HEAP_CHANGE_FORMAT_ARGS("Metaspace",
                                                    pre_meta_values.used(),
                                                    pre_meta_values.committed(),
                                                    meta_values.used(),
                                                    meta_values.committed()));
  }
}

void MetaspaceUtils::print_on(outputStream* out) {
  out->print_cr(" Metaspace       "
                "used "      SIZE_FORMAT "K, "
                "capacity "  SIZE_FORMAT "K, "
                "committed " SIZE_FORMAT "K, "
                "reserved "  SIZE_FORMAT "K",
                used_bytes()/K,
                capacity_bytes()/K,
                committed_bytes()/K,
                reserved_bytes()/K);

  if (Metaspace::using_class_space()) {
    Metaspace::MetadataType ct = Metaspace::ClassType;
    out->print_cr("  class space    "
                  "used "      SIZE_FORMAT "K, "
                  "capacity "  SIZE_FORMAT "K, "
                  "committed " SIZE_FORMAT "K, "
                  "reserved "  SIZE_FORMAT "K",
                  used_bytes(ct)/K,
                  capacity_bytes(ct)/K,
                  committed_bytes(ct)/K,
                  reserved_bytes(ct)/K);
  }
}

// Prints an ASCII representation of the given space.
void MetaspaceUtils::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
  MutexLocker cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
  const bool for_class = (mdtype == Metaspace::ClassType);
  VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
  if (vsl != NULL) {
    if (for_class) {
      if (!Metaspace::using_class_space()) {
        out->print_cr("No Class Space.");
        return;
      }
      out->print_raw("---- Metaspace Map (Class Space) ----");
    } else {
      out->print_raw("---- Metaspace Map (Non-Class Space) ----");
    }
    // Print legend:
    out->cr();
    out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
    out->cr();
    vsl->print_map(out);
    out->cr();
  }
}

void MetaspaceUtils::verify_free_chunks() {
#ifdef ASSERT
  Metaspace::chunk_manager_metadata()->verify(false);
  if (Metaspace::using_class_space()) {
    Metaspace::chunk_manager_class()->verify(false);
  }
#endif
}

void MetaspaceUtils::verify_metrics() {
#ifdef ASSERT
  // Please note: there are time windows where the internal counters are out of sync with
  // reality. For example, when a newly created ClassLoaderMetaspace creates its first chunk -
  // the ClassLoaderMetaspace is not yet attached to its ClassLoaderData object and hence will
  // not be counted when iterating the CLDG. So be careful when you call this method.
  ClassLoaderMetaspaceStatistics total_stat;
  collect_statistics(&total_stat);
  UsedChunksStatistics nonclass_chunk_stat = total_stat.nonclass_sm_stats().totals();
  UsedChunksStatistics class_chunk_stat = total_stat.class_sm_stats().totals();

  bool mismatch = false;
  for (int i = 0; i < Metaspace::MetadataTypeCount; i++) {
    Metaspace::MetadataType mdtype = (Metaspace::MetadataType)i;
    UsedChunksStatistics chunk_stat = total_stat.sm_stats(mdtype).totals();
    if (capacity_words(mdtype) != chunk_stat.cap() ||
        used_words(mdtype) != chunk_stat.used() ||
        overhead_words(mdtype) != chunk_stat.overhead()) {
      mismatch = true;
      tty->print_cr("MetaspaceUtils::verify_metrics: counter mismatch for mdtype=%u:", mdtype);
      tty->print_cr("Expected cap " SIZE_FORMAT ", used " SIZE_FORMAT ", overhead " SIZE_FORMAT ".",
                    capacity_words(mdtype), used_words(mdtype), overhead_words(mdtype));
      tty->print_cr("Got cap " SIZE_FORMAT ", used " SIZE_FORMAT ", overhead " SIZE_FORMAT ".",
                    chunk_stat.cap(), chunk_stat.used(), chunk_stat.overhead());
      tty->flush();
    }
  }
  assert(!mismatch, "MetaspaceUtils::verify_metrics: counter mismatch.");
#endif
}

// Metaspace methods

VirtualSpaceList* Metaspace::_space_list = NULL;
VirtualSpaceList* Metaspace::_class_space_list = NULL;

ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
ChunkManager* Metaspace::_chunk_manager_class = NULL;

bool Metaspace::_initialized = false;

#define VIRTUALSPACEMULTIPLIER 2

#ifdef _LP64
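// The maximal address range (4G) coverable by an unshifted (shift == 0)
// narrow Klass pointer, since narrow Klass ids are 32 bit.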
static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);

void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
  assert(!DumpSharedSpaces, "narrow_klass is set by MetaspaceShared class.");
  // Figure out the narrow_klass_base and the narrow_klass_shift.  The
  // narrow_klass_base is the lower of the metaspace base and the cds base
  // (if cds is enabled).  The narrow_klass_shift depends on the distance
  // between the lower base and higher address.
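  // For orientation: a narrow Klass id nk decodes (roughly) as
  //   Klass* k = (Klass*)(narrow_klass_base + ((uintptr_t)nk << narrow_klass_shift));
  // so base and shift must be chosen such that every Klass* in
  // [lower_base, higher_address) remains encodable.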
  address lower_base;
  address higher_address;
#if INCLUDE_CDS
  if (UseSharedSpaces) {
    higher_address = MAX2((address)(cds_base + MetaspaceShared::core_spaces_size()),
                          (address)(metaspace_base + compressed_class_space_size()));
    lower_base = MIN2(metaspace_base, cds_base);
  } else
#endif
  {
    higher_address = metaspace_base + compressed_class_space_size();
    lower_base = metaspace_base;

    uint64_t klass_encoding_max = UnscaledClassSpaceMax << LogKlassAlignmentInBytes;
    // If compressed class space fits in lower 32G, we don't need a base.
    if (higher_address <= (address)klass_encoding_max) {
      lower_base = 0; // Effectively lower base is zero.
    }
  }

  CompressedKlassPointers::set_base(lower_base);

  // CDS uses LogKlassAlignmentInBytes for narrow_klass_shift. See
  // MetaspaceShared::initialize_dumptime_shared_and_meta_spaces() for
  // how the dump time narrow_klass_shift is set. Although CDS can also
  // work in zero-shift mode, it uses LogKlassAlignmentInBytes for the
  // klass shift to stay consistent with AOT, so archived java heap objects
  // can be used at the same time as AOT code.
  if (!UseSharedSpaces
      && (uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax) {
    CompressedKlassPointers::set_shift(0);
  } else {
    CompressedKlassPointers::set_shift(LogKlassAlignmentInBytes);
  }
  AOTLoader::set_narrow_klass_shift();
}

#if INCLUDE_CDS
// Return TRUE if the specified metaspace_base and cds_base are close enough
// to work with compressed klass pointers.
bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
  assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
  assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
  address lower_base = MIN2((address)metaspace_base, cds_base);
  address higher_address = MAX2((address)(cds_base + MetaspaceShared::core_spaces_size()),
                                (address)(metaspace_base + compressed_class_space_size()));
  return ((uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax);
}
#endif

// Try to allocate the metaspace at the requested addr.
void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
  assert(!DumpSharedSpaces, "compressed klass space is allocated by MetaspaceShared class.");
  assert(using_class_space(), "called improperly");
  assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
  assert(compressed_class_space_size() < KlassEncodingMetaspaceMax,
         "Metaspace size is too big");
  assert_is_aligned(requested_addr, _reserve_alignment);
  assert_is_aligned(cds_base, _reserve_alignment);
  assert_is_aligned(compressed_class_space_size(), _reserve_alignment);

  // Don't use large pages for the class space.
  bool large_pages = false;

#if !(defined(AARCH64) || defined(AIX))
  ReservedSpace metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                             _reserve_alignment,
                                             large_pages,
                                             requested_addr);
#else // AARCH64 || AIX
  ReservedSpace metaspace_rs;

  // Our compressed klass pointers may fit nicely into the lower 32
  // bits.
  if ((uint64_t)requested_addr + compressed_class_space_size() < 4*G) {
    metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                 _reserve_alignment,
                                 large_pages,
                                 requested_addr);
  }

  if (!metaspace_rs.is_reserved()) {
    // Aarch64: Try to align metaspace so that we can decode a compressed
    // klass with a single MOVK instruction.  We can do this iff the
    // compressed class base is a multiple of 4G.
    // Aix: Search for a place where we can find memory. If we need to load
    // the base, 4G alignment is helpful, too.
    size_t increment = AARCH64_ONLY(4*)G;
    for (char* a = align_up(requested_addr, increment);
         a < (char*)(1024*G);
         a += increment) {
      if (a == (char*)(32*G)) {
        // Go faster from here on. Zero-based is no longer possible.
        increment = 4*G;
      }

#if INCLUDE_CDS
      if (UseSharedSpaces
          && !can_use_cds_with_metaspace_addr(a, cds_base)) {
        // We failed to find an aligned base that will reach.  Fall
        // back to using our requested addr.
        metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                     _reserve_alignment,
                                     large_pages,
                                     requested_addr);
        break;
      }
#endif

      metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                   _reserve_alignment,
                                   large_pages,
                                   a);
      if (metaspace_rs.is_reserved()) {
        break;
      }
    }
  }

#endif // AARCH64 || AIX

  if (!metaspace_rs.is_reserved()) {
#if INCLUDE_CDS
    if (UseSharedSpaces) {
      size_t increment = align_up(1*G, _reserve_alignment);

      // Keep trying to allocate the metaspace, increasing the requested_addr
      // by 1GB each time, until we reach an address that will no longer allow
      // use of CDS with compressed klass pointers.
      char* addr = requested_addr;
      while (!metaspace_rs.is_reserved() && (addr + increment > addr) &&
             can_use_cds_with_metaspace_addr(addr + increment, cds_base)) {
        addr = addr + increment;
        metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                     _reserve_alignment, large_pages, addr);
      }
    }
#endif
    // If no successful allocation then try to allocate the space anywhere.  If
    // that fails then OOM doom.  At this point we cannot try allocating the
    // metaspace as if UseCompressedClassPointers is off because too much
    // initialization has happened that depends on UseCompressedClassPointers.
    // So, UseCompressedClassPointers cannot be turned off at this point.
    if (!metaspace_rs.is_reserved()) {
      metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                   _reserve_alignment, large_pages);
      if (!metaspace_rs.is_reserved()) {
        vm_exit_during_initialization(err_msg("Could not allocate metaspace: " SIZE_FORMAT " bytes",
                                              compressed_class_space_size()));
      }
    }
  }

  // If we got here then the metaspace got allocated.
  MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);

#if INCLUDE_CDS
  // Verify that we can use shared spaces.  Otherwise, turn off CDS.
  if (UseSharedSpaces && !can_use_cds_with_metaspace_addr(metaspace_rs.base(), cds_base)) {
    FileMapInfo::stop_sharing_and_unmap(
        "Could not allocate metaspace at a compatible address");
  }
#endif
  set_narrow_klass_base_and_shift((address)metaspace_rs.base(),
                                  UseSharedSpaces ? (address)cds_base : 0);

  initialize_class_space(metaspace_rs);

  LogTarget(Trace, gc, metaspace) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    print_compressed_class_space(&ls, requested_addr);
  }
}

void Metaspace::print_compressed_class_space(outputStream* st, const char* requested_addr) {
  st->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: %d",
               p2i(CompressedKlassPointers::base()), CompressedKlassPointers::shift());
  if (_class_space_list != NULL) {
    address base = (address)_class_space_list->current_virtual_space()->bottom();
    st->print("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT,
              compressed_class_space_size(), p2i(base));
    if (requested_addr != 0) {
      st->print(" Req Addr: " PTR_FORMAT, p2i(requested_addr));
    }
    st->cr();
  }
}

// For UseCompressedClassPointers the class space is reserved above the top of
// the Java heap.  The argument passed in is at the base of the compressed space.
void Metaspace::initialize_class_space(ReservedSpace rs) {
  // The reserved space size may be bigger because of alignment, esp with UseLargePages
  assert(rs.size() >= CompressedClassSpaceSize,
         SIZE_FORMAT " != " SIZE_FORMAT, rs.size(), CompressedClassSpaceSize);
  assert(using_class_space(), "Must be using class space");
  _class_space_list = new VirtualSpaceList("class space list", rs);
  _chunk_manager_class = new ChunkManager("class space chunk manager", _class_space_list);
}

#endif

void Metaspace::ergo_initialize() {
  if (DumpSharedSpaces) {
    // Using large pages when dumping the shared archive is currently not implemented.
    FLAG_SET_ERGO(UseLargePagesInMetaspace, false);
  }

  size_t page_size = os::vm_page_size();
  if (UseLargePages && UseLargePagesInMetaspace) {
    page_size = os::large_page_size();
  }

  _commit_alignment = page_size;

  // Reserve alignment: all Metaspace memory mappings are to be aligned to the size of a root chunk.
  assert(is_aligned_to((int)MAX_CHUNK_BYTE_SIZE, os::vm_allocation_granularity()),
         "root chunk size must be a multiple of alloc granularity");

  _reserve_alignment = MAX2(page_size, (size_t)MAX_CHUNK_BYTE_SIZE);
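  // Worked example (numbers illustrative, not normative): with 4K pages and a
  // 4M root chunk, _commit_alignment becomes 4K and _reserve_alignment 4M; the
  // metaspace sizing flags below are then aligned down to those granularities.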

  // Do not use FLAG_SET_ERGO to update MaxMetaspaceSize, since that would
  // erase the information of whether MaxMetaspaceSize had been set on the
  // command line. This information is needed later to conform to the
  // specification of the java.lang.management.MemoryUsage API.
  //
  // Ideally, we would be able to set the default value of MaxMetaspaceSize in
  // globals.hpp to the aligned value, but this is not possible, since the
  // alignment depends on other flags being parsed.
  MaxMetaspaceSize = align_down_bounded(MaxMetaspaceSize, _reserve_alignment);

  if (MetaspaceSize > MaxMetaspaceSize) {
    MetaspaceSize = MaxMetaspaceSize;
  }

  MetaspaceSize = align_down_bounded(MetaspaceSize, _commit_alignment);

  assert(MetaspaceSize <= MaxMetaspaceSize, "MetaspaceSize should be limited by MaxMetaspaceSize");

  MinMetaspaceExpansion = align_down_bounded(MinMetaspaceExpansion, _commit_alignment);
  MaxMetaspaceExpansion = align_down_bounded(MaxMetaspaceExpansion, _commit_alignment);

  CompressedClassSpaceSize = align_down_bounded(CompressedClassSpaceSize, _reserve_alignment);

  // Initial virtual space size will be calculated at global_initialize()
  size_t min_metaspace_sz =
      VIRTUALSPACEMULTIPLIER * InitialBootClassLoaderMetaspaceSize;
  if (UseCompressedClassPointers) {
    if ((min_metaspace_sz + CompressedClassSpaceSize) > MaxMetaspaceSize) {
      if (min_metaspace_sz >= MaxMetaspaceSize) {
        vm_exit_during_initialization("MaxMetaspaceSize is too small.");
      } else {
        FLAG_SET_ERGO(CompressedClassSpaceSize,
                      MaxMetaspaceSize - min_metaspace_sz);
      }
    }
  } else if (min_metaspace_sz >= MaxMetaspaceSize) {
    FLAG_SET_ERGO(InitialBootClassLoaderMetaspaceSize,
                  min_metaspace_sz);
  }

  set_compressed_class_space_size(CompressedClassSpaceSize);
}

void Metaspace::global_initialize() {
  MetaspaceGC::initialize(); // <- since we no longer preallocate initial chunks, is this still needed?

#if INCLUDE_CDS
  if (DumpSharedSpaces) {
    MetaspaceShared::initialize_dumptime_shared_and_meta_spaces();
  } else if (UseSharedSpaces) {
    // If any of the archived space fails to map, UseSharedSpaces
    // is reset to false. Fall through to the
    // (!DumpSharedSpaces && !UseSharedSpaces) case to set up class
    // metaspace.
    MetaspaceShared::initialize_runtime_shared_and_meta_spaces();
  }

  if (DynamicDumpSharedSpaces && !UseSharedSpaces) {
    vm_exit_during_initialization("DynamicDumpSharedSpaces is unsupported when base CDS archive is not loaded", NULL);
  }
#endif // INCLUDE_CDS

  // Initialize class space:
  if (CDS_ONLY(!DumpSharedSpaces && !UseSharedSpaces) NOT_CDS(true)) {
#ifdef _LP64
    if (using_class_space()) {
      char* base = (char*)align_up(Universe::heap()->reserved_region().end(), _reserve_alignment);
      allocate_metaspace_compressed_klass_ptrs(base, 0);
    }
#endif // _LP64
  }

  // Initialize non-class virtual space list, and its chunk manager:
  _space_list = new VirtualSpaceList("Non-Class VirtualSpaceList");
  _chunk_manager_metadata = new ChunkManager("Non-Class ChunkManager", _space_list);

  _tracer = new MetaspaceTracer();

  _initialized = true;
}

void Metaspace::post_initialize() {
  MetaspaceGC::post_initialize();
}

void Metaspace::verify_global_initialization() {
  assert(space_list() != NULL, "Metadata VirtualSpaceList has not been initialized");
  assert(chunk_manager_metadata() != NULL, "Metadata ChunkManager has not been initialized");

  if (using_class_space()) {
    assert(class_space_list() != NULL, "Class VirtualSpaceList has not been initialized");
    assert(chunk_manager_class() != NULL, "Class ChunkManager has not been initialized");
  }
}

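// Central entry point for metadata allocation. word_size is in machine words;
// the returned block is zero-initialized. Callers typically reach this through
// MetaspaceObj::operator new with a CHECK macro, hence the TRAPS parameter.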
MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
                              MetaspaceObj::Type type, TRAPS) {
  assert(!_frozen, "sanity");
  assert(!(DumpSharedSpaces && THREAD->is_VM_thread()), "sanity");

  if (HAS_PENDING_EXCEPTION) {
    assert(false, "Should not allocate with exception pending");
    return NULL;  // caller does a CHECK_NULL too
  }

  assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
        "ClassLoaderData::the_null_class_loader_data() should have been used.");

  MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;

  // Try to allocate metadata.
  MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);

  if (result == NULL) {
    tracer()->report_metaspace_allocation_failure(loader_data, word_size, type, mdtype);

    // Allocation failed.
    if (is_init_completed()) {
      // Only start a GC if the bootstrapping has completed.
      // Try to clean out some heap memory and retry. This can prevent premature
      // expansion of the metaspace.
      result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data, word_size, mdtype);
    }
  }

  if (result == NULL) {
    if (DumpSharedSpaces) {
      // CDS dumping keeps loading classes, so if we hit an OOM we probably will keep hitting OOM.
      // We should abort to avoid generating a potentially bad archive.
      vm_exit_during_cds_dumping(err_msg("Failed allocating metaspace object type %s of size " SIZE_FORMAT ". CDS dump aborted.",
          MetaspaceObj::type_name(type), word_size * BytesPerWord),
        err_msg("Please increase MaxMetaspaceSize (currently " SIZE_FORMAT " bytes).", MaxMetaspaceSize));
    }
    report_metadata_oome(loader_data, word_size, type, mdtype, THREAD);
    assert(HAS_PENDING_EXCEPTION, "sanity");
    return NULL;
  }

  // Zero initialize.
  Copy::fill_to_words((HeapWord*)result, word_size, 0);

  return result;
}

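// Slow path, called once an allocation has failed even after a GC attempt:
// logs a report, posts a JVMTI resource-exhausted event if requested, and
// throws the appropriate OutOfMemoryError.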
void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
  tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);

  // If result is still null, we are out of memory.
  Log(gc, metaspace, freelist, oom) log;
  if (log.is_info()) {
    log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
             is_class_space_allocation(mdtype) ? "class" : "data", word_size);
    ResourceMark rm;
    if (log.is_debug()) {
      if (loader_data->metaspace_or_null() != NULL) {
        LogStream ls(log.debug());
        loader_data->print_value_on(&ls);
      }
    }
    LogStream ls(log.info());
    // In case of an OOM, log out a short but still useful report.
    MetaspaceUtils::print_basic_report(&ls, 0);
  }

  bool out_of_compressed_class_space = false;
  if (is_class_space_allocation(mdtype)) {
    ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
    out_of_compressed_class_space =
      MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
      (metaspace->class_chunk_size(word_size) * BytesPerWord) >
      CompressedClassSpaceSize;
  }

  // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
  const char* space_string = out_of_compressed_class_space ?
    "Compressed class space" : "Metaspace";

  report_java_out_of_memory(space_string);

  if (JvmtiExport::should_post_resource_exhausted()) {
    JvmtiExport::post_resource_exhausted(
        JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
        space_string);
  }

  if (!is_init_completed()) {
    vm_exit_during_initialization("OutOfMemoryError", space_string);
  }

  if (out_of_compressed_class_space) {
    THROW_OOP(Universe::out_of_memory_error_class_metaspace());
  } else {
    THROW_OOP(Universe::out_of_memory_error_metaspace());
  }
}

const char* Metaspace::metadata_type_name(Metaspace::MetadataType mdtype) {
  switch (mdtype) {
    case Metaspace::ClassType: return "Class";
    case Metaspace::NonClassType: return "Metadata";
    default:
      assert(false, "Got bad mdtype: %d", (int) mdtype);
      return NULL;
  }
}

void Metaspace::purge(MetadataType mdtype) {
  get_space_list(mdtype)->purge(get_chunk_manager(mdtype));
}

void Metaspace::purge() {
  MutexLocker cl(MetaspaceExpand_lock,
                 Mutex::_no_safepoint_check_flag);
  purge(NonClassType);
  if (using_class_space()) {
    purge(ClassType);
  }
}

bool Metaspace::contains(const void* ptr) {
  if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
    return true;
  }
  return contains_non_shared(ptr);
}

bool Metaspace::contains_non_shared(const void* ptr) {
  if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
    return true;
  }

  return get_space_list(NonClassType)->contains(ptr);
}

/////////////// Unit tests ///////////////

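// These hooks expose chunk manager internals in a plain C layout so that code
// outside this translation unit (e.g. the metaspace gtests) can inspect them
// without including metaspace internals.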
struct chunkmanager_statistics_t {
  int num_specialized_chunks;
  int num_small_chunks;
  int num_medium_chunks;
  int num_humongous_chunks;
};

extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out) {
  ChunkManager* const chunk_manager = Metaspace::get_chunk_manager(mdType);
  ChunkManagerStatistics stat;
  chunk_manager->collect_statistics(&stat);
  out->num_specialized_chunks = (int)stat.chunk_stats(SpecializedIndex).num();
  out->num_small_chunks = (int)stat.chunk_stats(SmallIndex).num();
  out->num_medium_chunks = (int)stat.chunk_stats(MediumIndex).num();
  out->num_humongous_chunks = (int)stat.chunk_stats(HumongousIndex).num();
}

struct chunk_geometry_t {
  size_t specialized_chunk_word_size;
  size_t small_chunk_word_size;
  size_t medium_chunk_word_size;
};

extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out) {
  if (mdType == Metaspace::NonClassType) {
    out->specialized_chunk_word_size = SpecializedChunk;
    out->small_chunk_word_size = SmallChunk;
    out->medium_chunk_word_size = MediumChunk;
  } else {
    out->specialized_chunk_word_size = ClassSpecializedChunk;
    out->small_chunk_word_size = ClassSmallChunk;
    out->medium_chunk_word_size = ClassMediumChunk;
  }
}