/*
 * Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "aot/aotLoader.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/filemap.hpp"
#include "memory/metaspace.hpp"
#include "memory/metaspace/chunkManager.hpp"
#include "memory/metaspace/metachunk.hpp"
#include "memory/metaspace/metaspaceCommon.hpp"
#include "memory/metaspace/printCLDMetaspaceInfoClosure.hpp"
#include "memory/metaspace/spaceManager.hpp"
#include "memory/metaspace/virtualSpaceList.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/metaspaceTracer.hpp"
#include "memory/universe.hpp"
#include "oops/compressedOops.hpp"
#include "runtime/init.hpp"
#include "runtime/orderAccess.hpp"
#include "services/memTracker.hpp"
#include "utilities/copy.hpp"
#include "utilities/debug.hpp"
#include "utilities/formatBuffer.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/vmError.hpp"


using namespace metaspace;

MetaWord* last_allocated = 0;

size_t Metaspace::_compressed_class_space_size;
const MetaspaceTracer* Metaspace::_tracer = NULL;

DEBUG_ONLY(bool Metaspace::_frozen = false;)

static const char* space_type_name(Metaspace::MetaspaceType t) {
  const char* s = NULL;
  switch (t) {
    case Metaspace::StandardMetaspaceType: s = "Standard"; break;
    case Metaspace::BootMetaspaceType: s = "Boot"; break;
    case Metaspace::UnsafeAnonymousMetaspaceType: s = "UnsafeAnonymous"; break;
    case Metaspace::ReflectionMetaspaceType: s = "Reflection"; break;
    default: ShouldNotReachHere();
  }
  return s;
}

volatile size_t MetaspaceGC::_capacity_until_GC = 0;
uint MetaspaceGC::_shrink_factor = 0;
bool MetaspaceGC::_should_concurrent_collect = false;

// BlockFreelist methods

// VirtualSpaceNode methods

// MetaspaceGC methods
// VM_CollectForMetadataAllocation is the VM operation used to GC.
// Within the VM operation, the attempt to allocate the metadata after
// the GC should succeed.  If the GC did not free enough space for the
// metaspace allocation, the HWM is increased so that another virtualspace
// will be allocated for the metadata.  With the permanent generation,
// increases were bounded by MinMetaspaceExpansion and MaxMetaspaceExpansion.
// The metaspace policy uses those as the small and large steps for the HWM.
//
// After the GC, compute_new_size() for MetaspaceGC is called to resize
// the capacity of the metaspaces.  The current implementation is based
// on the flags MinMetaspaceFreeRatio and MaxMetaspaceFreeRatio, which some
// GCs use to resize the Java heap.  New flags can be implemented if really
// needed.  MinMetaspaceFreeRatio is used to calculate how much free space
// is desirable in the metaspace capacity when deciding how much to increase
// the HWM.  MaxMetaspaceFreeRatio is used to decide how much free space
// is desirable in the metaspace capacity before decreasing the HWM.
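//
// A worked example (illustration only, using the default flag values
// MinMetaspaceFreeRatio=40 and MaxMetaspaceFreeRatio=70): if a GC leaves
// 60MB of metaspace committed, compute_new_size() wants at least 40% free
// headroom, so the minimum desired capacity is 60MB / (1 - 0.40) = 100MB;
// the HWM is raised if it lies below that.  Conversely, at most 70% free
// is tolerated, so the maximum desired capacity is 60MB / (1 - 0.70) =
// 200MB; the HWM is (gradually, see the shrink factor below) lowered if
// it exceeds that.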

// Calculate the amount to increase the high water mark (HWM).
// Increase by a minimum amount (MinMetaspaceExpansion) so that
// another expansion is not requested too soon.  If that is not
// enough to satisfy the allocation, increase by MaxMetaspaceExpansion.
// If that is still not enough, expand by the size of the allocation
// plus some.
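// For example (illustration only, assuming round values of 256K for
// MinMetaspaceExpansion and 4M for MaxMetaspaceExpansion): a 100K request
// yields a delta of 256K, a 1M request yields 4M, and a 10M request yields
// 10M (aligned up to the commit alignment) plus 256K.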
size_t MetaspaceGC::delta_capacity_until_GC(size_t bytes) {
  size_t min_delta = MinMetaspaceExpansion;
  size_t max_delta = MaxMetaspaceExpansion;
  size_t delta = align_up(bytes, Metaspace::commit_alignment());

  if (delta <= min_delta) {
    delta = min_delta;
  } else if (delta <= max_delta) {
    // Don't want to hit the high water mark on the next
    // allocation so make the delta greater than just enough
    // for this allocation.
    delta = max_delta;
  } else {
    // This allocation is large but the next ones are probably not
    // so increase by the minimum.
    delta = delta + min_delta;
  }

  assert_is_aligned(delta, Metaspace::commit_alignment());

  return delta;
}

size_t MetaspaceGC::capacity_until_GC() {
  size_t value = OrderAccess::load_acquire(&_capacity_until_GC);
  assert(value >= MetaspaceSize, "Not initialized properly?");
  return value;
}

// Try to increase the _capacity_until_GC limit counter by v bytes.
// Returns true if it succeeded. It may fail if either another thread
// concurrently increased the limit or the new limit would be larger
// than MaxMetaspaceSize.
// On success, optionally returns the new and old metaspace capacity in
// new_cap_until_GC and old_cap_until_GC respectively.
// On error, optionally sets can_retry to indicate whether there is
// actually enough space remaining to satisfy the request.
bool MetaspaceGC::inc_capacity_until_GC(size_t v, size_t* new_cap_until_GC, size_t* old_cap_until_GC, bool* can_retry) {
  assert_is_aligned(v, Metaspace::commit_alignment());

  size_t old_capacity_until_GC = _capacity_until_GC;
  size_t new_value = old_capacity_until_GC + v;

  if (new_value < old_capacity_until_GC) {
    // The addition wrapped around, set new_value to aligned max value.
    new_value = align_down(max_uintx, Metaspace::commit_alignment());
  }

  if (new_value > MaxMetaspaceSize) {
    if (can_retry != NULL) {
      *can_retry = false;
    }
    return false;
  }

  if (can_retry != NULL) {
    *can_retry = true;
  }
  size_t prev_value = Atomic::cmpxchg(new_value, &_capacity_until_GC, old_capacity_until_GC);

  if (old_capacity_until_GC != prev_value) {
    return false;
  }

  if (new_cap_until_GC != NULL) {
    *new_cap_until_GC = new_value;
  }
  if (old_cap_until_GC != NULL) {
    *old_cap_until_GC = old_capacity_until_GC;
  }
  return true;
}
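
// Sketch of a typical caller pattern (for illustration only; the real call
// sites live in the GC code): retry the CAS-based increment until it either
// succeeds or can_retry reports that the MaxMetaspaceSize ceiling is hit:
//
//   size_t delta = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
//   bool can_retry = true;
//   while (!MetaspaceGC::inc_capacity_until_GC(delta, NULL, NULL, &can_retry) &&
//          can_retry) {
//     // Lost the race against a concurrent increment; simply try again.
//   }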

size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
  assert_is_aligned(v, Metaspace::commit_alignment());

  return Atomic::sub(v, &_capacity_until_GC);
}

void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}

void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done.
  _capacity_until_GC = MAX2(MetaspaceUtils::committed_bytes(), MetaspaceSize);
}

bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
  // Check if the compressed class space is full.
  if (is_class && Metaspace::using_class_space()) {
    size_t class_committed = MetaspaceUtils::committed_bytes(Metaspace::ClassType);
    if (class_committed + word_size * BytesPerWord > CompressedClassSpaceSize) {
      log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by " SIZE_FORMAT " words (CompressedClassSpaceSize = " SIZE_FORMAT " words)",
                (is_class ? "class" : "non-class"), word_size, CompressedClassSpaceSize / sizeof(MetaWord));
      return false;
    }
  }

  // Check if the user has imposed a limit on the metaspace memory.
  size_t committed_bytes = MetaspaceUtils::committed_bytes();
  if (committed_bytes + word_size * BytesPerWord > MaxMetaspaceSize) {
    log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by " SIZE_FORMAT " words (MaxMetaspaceSize = " SIZE_FORMAT " words)",
              (is_class ? "class" : "non-class"), word_size, MaxMetaspaceSize / sizeof(MetaWord));
    return false;
  }

  return true;
}

size_t MetaspaceGC::allowed_expansion() {
  size_t committed_bytes = MetaspaceUtils::committed_bytes();
  size_t capacity_until_gc = capacity_until_GC();

  assert(capacity_until_gc >= committed_bytes,
         "capacity_until_gc: " SIZE_FORMAT " < committed_bytes: " SIZE_FORMAT,
         capacity_until_gc, committed_bytes);

  size_t left_until_max  = MaxMetaspaceSize - committed_bytes;
  size_t left_until_GC = capacity_until_gc - committed_bytes;
  size_t left_to_commit = MIN2(left_until_GC, left_until_max);
  log_trace(gc, metaspace, freelist)("allowed expansion words: " SIZE_FORMAT
            " (left_until_max: " SIZE_FORMAT ", left_until_GC: " SIZE_FORMAT ").",
            left_to_commit / BytesPerWord, left_until_max / BytesPerWord, left_until_GC / BytesPerWord);

  return left_to_commit / BytesPerWord;
}
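
// Worked example for allowed_expansion() (illustration only): with 100M
// committed, a GC threshold (capacity_until_GC) of 120M and
// MaxMetaspaceSize=1G, left_until_GC is 20M and left_until_max is 924M, so
// the function allows committing another 20M (returned as a word count).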

void MetaspaceGC::compute_new_size() {
  assert(_shrink_factor <= 100, "invalid shrink factor");
  uint current_shrink_factor = _shrink_factor;
  _shrink_factor = 0;

  // Using committed_bytes() for used_after_gc is an overestimation, since the
  // chunk free lists are included in committed_bytes() and the memory in an
  // un-fragmented chunk free list is available for future allocations.
  // However, if the chunk free lists become fragmented, then the memory may
  // not be available for future allocations and the memory is therefore "in use".
  // Including the chunk free lists in the definition of "in use" is therefore
  // necessary. Not including the chunk free lists can cause capacity_until_GC to
  // shrink below committed_bytes() and this has caused serious bugs in the past.
  const size_t used_after_gc = MetaspaceUtils::committed_bytes();
  const size_t capacity_until_GC = MetaspaceGC::capacity_until_GC();

  const double minimum_free_percentage = MinMetaspaceFreeRatio / 100.0;
  const double maximum_used_percentage = 1.0 - minimum_free_percentage;

  const double min_tmp = used_after_gc / maximum_used_percentage;
  size_t minimum_desired_capacity =
    (size_t)MIN2(min_tmp, double(MaxMetaspaceSize));
  // Don't shrink less than the initial generation size
  minimum_desired_capacity = MAX2(minimum_desired_capacity,
                                  MetaspaceSize);

  log_trace(gc, metaspace)("MetaspaceGC::compute_new_size: ");
  log_trace(gc, metaspace)("    minimum_free_percentage: %6.2f  maximum_used_percentage: %6.2f",
                           minimum_free_percentage, maximum_used_percentage);
  log_trace(gc, metaspace)("     used_after_gc       : %6.1fKB", used_after_gc / (double) K);


  size_t shrink_bytes = 0;
  if (capacity_until_GC < minimum_desired_capacity) {
    // If we have less capacity below the metaspace HWM, then
    // increment the HWM.
    size_t expand_bytes = minimum_desired_capacity - capacity_until_GC;
    expand_bytes = align_up(expand_bytes, Metaspace::commit_alignment());
    // Don't expand unless it's significant
    if (expand_bytes >= MinMetaspaceExpansion) {
      size_t new_capacity_until_GC = 0;
      bool succeeded = MetaspaceGC::inc_capacity_until_GC(expand_bytes, &new_capacity_until_GC);
      assert(succeeded, "Should always successfully increment HWM when at safepoint");

      Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
                                               new_capacity_until_GC,
                                               MetaspaceGCThresholdUpdater::ComputeNewSize);
      log_trace(gc, metaspace)("    expanding:  minimum_desired_capacity: %6.1fKB  expand_bytes: %6.1fKB  MinMetaspaceExpansion: %6.1fKB  new metaspace HWM:  %6.1fKB",
                               minimum_desired_capacity / (double) K,
                               expand_bytes / (double) K,
                               MinMetaspaceExpansion / (double) K,
                               new_capacity_until_GC / (double) K);
    }
    return;
  }

  // No expansion, now see if we want to shrink
  // We would never want to shrink more than this
  assert(capacity_until_GC >= minimum_desired_capacity,
         SIZE_FORMAT " >= " SIZE_FORMAT,
         capacity_until_GC, minimum_desired_capacity);
  size_t max_shrink_bytes = capacity_until_GC - minimum_desired_capacity;

  // Should shrinking be considered?
  if (MaxMetaspaceFreeRatio < 100) {
    const double maximum_free_percentage = MaxMetaspaceFreeRatio / 100.0;
    const double minimum_used_percentage = 1.0 - maximum_free_percentage;
    const double max_tmp = used_after_gc / minimum_used_percentage;
    size_t maximum_desired_capacity = (size_t)MIN2(max_tmp, double(MaxMetaspaceSize));
    maximum_desired_capacity = MAX2(maximum_desired_capacity,
                                    MetaspaceSize);
    log_trace(gc, metaspace)("    maximum_free_percentage: %6.2f  minimum_used_percentage: %6.2f",
                             maximum_free_percentage, minimum_used_percentage);
    log_trace(gc, metaspace)("    minimum_desired_capacity: %6.1fKB  maximum_desired_capacity: %6.1fKB",
                             minimum_desired_capacity / (double) K, maximum_desired_capacity / (double) K);

    assert(minimum_desired_capacity <= maximum_desired_capacity,
           "sanity check");

    if (capacity_until_GC > maximum_desired_capacity) {
      // Capacity too large, compute shrinking size
      shrink_bytes = capacity_until_GC - maximum_desired_capacity;
      // We don't want to shrink all the way back to initSize if people call
      // System.gc(), because some programs do that between "phases" and then
      // we'd just have to grow the heap up again for the next phase.  So we
      // damp the shrinking: 0% on the first call, 10% on the second call, 40%
      // on the third call, and 100% by the fourth call.  But if we recompute
      // size without shrinking, it goes back to 0%.
      shrink_bytes = shrink_bytes / 100 * current_shrink_factor;

      shrink_bytes = align_down(shrink_bytes, Metaspace::commit_alignment());

      assert(shrink_bytes <= max_shrink_bytes,
             "invalid shrink size " SIZE_FORMAT " not <= " SIZE_FORMAT,
             shrink_bytes, max_shrink_bytes);
      if (current_shrink_factor == 0) {
        _shrink_factor = 10;
      } else {
        _shrink_factor = MIN2(current_shrink_factor * 4, (uint) 100);
      }
      log_trace(gc, metaspace)("    shrinking:  initThreshold: %.1fK  maximum_desired_capacity: %.1fK",
                               MetaspaceSize / (double) K, maximum_desired_capacity / (double) K);
      log_trace(gc, metaspace)("    shrink_bytes: %.1fK  current_shrink_factor: %d  new shrink factor: %d  MinMetaspaceExpansion: %.1fK",
                               shrink_bytes / (double) K, current_shrink_factor, _shrink_factor, MinMetaspaceExpansion / (double) K);
    }
  }

  // Don't shrink unless it's significant
  if (shrink_bytes >= MinMetaspaceExpansion &&
      ((capacity_until_GC - shrink_bytes) >= MetaspaceSize)) {
    size_t new_capacity_until_GC = MetaspaceGC::dec_capacity_until_GC(shrink_bytes);
    Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
                                             new_capacity_until_GC,
                                             MetaspaceGCThresholdUpdater::ComputeNewSize);
  }
}

// MetaspaceUtils
size_t MetaspaceUtils::_capacity_words[Metaspace::MetadataTypeCount] = {0, 0};
size_t MetaspaceUtils::_overhead_words[Metaspace::MetadataTypeCount] = {0, 0};
volatile size_t MetaspaceUtils::_used_words[Metaspace::MetadataTypeCount] = {0, 0};

// Collect used metaspace statistics. This involves walking the CLDG. The resulting
// output will be the accumulated values for all live metaspaces.
// Note: method does not do any locking.
void MetaspaceUtils::collect_statistics(ClassLoaderMetaspaceStatistics* out) {
  out->reset();
  ClassLoaderDataGraphMetaspaceIterator iter;
  while (iter.repeat()) {
    ClassLoaderMetaspace* msp = iter.get_next();
    if (msp != NULL) {
      msp->add_to_statistics(out);
    }
  }
}

size_t MetaspaceUtils::free_in_vs_bytes(Metaspace::MetadataType mdtype) {
  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
  return list == NULL ? 0 : list->free_bytes();
}

size_t MetaspaceUtils::free_in_vs_bytes() {
  return free_in_vs_bytes(Metaspace::ClassType) + free_in_vs_bytes(Metaspace::NonClassType);
}

static void inc_stat_nonatomically(size_t* pstat, size_t words) {
  assert_lock_strong(MetaspaceExpand_lock);
  (*pstat) += words;
}

static void dec_stat_nonatomically(size_t* pstat, size_t words) {
  assert_lock_strong(MetaspaceExpand_lock);
  const size_t size_now = *pstat;
  assert(size_now >= words, "About to decrement counter below zero "
         "(current value: " SIZE_FORMAT ", decrement value: " SIZE_FORMAT ").",
         size_now, words);
  *pstat = size_now - words;
}

static void inc_stat_atomically(volatile size_t* pstat, size_t words) {
  Atomic::add(words, pstat);
}

static void dec_stat_atomically(volatile size_t* pstat, size_t words) {
  const size_t size_now = *pstat;
  assert(size_now >= words, "About to decrement counter below zero "
         "(current value: " SIZE_FORMAT ", decrement value: " SIZE_FORMAT ").",
         size_now, words);
  Atomic::sub(words, pstat);
}

void MetaspaceUtils::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
  dec_stat_nonatomically(&_capacity_words[mdtype], words);
}
void MetaspaceUtils::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
  inc_stat_nonatomically(&_capacity_words[mdtype], words);
}
void MetaspaceUtils::dec_used(Metaspace::MetadataType mdtype, size_t words) {
  dec_stat_atomically(&_used_words[mdtype], words);
}
void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
  inc_stat_atomically(&_used_words[mdtype], words);
}
void MetaspaceUtils::dec_overhead(Metaspace::MetadataType mdtype, size_t words) {
  dec_stat_nonatomically(&_overhead_words[mdtype], words);
}
void MetaspaceUtils::inc_overhead(Metaspace::MetadataType mdtype, size_t words) {
  inc_stat_nonatomically(&_overhead_words[mdtype], words);
}

size_t MetaspaceUtils::reserved_bytes(Metaspace::MetadataType mdtype) {
  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
  return list == NULL ? 0 : list->reserved_bytes();
}

size_t MetaspaceUtils::committed_bytes(Metaspace::MetadataType mdtype) {
  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
  return list == NULL ? 0 : list->committed_bytes();
}

size_t MetaspaceUtils::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }

size_t MetaspaceUtils::free_chunks_total_words(Metaspace::MetadataType mdtype) {
  ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
  if (chunk_manager == NULL) {
    return 0;
  }
  return chunk_manager->free_chunks_total_words();
}

size_t MetaspaceUtils::free_chunks_total_bytes(Metaspace::MetadataType mdtype) {
  return free_chunks_total_words(mdtype) * BytesPerWord;
}

size_t MetaspaceUtils::free_chunks_total_words() {
  return free_chunks_total_words(Metaspace::ClassType) +
         free_chunks_total_words(Metaspace::NonClassType);
}

size_t MetaspaceUtils::free_chunks_total_bytes() {
  return free_chunks_total_words() * BytesPerWord;
}

bool MetaspaceUtils::has_chunk_free_list(Metaspace::MetadataType mdtype) {
  return Metaspace::get_chunk_manager(mdtype) != NULL;
}

MetaspaceChunkFreeListSummary MetaspaceUtils::chunk_free_list_summary(Metaspace::MetadataType mdtype) {
  if (!has_chunk_free_list(mdtype)) {
    return MetaspaceChunkFreeListSummary();
  }

  const ChunkManager* cm = Metaspace::get_chunk_manager(mdtype);
  return cm->chunk_free_list_summary();
}

void MetaspaceUtils::print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values) {
  const metaspace::MetaspaceSizesSnapshot meta_values;

  if (Metaspace::using_class_space()) {
    log_info(gc, metaspace)(HEAP_CHANGE_FORMAT" "
                            HEAP_CHANGE_FORMAT" "
                            HEAP_CHANGE_FORMAT,
                            HEAP_CHANGE_FORMAT_ARGS("Metaspace",
                                                    pre_meta_values.used(),
                                                    pre_meta_values.committed(),
                                                    meta_values.used(),
                                                    meta_values.committed()),
                            HEAP_CHANGE_FORMAT_ARGS("NonClass",
                                                    pre_meta_values.non_class_used(),
                                                    pre_meta_values.non_class_committed(),
                                                    meta_values.non_class_used(),
                                                    meta_values.non_class_committed()),
                            HEAP_CHANGE_FORMAT_ARGS("Class",
                                                    pre_meta_values.class_used(),
                                                    pre_meta_values.class_committed(),
                                                    meta_values.class_used(),
                                                    meta_values.class_committed()));
  } else {
    log_info(gc, metaspace)(HEAP_CHANGE_FORMAT,
                            HEAP_CHANGE_FORMAT_ARGS("Metaspace",
                                                    pre_meta_values.used(),
                                                    pre_meta_values.committed(),
                                                    meta_values.used(),
                                                    meta_values.committed()));
  }
}

void MetaspaceUtils::print_on(outputStream* out) {
  Metaspace::MetadataType nct = Metaspace::NonClassType;

  out->print_cr(" Metaspace       "
                "used "      SIZE_FORMAT "K, "
                "capacity "  SIZE_FORMAT "K, "
                "committed " SIZE_FORMAT "K, "
                "reserved "  SIZE_FORMAT "K",
                used_bytes()/K,
                capacity_bytes()/K,
                committed_bytes()/K,
                reserved_bytes()/K);

  if (Metaspace::using_class_space()) {
    Metaspace::MetadataType ct = Metaspace::ClassType;
    out->print_cr("  class space    "
                  "used "      SIZE_FORMAT "K, "
                  "capacity "  SIZE_FORMAT "K, "
                  "committed " SIZE_FORMAT "K, "
                  "reserved "  SIZE_FORMAT "K",
                  used_bytes(ct)/K,
                  capacity_bytes(ct)/K,
                  committed_bytes(ct)/K,
                  reserved_bytes(ct)/K);
  }
}


void MetaspaceUtils::print_vs(outputStream* out, size_t scale) {
  const size_t reserved_nonclass_words = reserved_bytes(Metaspace::NonClassType) / sizeof(MetaWord);
  const size_t committed_nonclass_words = committed_bytes(Metaspace::NonClassType) / sizeof(MetaWord);
  {
    if (Metaspace::using_class_space()) {
      out->print("  Non-class space:  ");
    }
    print_scaled_words(out, reserved_nonclass_words, scale, 7);
    out->print(" reserved, ");
    print_scaled_words_and_percentage(out, committed_nonclass_words, reserved_nonclass_words, scale, 7);
    out->print_cr(" committed ");

    if (Metaspace::using_class_space()) {
      const size_t reserved_class_words = reserved_bytes(Metaspace::ClassType) / sizeof(MetaWord);
      const size_t committed_class_words = committed_bytes(Metaspace::ClassType) / sizeof(MetaWord);
      out->print("      Class space:  ");
      print_scaled_words(out, reserved_class_words, scale, 7);
      out->print(" reserved, ");
      print_scaled_words_and_percentage(out, committed_class_words, reserved_class_words, scale, 7);
      out->print_cr(" committed ");

      const size_t reserved_words = reserved_nonclass_words + reserved_class_words;
      const size_t committed_words = committed_nonclass_words + committed_class_words;
      out->print("             Both:  ");
      print_scaled_words(out, reserved_words, scale, 7);
      out->print(" reserved, ");
      print_scaled_words_and_percentage(out, committed_words, reserved_words, scale, 7);
      out->print_cr(" committed ");
    }
  }
}

static void print_basic_switches(outputStream* out, size_t scale) {
  out->print("MaxMetaspaceSize: ");
  if (MaxMetaspaceSize >= (max_uintx) - (2 * os::vm_page_size())) {
    // aka "very big". Default is max_uintx, but due to rounding in arg parsing the real
    // value is smaller.
    out->print("unlimited");
  } else {
    print_human_readable_size(out, MaxMetaspaceSize, scale);
  }
  out->cr();
  if (Metaspace::using_class_space()) {
    out->print("CompressedClassSpaceSize: ");
    print_human_readable_size(out, CompressedClassSpaceSize, scale);
  }
  out->cr();
}

// This will print out a basic metaspace usage report but
// unlike print_report() it is guaranteed not to lock or to walk the CLDG.
void MetaspaceUtils::print_basic_report(outputStream* out, size_t scale) {

  if (!Metaspace::initialized()) {
    out->print_cr("Metaspace not yet initialized.");
    return;
  }

  out->cr();
  out->print_cr("Usage:");

  if (Metaspace::using_class_space()) {
    out->print("  Non-class:  ");
  }

  // In its most basic form, we do not require walking the CLDG. Instead, just print the running totals from
  // MetaspaceUtils.
  const size_t cap_nc = MetaspaceUtils::capacity_words(Metaspace::NonClassType);
  const size_t overhead_nc = MetaspaceUtils::overhead_words(Metaspace::NonClassType);
  const size_t used_nc = MetaspaceUtils::used_words(Metaspace::NonClassType);
  const size_t free_and_waste_nc = cap_nc - overhead_nc - used_nc;

  print_scaled_words(out, cap_nc, scale, 5);
  out->print(" capacity, ");
  print_scaled_words_and_percentage(out, used_nc, cap_nc, scale, 5);
  out->print(" used, ");
  print_scaled_words_and_percentage(out, free_and_waste_nc, cap_nc, scale, 5);
  out->print(" free+waste, ");
  print_scaled_words_and_percentage(out, overhead_nc, cap_nc, scale, 5);
  out->print(" overhead. ");
  out->cr();

  if (Metaspace::using_class_space()) {
    const size_t cap_c = MetaspaceUtils::capacity_words(Metaspace::ClassType);
    const size_t overhead_c = MetaspaceUtils::overhead_words(Metaspace::ClassType);
    const size_t used_c = MetaspaceUtils::used_words(Metaspace::ClassType);
    const size_t free_and_waste_c = cap_c - overhead_c - used_c;
    out->print("      Class:  ");
    print_scaled_words(out, cap_c, scale, 5);
    out->print(" capacity, ");
    print_scaled_words_and_percentage(out, used_c, cap_c, scale, 5);
    out->print(" used, ");
    print_scaled_words_and_percentage(out, free_and_waste_c, cap_c, scale, 5);
    out->print(" free+waste, ");
    print_scaled_words_and_percentage(out, overhead_c, cap_c, scale, 5);
    out->print(" overhead. ");
    out->cr();

    out->print("       Both:  ");
    const size_t cap = cap_nc + cap_c;

    print_scaled_words(out, cap, scale, 5);
    out->print(" capacity, ");
    print_scaled_words_and_percentage(out, used_nc + used_c, cap, scale, 5);
    out->print(" used, ");
    print_scaled_words_and_percentage(out, free_and_waste_nc + free_and_waste_c, cap, scale, 5);
    out->print(" free+waste, ");
    print_scaled_words_and_percentage(out, overhead_nc + overhead_c, cap, scale, 5);
    out->print(" overhead. ");
    out->cr();
  }

  out->cr();
  out->print_cr("Virtual space:");

  print_vs(out, scale);

  out->cr();
  out->print_cr("Chunk freelists:");

  if (Metaspace::using_class_space()) {
    out->print("   Non-Class:  ");
  }
  print_human_readable_size(out, Metaspace::chunk_manager_metadata()->free_chunks_total_bytes(), scale);
  out->cr();
  if (Metaspace::using_class_space()) {
    out->print("       Class:  ");
    print_human_readable_size(out, Metaspace::chunk_manager_class()->free_chunks_total_bytes(), scale);
    out->cr();
    out->print("        Both:  ");
    print_human_readable_size(out, Metaspace::chunk_manager_class()->free_chunks_total_bytes() +
                              Metaspace::chunk_manager_metadata()->free_chunks_total_bytes(), scale);
    out->cr();
  }

  out->cr();

  // Print basic settings
  print_basic_switches(out, scale);

  out->cr();

}

void MetaspaceUtils::print_report(outputStream* out, size_t scale, int flags) {

  if (!Metaspace::initialized()) {
    out->print_cr("Metaspace not yet initialized.");
    return;
  }

  const bool print_loaders = (flags & rf_show_loaders) > 0;
  const bool print_classes = (flags & rf_show_classes) > 0;
  const bool print_by_chunktype = (flags & rf_break_down_by_chunktype) > 0;
  const bool print_by_spacetype = (flags & rf_break_down_by_spacetype) > 0;

  // Some report options require walking the class loader data graph.
  PrintCLDMetaspaceInfoClosure cl(out, scale, print_loaders, print_classes, print_by_chunktype);
  if (print_loaders) {
    out->cr();
    out->print_cr("Usage per loader:");
    out->cr();
  }

  ClassLoaderDataGraph::loaded_cld_do(&cl); // collect data and optionally print

  // Print totals, broken up by space type.
  if (print_by_spacetype) {
    out->cr();
    out->print_cr("Usage per space type:");
    out->cr();
    for (int space_type = (int)Metaspace::ZeroMetaspaceType;
         space_type < (int)Metaspace::MetaspaceTypeCount; space_type++)
    {
      uintx num_loaders = cl._num_loaders_by_spacetype[space_type];
      uintx num_classes = cl._num_classes_by_spacetype[space_type];
      out->print("%s - " UINTX_FORMAT " %s",
        space_type_name((Metaspace::MetaspaceType)space_type),
        num_loaders, loaders_plural(num_loaders));
      if (num_classes > 0) {
        out->print(", ");
        print_number_of_classes(out, num_classes, cl._num_classes_shared_by_spacetype[space_type]);
        out->print(":");
        cl._stats_by_spacetype[space_type].print_on(out, scale, print_by_chunktype);
      } else {
        out->print(".");
        out->cr();
      }
      out->cr();
    }
  }

  // Print totals for in-use data:
  out->cr();
  {
    uintx num_loaders = cl._num_loaders;
    out->print("Total Usage - " UINTX_FORMAT " %s, ",
      num_loaders, loaders_plural(num_loaders));
    print_number_of_classes(out, cl._num_classes, cl._num_classes_shared);
    out->print(":");
    cl._stats_total.print_on(out, scale, print_by_chunktype);
    out->cr();
  }

  // -- Print Virtual space.
  out->cr();
  out->print_cr("Virtual space:");

  print_vs(out, scale);

  // -- Print VirtualSpaceList details.
  if ((flags & rf_show_vslist) > 0) {
    out->cr();
    out->print_cr("Virtual space list%s:", Metaspace::using_class_space() ? "s" : "");

    if (Metaspace::using_class_space()) {
      out->print_cr("   Non-Class:");
    }
    Metaspace::space_list()->print_on(out, scale);
    if (Metaspace::using_class_space()) {
      out->print_cr("       Class:");
      Metaspace::class_space_list()->print_on(out, scale);
    }
  }
  out->cr();

  // -- Print VirtualSpaceList map.
  if ((flags & rf_show_vsmap) > 0) {
    out->cr();
    out->print_cr("Virtual space map:");

    if (Metaspace::using_class_space()) {
      out->print_cr("   Non-Class:");
    }
    Metaspace::space_list()->print_map(out);
    if (Metaspace::using_class_space()) {
      out->print_cr("       Class:");
      Metaspace::class_space_list()->print_map(out);
    }
  }
  out->cr();

  // -- Print Freelists (ChunkManager) details
  out->cr();
  out->print_cr("Chunk freelist%s:", Metaspace::using_class_space() ? "s" : "");

  ChunkManagerStatistics non_class_cm_stat;
  Metaspace::chunk_manager_metadata()->collect_statistics(&non_class_cm_stat);

  if (Metaspace::using_class_space()) {
    out->print_cr("   Non-Class:");
  }
  non_class_cm_stat.print_on(out, scale);

  if (Metaspace::using_class_space()) {
    ChunkManagerStatistics class_cm_stat;
    Metaspace::chunk_manager_class()->collect_statistics(&class_cm_stat);
    out->print_cr("       Class:");
    class_cm_stat.print_on(out, scale);
  }

  // As a convenience, print a summary of common waste.
  out->cr();
  out->print("Waste ");
  // For each waste category, print its percentage of the total committed metaspace size.
  const size_t committed_words = committed_bytes() / BytesPerWord;

  out->print("(percentages refer to total committed size ");
  print_scaled_words(out, committed_words, scale);
  out->print_cr("):");

  // Print space committed but not yet used by any class loader
  const size_t unused_words_in_vs = MetaspaceUtils::free_in_vs_bytes() / BytesPerWord;
  out->print("              Committed unused: ");
  print_scaled_words_and_percentage(out, unused_words_in_vs, committed_words, scale, 6);
  out->cr();

  // Print waste for in-use chunks.
  UsedChunksStatistics ucs_nonclass = cl._stats_total.nonclass_sm_stats().totals();
  UsedChunksStatistics ucs_class = cl._stats_total.class_sm_stats().totals();
  UsedChunksStatistics ucs_all;
  ucs_all.add(ucs_nonclass);
  ucs_all.add(ucs_class);

  out->print("        Waste in chunks in use: ");
  print_scaled_words_and_percentage(out, ucs_all.waste(), committed_words, scale, 6);
  out->cr();
  out->print("         Free in chunks in use: ");
  print_scaled_words_and_percentage(out, ucs_all.free(), committed_words, scale, 6);
  out->cr();
  out->print("     Overhead in chunks in use: ");
  print_scaled_words_and_percentage(out, ucs_all.overhead(), committed_words, scale, 6);
  out->cr();

  // Print waste in free chunks.
  const size_t total_capacity_in_free_chunks =
      Metaspace::chunk_manager_metadata()->free_chunks_total_words() +
     (Metaspace::using_class_space() ? Metaspace::chunk_manager_class()->free_chunks_total_words() : 0);
  out->print("                In free chunks: ");
  print_scaled_words_and_percentage(out, total_capacity_in_free_chunks, committed_words, scale, 6);
  out->cr();

  // Print waste in deallocated blocks.
  const uintx free_blocks_num =
      cl._stats_total.nonclass_sm_stats().free_blocks_num() +
      cl._stats_total.class_sm_stats().free_blocks_num();
  const size_t free_blocks_cap_words =
      cl._stats_total.nonclass_sm_stats().free_blocks_cap_words() +
      cl._stats_total.class_sm_stats().free_blocks_cap_words();
  out->print("Deallocated from chunks in use: ");
  print_scaled_words_and_percentage(out, free_blocks_cap_words, committed_words, scale, 6);
  out->print(" (" UINTX_FORMAT " blocks)", free_blocks_num);
  out->cr();

  // Print total waste.
  const size_t total_waste = ucs_all.waste() + ucs_all.free() + ucs_all.overhead() + total_capacity_in_free_chunks
      + free_blocks_cap_words + unused_words_in_vs;
  out->print("                       -total-: ");
  print_scaled_words_and_percentage(out, total_waste, committed_words, scale, 6);
  out->cr();

  // Print internal statistics
#ifdef ASSERT
  out->cr();
  out->cr();
  out->print_cr("Internal statistics:");
  out->cr();
  out->print_cr("Number of allocations: " UINTX_FORMAT ".", g_internal_statistics.num_allocs);
  out->print_cr("Number of space births: " UINTX_FORMAT ".", g_internal_statistics.num_metaspace_births);
  out->print_cr("Number of space deaths: " UINTX_FORMAT ".", g_internal_statistics.num_metaspace_deaths);
  out->print_cr("Number of virtual space node births: " UINTX_FORMAT ".", g_internal_statistics.num_vsnodes_created);
  out->print_cr("Number of virtual space node deaths: " UINTX_FORMAT ".", g_internal_statistics.num_vsnodes_purged);
  out->print_cr("Number of times virtual space nodes were expanded: " UINTX_FORMAT ".", g_internal_statistics.num_committed_space_expanded);
  out->print_cr("Number of deallocations: " UINTX_FORMAT " (" UINTX_FORMAT " external).", g_internal_statistics.num_deallocs, g_internal_statistics.num_external_deallocs);
  out->print_cr("Allocations from deallocated blocks: " UINTX_FORMAT ".", g_internal_statistics.num_allocs_from_deallocated_blocks);
  out->print_cr("Number of chunks added to freelist: " UINTX_FORMAT ".",
                g_internal_statistics.num_chunks_added_to_freelist);
  out->print_cr("Number of chunks removed from freelist: " UINTX_FORMAT ".",
                g_internal_statistics.num_chunks_removed_from_freelist);
  out->print_cr("Number of chunk merges: " UINTX_FORMAT ", split-ups: " UINTX_FORMAT ".",
                g_internal_statistics.num_chunk_merges, g_internal_statistics.num_chunk_splits);

  out->cr();
#endif

  // Print some interesting settings
  out->cr();
  out->cr();
  print_basic_switches(out, scale);

  out->cr();
  out->print("InitialBootClassLoaderMetaspaceSize: ");
  print_human_readable_size(out, InitialBootClassLoaderMetaspaceSize, scale);

  out->cr();
  out->cr();

} // MetaspaceUtils::print_report()

// Prints an ASCII representation of the given space.
void MetaspaceUtils::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
  MutexLocker cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
  const bool for_class = (mdtype == Metaspace::ClassType);
  VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
  if (vsl != NULL) {
    if (for_class) {
      if (!Metaspace::using_class_space()) {
        out->print_cr("No Class Space.");
        return;
      }
      out->print_raw("---- Metaspace Map (Class Space) ----");
    } else {
      out->print_raw("---- Metaspace Map (Non-Class Space) ----");
    }
    // Print legend:
    out->cr();
    out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
    out->cr();
    vsl->print_map(out);
    out->cr();
  }
}

void MetaspaceUtils::verify_free_chunks() {
#ifdef ASSERT
  Metaspace::chunk_manager_metadata()->verify(false);
  if (Metaspace::using_class_space()) {
    Metaspace::chunk_manager_class()->verify(false);
  }
#endif
}

void MetaspaceUtils::verify_metrics() {
#ifdef ASSERT
  // Please note: there are time windows where the internal counters are out of sync with
  // reality. For example, when a newly created ClassLoaderMetaspace creates its first chunk -
  // the ClassLoaderMetaspace is not yet attached to its ClassLoaderData object and hence will
  // not be counted when iterating the CLDG. So be careful when you call this method.
  ClassLoaderMetaspaceStatistics total_stat;
  collect_statistics(&total_stat);
  UsedChunksStatistics nonclass_chunk_stat = total_stat.nonclass_sm_stats().totals();
  UsedChunksStatistics class_chunk_stat = total_stat.class_sm_stats().totals();

  bool mismatch = false;
  for (int i = 0; i < Metaspace::MetadataTypeCount; i++) {
    Metaspace::MetadataType mdtype = (Metaspace::MetadataType)i;
    UsedChunksStatistics chunk_stat = total_stat.sm_stats(mdtype).totals();
    if (capacity_words(mdtype) != chunk_stat.cap() ||
        used_words(mdtype) != chunk_stat.used() ||
        overhead_words(mdtype) != chunk_stat.overhead()) {
      mismatch = true;
      tty->print_cr("MetaspaceUtils::verify_metrics: counter mismatch for mdtype=%u:", mdtype);
      tty->print_cr("Expected cap " SIZE_FORMAT ", used " SIZE_FORMAT ", overhead " SIZE_FORMAT ".",
                    capacity_words(mdtype), used_words(mdtype), overhead_words(mdtype));
      tty->print_cr("Got cap " SIZE_FORMAT ", used " SIZE_FORMAT ", overhead " SIZE_FORMAT ".",
                    chunk_stat.cap(), chunk_stat.used(), chunk_stat.overhead());
      tty->flush();
    }
  }
  assert(mismatch == false, "MetaspaceUtils::verify_metrics: counter mismatch.");
#endif
}

// Metaspace methods

size_t Metaspace::_first_chunk_word_size = 0;
size_t Metaspace::_first_class_chunk_word_size = 0;

size_t Metaspace::_commit_alignment = 0;
size_t Metaspace::_reserve_alignment = 0;

VirtualSpaceList* Metaspace::_space_list = NULL;
VirtualSpaceList* Metaspace::_class_space_list = NULL;

ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
ChunkManager* Metaspace::_chunk_manager_class = NULL;

bool Metaspace::_initialized = false;

#define VIRTUALSPACEMULTIPLIER 2

#ifdef _LP64
static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);

void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
  assert(!DumpSharedSpaces, "narrow_klass is set by MetaspaceShared class.");
  // Figure out the narrow_klass_base and the narrow_klass_shift.  The
  // narrow_klass_base is the lower of the metaspace base and the cds base
  // (if cds is enabled).  The narrow_klass_shift depends on the distance
  // between the lower base and higher address.
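  // For example (illustration only, with LogKlassAlignmentInBytes == 3): if
  // the higher address ends up at or below 4G (UnscaledClassSpaceMax), base
  // and shift can both be zero; at or below 32G (4G << 3), a zero base still
  // works but a shift of 3 is required; above that, a non-zero base must be
  // used.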
  address lower_base;
  address higher_address;
#if INCLUDE_CDS
  if (UseSharedSpaces) {
    higher_address = MAX2((address)(cds_base + MetaspaceShared::core_spaces_size()),
                          (address)(metaspace_base + compressed_class_space_size()));
    lower_base = MIN2(metaspace_base, cds_base);
  } else
#endif
  {
    higher_address = metaspace_base + compressed_class_space_size();
    lower_base = metaspace_base;

    uint64_t klass_encoding_max = UnscaledClassSpaceMax << LogKlassAlignmentInBytes;
    // If compressed class space fits in lower 32G, we don't need a base.
    if (higher_address <= (address)klass_encoding_max) {
      lower_base = 0; // Effectively lower base is zero.
    }
  }

  CompressedKlassPointers::set_base(lower_base);

  // CDS uses LogKlassAlignmentInBytes for narrow_klass_shift. See
  // MetaspaceShared::initialize_dumptime_shared_and_meta_spaces() for
  // how the dump time narrow_klass_shift is set. Although CDS can also
  // work in zero-shift mode, it uses LogKlassAlignmentInBytes for the
  // klass shift to stay consistent with AOT, so archived java heap objects
  // can be used at the same time as AOT code.
  if (!UseSharedSpaces
      && (uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax) {
    CompressedKlassPointers::set_shift(0);
  } else {
    CompressedKlassPointers::set_shift(LogKlassAlignmentInBytes);
  }
  AOTLoader::set_narrow_klass_shift();
}

#if INCLUDE_CDS
// Return TRUE if the specified metaspace_base and cds_base are close enough
// to work with compressed klass pointers.
bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
  assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
  assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
  address lower_base = MIN2((address)metaspace_base, cds_base);
  address higher_address = MAX2((address)(cds_base + MetaspaceShared::core_spaces_size()),
                                (address)(metaspace_base + compressed_class_space_size()));
  return ((uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax);
}
#endif

// Try to allocate the metaspace at the requested addr.
void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
  assert(!DumpSharedSpaces, "compress klass space is allocated by MetaspaceShared class.");
  assert(using_class_space(), "called improperly");
  assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
  assert(compressed_class_space_size() < KlassEncodingMetaspaceMax,
         "Metaspace size is too big");
  assert_is_aligned(requested_addr, _reserve_alignment);
  assert_is_aligned(cds_base, _reserve_alignment);
  assert_is_aligned(compressed_class_space_size(), _reserve_alignment);

  // Don't use large pages for the class space.
  bool large_pages = false;

#if !(defined(AARCH64) || defined(AIX))
  ReservedSpace metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                             _reserve_alignment,
                                             large_pages,
                                             requested_addr);
#else // AARCH64 || AIX
  ReservedSpace metaspace_rs;

  // Our compressed klass pointers may fit nicely into the lower 32
  // bits.
  if ((uint64_t)requested_addr + compressed_class_space_size() < 4*G) {
    metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                 _reserve_alignment,
                                 large_pages,
                                 requested_addr);
  }

  if (!metaspace_rs.is_reserved()) {
    // Aarch64: Try to align metaspace so that we can decode a compressed
    // klass with a single MOVK instruction.  We can do this iff the
    // compressed class base is a multiple of 4G.
    // Aix: Search for a place where we can find memory. If we need to load
    // the base, 4G alignment is helpful, too.
    size_t increment = AARCH64_ONLY(4*)G;
    for (char *a = align_up(requested_addr, increment);
         a < (char*)(1024*G);
         a += increment) {
      if (a == (char *)(32*G)) {
        // Go faster from here on. Zero-based is no longer possible.
        increment = 4*G;
      }

#if INCLUDE_CDS
      if (UseSharedSpaces
          && !can_use_cds_with_metaspace_addr(a, cds_base)) {
        // We failed to find an aligned base that will reach.  Fall
        // back to using our requested addr.
        metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                     _reserve_alignment,
                                     large_pages,
                                     requested_addr);
        break;
      }
#endif

      metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                   _reserve_alignment,
                                   large_pages,
                                   a);
      if (metaspace_rs.is_reserved())
        break;
    }
  }

#endif // AARCH64 || AIX

  if (!metaspace_rs.is_reserved()) {
#if INCLUDE_CDS
    if (UseSharedSpaces) {
      size_t increment = align_up(1*G, _reserve_alignment);

      // Keep trying to allocate the metaspace, increasing the requested_addr
      // by 1GB each time, until we reach an address that will no longer allow
      // use of CDS with compressed klass pointers.
      char *addr = requested_addr;
      while (!metaspace_rs.is_reserved() && (addr + increment > addr) &&
             can_use_cds_with_metaspace_addr(addr + increment, cds_base)) {
        addr = addr + increment;
        metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                     _reserve_alignment, large_pages, addr);
      }
    }
#endif
    // If no successful allocation then try to allocate the space anywhere.  If
    // that fails then OOM doom.  At this point we cannot try allocating the
    // metaspace as if UseCompressedClassPointers is off because too much
    // initialization has happened that depends on UseCompressedClassPointers.
    // So, UseCompressedClassPointers cannot be turned off at this point.
    if (!metaspace_rs.is_reserved()) {
      metaspace_rs = ReservedSpace(compressed_class_space_size(),
                                   _reserve_alignment, large_pages);
      if (!metaspace_rs.is_reserved()) {
        vm_exit_during_initialization(err_msg("Could not allocate metaspace: " SIZE_FORMAT " bytes",
                                              compressed_class_space_size()));
      }
    }
  }

  // If we got here then the metaspace got allocated.
  MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);

#if INCLUDE_CDS
  // Verify that we can use shared spaces.  Otherwise, turn off CDS.
  if (UseSharedSpaces && !can_use_cds_with_metaspace_addr(metaspace_rs.base(), cds_base)) {
    FileMapInfo::stop_sharing_and_unmap(
        "Could not allocate metaspace at a compatible address");
  }
#endif
  set_narrow_klass_base_and_shift((address)metaspace_rs.base(),
                                  UseSharedSpaces ? (address)cds_base : 0);

  initialize_class_space(metaspace_rs);

  LogTarget(Trace, gc, metaspace) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    print_compressed_class_space(&ls, requested_addr);
  }
}

void Metaspace::print_compressed_class_space(outputStream* st, const char* requested_addr) {
  st->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: %d",
               p2i(CompressedKlassPointers::base()), CompressedKlassPointers::shift());
  if (_class_space_list != NULL) {
    address base = (address)_class_space_list->current_virtual_space()->bottom();
    st->print("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT,
              compressed_class_space_size(), p2i(base));
    if (requested_addr != 0) {
      st->print(" Req Addr: " PTR_FORMAT, p2i(requested_addr));
    }
    st->cr();
  }
}

// For UseCompressedClassPointers the class space is reserved above the top of
// the Java heap.  The argument passed in is at the base of the compressed space.
void Metaspace::initialize_class_space(ReservedSpace rs) {
  // The reserved space size may be bigger because of alignment, esp with UseLargePages
  assert(rs.size() >= CompressedClassSpaceSize,
         SIZE_FORMAT " != " SIZE_FORMAT, rs.size(), CompressedClassSpaceSize);
  assert(using_class_space(), "Must be using class space");
  _class_space_list = new VirtualSpaceList(rs);
  _chunk_manager_class = new ChunkManager(true/*is_class*/);

  if (!_class_space_list->initialization_succeeded()) {
    vm_exit_during_initialization("Failed to setup compressed class space virtual space list.");
  }
}

#endif

void Metaspace::ergo_initialize() {
  if (DumpSharedSpaces) {
    // Using large pages when dumping the shared archive is currently not implemented.
    FLAG_SET_ERGO(UseLargePagesInMetaspace, false);
  }

  size_t page_size = os::vm_page_size();
  if (UseLargePages && UseLargePagesInMetaspace) {
    page_size = os::large_page_size();
  }

  _commit_alignment  = page_size;
  _reserve_alignment = MAX2(page_size, (size_t)os::vm_allocation_granularity());
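
  // For example (illustration only): with 4K pages and a 64K allocation
  // granularity (as on Windows), and without large pages for metaspace,
  // the commit alignment ends up at 4K and the reserve alignment at 64K;
  // on typical Linux systems both are the 4K page size.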
1204 
1205   // Do not use FLAG_SET_ERGO to update MaxMetaspaceSize, since this will
1206   // override if MaxMetaspaceSize was set on the command line or not.
1207   // This information is needed later to conform to the specification of the
1208   // java.lang.management.MemoryUsage API.
1209   //
1210   // Ideally, we would be able to set the default value of MaxMetaspaceSize in
1211   // globals.hpp to the aligned value, but this is not possible, since the
1212   // alignment depends on other flags being parsed.
1213   MaxMetaspaceSize = align_down_bounded(MaxMetaspaceSize, _reserve_alignment);
1214 
1215   if (MetaspaceSize > MaxMetaspaceSize) {
1216     MetaspaceSize = MaxMetaspaceSize;
1217   }
1218 
1219   MetaspaceSize = align_down_bounded(MetaspaceSize, _commit_alignment);
1220 
1221   assert(MetaspaceSize <= MaxMetaspaceSize, "MetaspaceSize should be limited by MaxMetaspaceSize");
1222 
1223   MinMetaspaceExpansion = align_down_bounded(MinMetaspaceExpansion, _commit_alignment);
1224   MaxMetaspaceExpansion = align_down_bounded(MaxMetaspaceExpansion, _commit_alignment);
1225 
1226   CompressedClassSpaceSize = align_down_bounded(CompressedClassSpaceSize, _reserve_alignment);
1227 
1228   // The initial virtual space size will be calculated in global_initialize().
1229   size_t min_metaspace_sz =
1230       VIRTUALSPACEMULTIPLIER * InitialBootClassLoaderMetaspaceSize;
1231   if (UseCompressedClassPointers) {
1232     if ((min_metaspace_sz + CompressedClassSpaceSize) > MaxMetaspaceSize) {
1233       if (min_metaspace_sz >= MaxMetaspaceSize) {
1234         vm_exit_during_initialization("MaxMetaspaceSize is too small.");
1235       } else {
1236         FLAG_SET_ERGO(CompressedClassSpaceSize,
1237                       MaxMetaspaceSize - min_metaspace_sz);
1238       }
1239     }
1240   } else if (min_metaspace_sz >= MaxMetaspaceSize) {
1241     FLAG_SET_ERGO(InitialBootClassLoaderMetaspaceSize,
1242                   min_metaspace_sz);
1243   }
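       // For illustration (hypothetical values): if min_metaspace_sz came to 8M
       // and -XX:MaxMetaspaceSize=512M were given with compressed class
       // pointers, a default 1G CompressedClassSpaceSize would not fit and
       // would be shrunk ergonomically to 512M - 8M = 504M.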
1244 
1245   set_compressed_class_space_size(CompressedClassSpaceSize);
1246 }
1247 
1248 void Metaspace::global_initialize() {
1249   MetaspaceGC::initialize();
1250 
1251 #if INCLUDE_CDS
1252   if (DumpSharedSpaces) {
1253     MetaspaceShared::initialize_dumptime_shared_and_meta_spaces();
1254   } else if (UseSharedSpaces) {
1255     // If any of the archived spaces fails to map, UseSharedSpaces
1256     // is reset to false. Fall through to the
1257     // (!DumpSharedSpaces && !UseSharedSpaces) case to set up class
1258     // metaspace.
1259     MetaspaceShared::initialize_runtime_shared_and_meta_spaces();
1260   }
1261 
1262   if (DynamicDumpSharedSpaces && !UseSharedSpaces) {
1263     vm_exit_during_initialization("DynamicDumpSharedSpaces is unsupported when the base CDS archive is not loaded", NULL);
1264   }
1265 
1266   if (!DumpSharedSpaces && !UseSharedSpaces)
1267 #endif // INCLUDE_CDS
1268   {
1269 #ifdef _LP64
1270     if (using_class_space()) {
1271       char* base = (char*)align_up(CompressedOops::end(), _reserve_alignment);
1272       allocate_metaspace_compressed_klass_ptrs(base, 0);
1273     }
1274 #endif // _LP64
1275   }
1276 
1277   // Initialize these before initializing the VirtualSpaceList
1278   _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
1279   _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
1280   // Make the first class chunk bigger than a medium chunk so it's not put
1281   // on the medium chunk list. The next chunk will be small and progress
1282   // from there. This size was determined empirically by running -version.
1283   _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
1284                                      (CompressedClassSpaceSize/BytesPerWord)*2);
1285   _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
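       // For example, with the default 1G CompressedClassSpaceSize on 64-bit,
       // (CompressedClassSpaceSize / BytesPerWord) * 2 dwarfs MediumChunk * 6,
       // so MIN2 picks the latter; the second term only matters for very small
       // class spaces.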
1286   // Arbitrarily set the initial virtual space to a multiple
1287   // of the boot class loader size.
1288   size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
1289   word_size = align_up(word_size, Metaspace::reserve_alignment_words());
1290 
1291   // Initialize the list of virtual spaces.
1292   _space_list = new VirtualSpaceList(word_size);
1293   _chunk_manager_metadata = new ChunkManager(false/*is_class*/);
1294 
1295   if (!_space_list->initialization_succeeded()) {
1296     vm_exit_during_initialization("Unable to set up metadata virtual space list.", NULL);
1297   }
1298 
1299   _tracer = new MetaspaceTracer();
1300 
1301   _initialized = true;
1302 
1303 }
1304 
1305 void Metaspace::post_initialize() {
1306   MetaspaceGC::post_initialize();
1307 }
1308 
1309 void Metaspace::verify_global_initialization() {
1310   assert(space_list() != NULL, "Metadata VirtualSpaceList has not been initialized");
1311   assert(chunk_manager_metadata() != NULL, "Metadata ChunkManager has not been initialized");
1312 
1313   if (using_class_space()) {
1314     assert(class_space_list() != NULL, "Class VirtualSpaceList has not been initialized");
1315     assert(chunk_manager_class() != NULL, "Class ChunkManager has not been initialized");
1316   }
1317 }
1318 
1319 size_t Metaspace::align_word_size_up(size_t word_size) {
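       // For illustration: with 8-byte words and a hypothetical 4K allocation
       // alignment, a request of 100 words (800 bytes) rounds up to 4096
       // bytes, i.e. 512 words.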
1320   size_t byte_size = word_size * wordSize;
1321   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
1322 }
1323 
1324 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
1325                               MetaspaceObj::Type type, TRAPS) {
1326   assert(!_frozen, "sanity");
1327   assert(!(DumpSharedSpaces && THREAD->is_VM_thread()), "sanity");
1328 
1329   if (HAS_PENDING_EXCEPTION) {
1330     assert(false, "Should not allocate with exception pending");
1331     return NULL;  // caller does a CHECK_NULL too
1332   }
1333 
1334   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
1335         "ClassLoaderData::the_null_class_loader_data() should have been used.");
1336 
1337   MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
1338 
1339   // Try to allocate metadata.
1340   MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
1341 
1342   if (result == NULL) {
1343     tracer()->report_metaspace_allocation_failure(loader_data, word_size, type, mdtype);
1344 
1345     // Allocation failed.
1346     if (is_init_completed()) {
1347       // Only start a GC if the bootstrapping has completed.
1348       // Try to clean out some heap memory and retry. This can prevent premature
1349       // expansion of the metaspace.
1350       result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data, word_size, mdtype);
1351     }
1352   }
1353 
1354   if (result == NULL) {
1355     if (DumpSharedSpaces) {
1356       // CDS dumping keeps loading classes, so if we hit an OOM we will probably keep hitting OOMs.
1357       // We should abort to avoid generating a potentially bad archive.
1358       vm_exit_during_cds_dumping(err_msg("Failed allocating metaspace object type %s of size " SIZE_FORMAT ". CDS dump aborted.",
1359           MetaspaceObj::type_name(type), word_size * BytesPerWord),
1360         err_msg("Please increase MaxMetaspaceSize (currently " SIZE_FORMAT " bytes).", MaxMetaspaceSize));
1361     }
1362     report_metadata_oome(loader_data, word_size, type, mdtype, THREAD);
1363     assert(HAS_PENDING_EXCEPTION, "sanity");
1364     return NULL;
1365   }
1366 
1367   // Zero initialize.
1368   Copy::fill_to_words((HeapWord*)result, word_size, 0);
1369 
1370   return result;
1371 }
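     // A typical call site (sketch only; names are the caller's): metadata is
     // allocated in words, passing a CHECK macro so a pending exception
     // propagates, e.g.
     //   MetaWord* p = Metaspace::allocate(loader_data, word_size,
     //                                     MetaspaceObj::SymbolType, CHECK_NULL);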
1372 
1373 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
1374   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
1375 
1376   // We are out of memory; log a short report before raising the error.
1377   Log(gc, metaspace, freelist, oom) log;
1378   if (log.is_info()) {
1379     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
1380              is_class_space_allocation(mdtype) ? "class" : "data", word_size);
1381     ResourceMark rm;
1382     if (log.is_debug()) {
1383       if (loader_data->metaspace_or_null() != NULL) {
1384         LogStream ls(log.debug());
1385         loader_data->print_value_on(&ls);
1386       }
1387     }
1388     LogStream ls(log.info());
1389     // In case of an OOM, log out a short but still useful report.
1390     MetaspaceUtils::print_basic_report(&ls, 0);
1391   }
1392 
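       // Heuristic: blame the compressed class space only if its committed
       // bytes plus the chunk that would have been needed exceed
       // CompressedClassSpaceSize; otherwise this is ordinary metaspace
       // exhaustion.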
1393   bool out_of_compressed_class_space = false;
1394   if (is_class_space_allocation(mdtype)) {
1395     ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
1396     out_of_compressed_class_space =
1397       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
1398       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
1399       CompressedClassSpaceSize;
1400   }
1401 
1402   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
1403   const char* space_string = out_of_compressed_class_space ?
1404     "Compressed class space" : "Metaspace";
1405 
1406   report_java_out_of_memory(space_string);
1407 
1408   if (JvmtiExport::should_post_resource_exhausted()) {
1409     JvmtiExport::post_resource_exhausted(
1410         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
1411         space_string);
1412   }
1413 
1414   if (!is_init_completed()) {
1415     vm_exit_during_initialization("OutOfMemoryError", space_string);
1416   }
1417 
1418   if (out_of_compressed_class_space) {
1419     THROW_OOP(Universe::out_of_memory_error_class_metaspace());
1420   } else {
1421     THROW_OOP(Universe::out_of_memory_error_metaspace());
1422   }
1423 }
1424 
1425 const char* Metaspace::metadata_type_name(Metaspace::MetadataType mdtype) {
1426   switch (mdtype) {
1427     case Metaspace::ClassType: return "Class";
1428     case Metaspace::NonClassType: return "Metadata";
1429     default:
1430       assert(false, "Got bad mdtype: %d", (int) mdtype);
1431       return NULL;
1432   }
1433 }
1434 
1435 void Metaspace::purge(MetadataType mdtype) {
1436   get_space_list(mdtype)->purge(get_chunk_manager(mdtype));
1437 }
1438 
1439 void Metaspace::purge() {
1440   MutexLocker cl(MetaspaceExpand_lock,
1441                  Mutex::_no_safepoint_check_flag);
1442   purge(NonClassType);
1443   if (using_class_space()) {
1444     purge(ClassType);
1445   }
1446 }
1447 
1448 bool Metaspace::contains(const void* ptr) {
1449   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
1450     return true;
1451   }
1452   return contains_non_shared(ptr);
1453 }
1454 
1455 bool Metaspace::contains_non_shared(const void* ptr) {
1456   if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
1457      return true;
1458   }
1459 
1460   return get_space_list(NonClassType)->contains(ptr);
1461 }
1462 
1463 // ClassLoaderMetaspace
1464 
1465 ClassLoaderMetaspace::ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type)
1466   : _space_type(type)
1467   , _lock(lock)
1468   , _vsm(NULL)
1469   , _class_vsm(NULL)
1470 {
1471   initialize(lock, type);
1472 }
1473 
1474 ClassLoaderMetaspace::~ClassLoaderMetaspace() {
1475   Metaspace::assert_not_frozen();
1476   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_metaspace_deaths));
1477   delete _vsm;
1478   if (Metaspace::using_class_space()) {
1479     delete _class_vsm;
1480   }
1481 }
1482 
1483 void ClassLoaderMetaspace::initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
1484   Metachunk* chunk = get_initialization_chunk(type, mdtype);
1485   if (chunk != NULL) {
1486     // Add to this manager's list of chunks in use and make it the current_chunk().
1487     get_space_manager(mdtype)->add_chunk(chunk, true);
1488   }
1489 }
1490 
1491 Metachunk* ClassLoaderMetaspace::get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
1492   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
1493 
1494   // Get a chunk from the chunk freelist
1495   Metachunk* chunk = Metaspace::get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
1496 
1497   if (chunk == NULL) {
1498     chunk = Metaspace::get_space_list(mdtype)->get_new_chunk(chunk_word_size,
1499                                                   get_space_manager(mdtype)->medium_chunk_bunch());
1500   }
1501 
1502   return chunk;
1503 }
1504 
1505 void ClassLoaderMetaspace::initialize(Mutex* lock, Metaspace::MetaspaceType type) {
1506   Metaspace::verify_global_initialization();
1507 
1508   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_metaspace_births));
1509 
1510   // Allocate SpaceManager for metadata objects.
1511   _vsm = new SpaceManager(Metaspace::NonClassType, type, lock);
1512 
1513   if (Metaspace::using_class_space()) {
1514     // Allocate SpaceManager for classes.
1515     _class_vsm = new SpaceManager(Metaspace::ClassType, type, lock);
1516   }
1517 
1518   MutexLocker cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
1519 
1520   // Allocate chunk for metadata objects
1521   initialize_first_chunk(type, Metaspace::NonClassType);
1522 
1523   // Allocate chunk for class metadata objects
1524   if (Metaspace::using_class_space()) {
1525     initialize_first_chunk(type, Metaspace::ClassType);
1526   }
1527 }
1528 
1529 MetaWord* ClassLoaderMetaspace::allocate(size_t word_size, Metaspace::MetadataType mdtype) {
1530   Metaspace::assert_not_frozen();
1531 
1532   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_allocs));
1533 
1534   // Don't use class_vsm() unless UseCompressedClassPointers is true.
1535   if (Metaspace::is_class_space_allocation(mdtype)) {
1536     return class_vsm()->allocate(word_size);
1537   } else {
1538     return vsm()->allocate(word_size);
1539   }
1540 }
1541 
1542 MetaWord* ClassLoaderMetaspace::expand_and_allocate(size_t word_size, Metaspace::MetadataType mdtype) {
1543   Metaspace::assert_not_frozen();
1544   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
1545   assert(delta_bytes > 0, "Must be");
1546 
1547   size_t before = 0;
1548   size_t after = 0;
1549   bool can_retry = true;
1550   MetaWord* res;
1551   bool incremented;
1552 
1553   // Each thread increments the HWM at most once. Even if the thread fails to increment
1554   // the HWM, an allocation is still attempted. This is because another thread must then
1555   // have incremented the HWM and therefore the allocation might still succeed.
1556   do {
1557     incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before, &can_retry);
1558     res = allocate(word_size, mdtype);
1559   } while (!incremented && res == NULL && can_retry);
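       // Note: if the allocation succeeded without this thread having raised
       // the HWM (another thread raised it first), no threshold update is
       // reported below, since this thread did not change it.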
1560 
1561   if (incremented) {
1562     Metaspace::tracer()->report_gc_threshold(before, after,
1563                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
1564     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
1565   }
1566 
1567   return res;
1568 }
1569 
1570 size_t ClassLoaderMetaspace::allocated_blocks_bytes() const {
1571   return (vsm()->used_words() +
1572       (Metaspace::using_class_space() ? class_vsm()->used_words() : 0)) * BytesPerWord;
1573 }
1574 
1575 size_t ClassLoaderMetaspace::allocated_chunks_bytes() const {
1576   return (vsm()->capacity_words() +
1577       (Metaspace::using_class_space() ? class_vsm()->capacity_words() : 0)) * BytesPerWord;
1578 }
1579 
1580 void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
1581   Metaspace::assert_not_frozen();
1582   assert(!SafepointSynchronize::is_at_safepoint()
1583          || Thread::current()->is_VM_thread(), "should be the VM thread");
1584 
1585   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_external_deallocs));
1586 
1587   MutexLocker ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
1588 
1589   if (is_class && Metaspace::using_class_space()) {
1590     class_vsm()->deallocate(ptr, word_size);
1591   } else {
1592     vsm()->deallocate(ptr, word_size);
1593   }
1594 }
1595 
1596 size_t ClassLoaderMetaspace::class_chunk_size(size_t word_size) {
1597   assert(Metaspace::using_class_space(), "Has to use class space");
1598   return class_vsm()->calc_chunk_size(word_size);
1599 }
1600 
1601 void ClassLoaderMetaspace::print_on(outputStream* out) const {
1602   // Print both class virtual space counts and metaspace.
1603   if (Verbose) {
1604     vsm()->print_on(out);
1605     if (Metaspace::using_class_space()) {
1606       class_vsm()->print_on(out);
1607     }
1608   }
1609 }
1610 
1611 void ClassLoaderMetaspace::verify() {
1612   vsm()->verify();
1613   if (Metaspace::using_class_space()) {
1614     class_vsm()->verify();
1615   }
1616 }
1617 
1618 void ClassLoaderMetaspace::add_to_statistics_locked(ClassLoaderMetaspaceStatistics* out) const {
1619   assert_lock_strong(lock());
1620   vsm()->add_to_statistics_locked(&out->nonclass_sm_stats());
1621   if (Metaspace::using_class_space()) {
1622     class_vsm()->add_to_statistics_locked(&out->class_sm_stats());
1623   }
1624 }
1625 
1626 void ClassLoaderMetaspace::add_to_statistics(ClassLoaderMetaspaceStatistics* out) const {
1627   MutexLocker cl(lock(), Mutex::_no_safepoint_check_flag);
1628   add_to_statistics_locked(out);
1629 }
1630 
1631 /////////////// Unit tests ///////////////
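     // The helpers below expose chunk manager internals through plain structs
     // so that unit tests outside this translation unit (presumably
     // gtest-based) can inspect them without including metaspace internals.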
1632 
1633 struct chunkmanager_statistics_t {
1634   int num_specialized_chunks;
1635   int num_small_chunks;
1636   int num_medium_chunks;
1637   int num_humongous_chunks;
1638 };
1639 
1640 extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out) {
1641   ChunkManager* const chunk_manager = Metaspace::get_chunk_manager(mdType);
1642   ChunkManagerStatistics stat;
1643   chunk_manager->collect_statistics(&stat);
1644   out->num_specialized_chunks = (int)stat.chunk_stats(SpecializedIndex).num();
1645   out->num_small_chunks = (int)stat.chunk_stats(SmallIndex).num();
1646   out->num_medium_chunks = (int)stat.chunk_stats(MediumIndex).num();
1647   out->num_humongous_chunks = (int)stat.chunk_stats(HumongousIndex).num();
1648 }
1649 
1650 struct chunk_geometry_t {
1651   size_t specialized_chunk_word_size;
1652   size_t small_chunk_word_size;
1653   size_t medium_chunk_word_size;
1654 };
1655 
1656 extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out) {
1657   if (mdType == Metaspace::NonClassType) {
1658     out->specialized_chunk_word_size = SpecializedChunk;
1659     out->small_chunk_word_size = SmallChunk;
1660     out->medium_chunk_word_size = MediumChunk;
1661   } else {
1662     out->specialized_chunk_word_size = ClassSpecializedChunk;
1663     out->small_chunk_word_size = ClassSmallChunk;
1664     out->medium_chunk_word_size = ClassMediumChunk;
1665   }
1666 }