
src/hotspot/share/memory/metaspace.cpp

rev 60538 : imported patch jep387-all.patch


   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "aot/aotLoader.hpp"
  27 #include "classfile/classLoaderDataGraph.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "logging/log.hpp"
  30 #include "logging/logStream.hpp"
  31 #include "memory/filemap.hpp"
  32 #include "memory/metaspace.hpp"
  33 #include "memory/metaspace/chunkManager.hpp"
  34 #include "memory/metaspace/metachunk.hpp"
  35 #include "memory/metaspace/metaspaceCommon.hpp"
  36 #include "memory/metaspace/printCLDMetaspaceInfoClosure.hpp"
  37 #include "memory/metaspace/spaceManager.hpp"
  38 #include "memory/metaspace/virtualSpaceList.hpp"
  39 #include "memory/metaspaceShared.hpp"
  40 #include "memory/metaspaceTracer.hpp"
  41 #include "memory/universe.hpp"
  42 #include "oops/compressedOops.hpp"
  43 #include "runtime/atomic.hpp"
  44 #include "runtime/init.hpp"

  45 #include "services/memTracker.hpp"
  46 #include "utilities/copy.hpp"
  47 #include "utilities/debug.hpp"
  48 #include "utilities/formatBuffer.hpp"
  49 #include "utilities/globalDefinitions.hpp"
  50 #include "utilities/vmError.hpp"
  51 
  52 
  53 using namespace metaspace;
  54 
  55 MetaWord* last_allocated = 0;
  56 
  57 size_t Metaspace::_compressed_class_space_size;
  58 const MetaspaceTracer* Metaspace::_tracer = NULL;

  59 
  60 DEBUG_ONLY(bool Metaspace::_frozen = false;)
  61 
  62 static const char* space_type_name(Metaspace::MetaspaceType t) {
  63   const char* s = NULL;
  64   switch (t) {
  65     case Metaspace::StandardMetaspaceType: s = "Standard"; break;
  66     case Metaspace::BootMetaspaceType: s = "Boot"; break;
  67     case Metaspace::ClassMirrorHolderMetaspaceType: s = "ClassMirrorHolder"; break;
  68     case Metaspace::ReflectionMetaspaceType: s = "Reflection"; break;
  69     default: ShouldNotReachHere();
  70   }
  71   return s;
  72 }
  73 
  74 volatile size_t MetaspaceGC::_capacity_until_GC = 0;
  75 uint MetaspaceGC::_shrink_factor = 0;
  76 
  77 // BlockFreelist methods
  78 
  79 // VirtualSpaceNode methods
  80 
  81 // MetaspaceGC methods
  82 
  83 // VM_CollectForMetadataAllocation is the vm operation used to GC.
  84 // Within the VM operation after the GC the attempt to allocate the metadata
  85 // should succeed.  If the GC did not free enough space for the metaspace
  86 // allocation, the HWM is increased so that another virtualspace will be
  87 // allocated for the metadata.  With perm gen the increase in the perm
  88 // gen had bounds, MinMetaspaceExpansion and MaxMetaspaceExpansion.  The
  89 // metaspace policy uses those as the small and large steps for the HWM.
  90 //
  91 // After the GC the compute_new_size() for MetaspaceGC is called to
  92 // resize the capacity of the metaspaces.  The current implementation
  93 // is based on the flags MinMetaspaceFreeRatio and MaxMetaspaceFreeRatio used
  94 // to resize the Java heap by some GC's.  New flags can be implemented
  95 // if really needed.  MinMetaspaceFreeRatio is used to calculate how much
  96 // free space is desirable in the metaspace capacity to decide how much
  97 // to increase the HWM.  MaxMetaspaceFreeRatio is used to decide how much
  98 // free space is desirable in the metaspace capacity before decreasing
  99 // the HWM.
 100 
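To make the ratio policy above concrete: a desired capacity can be derived from the used size and a free-ratio flag by requiring that the given fraction of the capacity stays free. The helper below is an illustrative sketch only (its name and the used_after_gc parameter are not taken from this file), assuming the conventional percentage arithmetic:

// Sketch only, not part of the patch: smallest capacity that keeps
// `free_ratio` percent of itself free when `used_after_gc` words are in use.
static size_t desired_capacity_for_free_ratio(size_t used_after_gc, uintx free_ratio) {
  assert(free_ratio < 100, "ratio must leave room for used data");
  const double maximum_used_percentage = 1.0 - (double)free_ratio / 100.0;
  return (size_t)(used_after_gc / maximum_used_percentage);
}
// Applied with MinMetaspaceFreeRatio this yields the minimum desired capacity
// (grow the HWM if below it); with MaxMetaspaceFreeRatio, the maximum desired
// capacity (consider shrinking the HWM if above it).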
 101 // Calculate the amount to increase the high water mark (HWM).
 102 // Increase by a minimum amount (MinMetaspaceExpansion) so that


 334       } else {
 335         _shrink_factor = MIN2(current_shrink_factor * 4, (uint) 100);
 336       }
 337       log_trace(gc, metaspace)("    shrinking:  initThreshold: %.1fK  maximum_desired_capacity: %.1fK",
 338                                MetaspaceSize / (double) K, maximum_desired_capacity / (double) K);
 339       log_trace(gc, metaspace)("    shrink_bytes: %.1fK  current_shrink_factor: %d  new shrink factor: %d  MinMetaspaceExpansion: %.1fK",
 340                                shrink_bytes / (double) K, current_shrink_factor, _shrink_factor, MinMetaspaceExpansion / (double) K);
 341     }
 342   }
 343 
 344   // Don't shrink unless it's significant
 345   if (shrink_bytes >= MinMetaspaceExpansion &&
 346       ((capacity_until_GC - shrink_bytes) >= MetaspaceSize)) {
 347     size_t new_capacity_until_GC = MetaspaceGC::dec_capacity_until_GC(shrink_bytes);
 348     Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
 349                                              new_capacity_until_GC,
 350                                              MetaspaceGCThresholdUpdater::ComputeNewSize);
 351   }
 352 }
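The shrink path shown above escalates gently: a shrink is only performed when the reclaimable amount is at least MinMetaspaceExpansion and the result would not drop below MetaspaceSize, and the shrink factor quadruples on consecutive opportunities, capped at 100%. An illustrative restatement of those two rules (sketch only, not part of the patch):

// Sketch only: the guard applied before lowering the high-water mark.
static bool should_shrink(size_t shrink_bytes, size_t capacity_until_GC) {
  return shrink_bytes >= MinMetaspaceExpansion &&
         (capacity_until_GC - shrink_bytes) >= MetaspaceSize;
}

// Sketch only: shrink-factor escalation across consecutive shrink
// opportunities, mirroring the MIN2(current * 4, 100) cap in the code above.
static uint next_shrink_factor(uint current_shrink_factor) {
  return MIN2(current_shrink_factor * 4, (uint)100);
}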
 353 
 354 // MetaspaceUtils
 355 size_t MetaspaceUtils::_capacity_words [Metaspace:: MetadataTypeCount] = {0, 0};
 356 size_t MetaspaceUtils::_overhead_words [Metaspace:: MetadataTypeCount] = {0, 0};
 357 volatile size_t MetaspaceUtils::_used_words [Metaspace:: MetadataTypeCount] = {0, 0};
 358 
 359 // Collect used metaspace statistics. This involves walking the CLDG. The resulting
 360 // output will be the accumulated values for all live metaspaces.
 361 // Note: method does not do any locking.
 362 void MetaspaceUtils::collect_statistics(ClassLoaderMetaspaceStatistics* out) {
 363   out->reset();
 364   ClassLoaderDataGraphMetaspaceIterator iter;
  365   while (iter.repeat()) {
  366     ClassLoaderMetaspace* msp = iter.get_next();
  367     if (msp != NULL) {
  368       msp->add_to_statistics(out);
  369     }
  370   }
 371 }
 372 
 373 size_t MetaspaceUtils::free_in_vs_bytes(Metaspace::MetadataType mdtype) {
 374   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
 375   return list == NULL ? 0 : list->free_bytes();
 376 }
 377 
 378 size_t MetaspaceUtils::free_in_vs_bytes() {
 379   return free_in_vs_bytes(Metaspace::ClassType) + free_in_vs_bytes(Metaspace::NonClassType);
 380 }
 381 
 382 static void inc_stat_nonatomically(size_t* pstat, size_t words) {
 383   assert_lock_strong(MetaspaceExpand_lock);
 384   (*pstat) += words;
 385 }
 386 
 387 static void dec_stat_nonatomically(size_t* pstat, size_t words) {
 388   assert_lock_strong(MetaspaceExpand_lock);
 389   const size_t size_now = *pstat;
 390   assert(size_now >= words, "About to decrement counter below zero "
  391          "(current value: " SIZE_FORMAT ", decrement value: " SIZE_FORMAT ").",
 392          size_now, words);
 393   *pstat = size_now - words;
 394 }
 395 
 396 static void inc_stat_atomically(volatile size_t* pstat, size_t words) {
 397   Atomic::add(pstat, words);
 398 }
 399 
 400 static void dec_stat_atomically(volatile size_t* pstat, size_t words) {
 401   const size_t size_now = *pstat;
 402   assert(size_now >= words, "About to decrement counter below zero "
  403          "(current value: " SIZE_FORMAT ", decrement value: " SIZE_FORMAT ").",
 404          size_now, words);
 405   Atomic::sub(pstat, words);
 406 }
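The four helpers above encode a simple counter discipline: capacity and overhead change only while MetaspaceExpand_lock is held (hence the assert_lock_strong checks), whereas the used counter is volatile and updated with Atomic::add/sub because allocation paths do not take that lock. A hedged sketch of a caller observing this discipline; the function names and sizes are illustrative, and it assumes the MetaspaceUtils entry points are reachable from the call site:

// Sketch only: capacity bookkeeping happens under MetaspaceExpand_lock...
static void record_new_chunk_sketch(size_t chunk_word_size) {
  MutexLocker ml(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
  MetaspaceUtils::inc_capacity(Metaspace::NonClassType, chunk_word_size);
}
// ...while used-word bookkeeping may come from lock-free allocation paths.
static void record_allocation_sketch(size_t word_size) {
  MetaspaceUtils::inc_used(Metaspace::NonClassType, word_size);
}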
 407 
 408 void MetaspaceUtils::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
 409   dec_stat_nonatomically(&_capacity_words[mdtype], words);
 410 }
 411 void MetaspaceUtils::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
 412   inc_stat_nonatomically(&_capacity_words[mdtype], words);
 413 }
 414 void MetaspaceUtils::dec_used(Metaspace::MetadataType mdtype, size_t words) {
 415   dec_stat_atomically(&_used_words[mdtype], words);
 416 }
 417 void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
 418   inc_stat_atomically(&_used_words[mdtype], words);
 419 }
 420 void MetaspaceUtils::dec_overhead(Metaspace::MetadataType mdtype, size_t words) {
 421   dec_stat_nonatomically(&_overhead_words[mdtype], words);
 422 }
 423 void MetaspaceUtils::inc_overhead(Metaspace::MetadataType mdtype, size_t words) {
 424   inc_stat_nonatomically(&_overhead_words[mdtype], words);
 425 }
 426 
 427 size_t MetaspaceUtils::reserved_bytes(Metaspace::MetadataType mdtype) {
 428   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
 429   return list == NULL ? 0 : list->reserved_bytes();
 430 }
 431 
 432 size_t MetaspaceUtils::committed_bytes(Metaspace::MetadataType mdtype) {
 433   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
 434   return list == NULL ? 0 : list->committed_bytes();
 435 }
 436 
 437 size_t MetaspaceUtils::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }
 438 
 439 size_t MetaspaceUtils::free_chunks_total_words(Metaspace::MetadataType mdtype) {
 440   ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
 441   if (chunk_manager == NULL) {
 442     return 0;
 443   }
 444   return chunk_manager->free_chunks_total_words();
 445 }
 446 
 447 size_t MetaspaceUtils::free_chunks_total_bytes(Metaspace::MetadataType mdtype) {
 448   return free_chunks_total_words(mdtype) * BytesPerWord;
 449 }
 450 
 451 size_t MetaspaceUtils::free_chunks_total_words() {
 452   return free_chunks_total_words(Metaspace::ClassType) +
 453          free_chunks_total_words(Metaspace::NonClassType);
 454 }
 455 
 456 size_t MetaspaceUtils::free_chunks_total_bytes() {
 457   return free_chunks_total_words() * BytesPerWord;
 458 }
 459 
 460 bool MetaspaceUtils::has_chunk_free_list(Metaspace::MetadataType mdtype) {
 461   return Metaspace::get_chunk_manager(mdtype) != NULL;
 462 }
 463 
 464 MetaspaceChunkFreeListSummary MetaspaceUtils::chunk_free_list_summary(Metaspace::MetadataType mdtype) {
 465   if (!has_chunk_free_list(mdtype)) {
 466     return MetaspaceChunkFreeListSummary();
 467   }
 468 
 469   const ChunkManager* cm = Metaspace::get_chunk_manager(mdtype);
 470   return cm->chunk_free_list_summary();
 471 }
 472 
 473 void MetaspaceUtils::print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values) {
 474   const metaspace::MetaspaceSizesSnapshot meta_values;
 475 
 476   if (Metaspace::using_class_space()) {
 477     log_info(gc, metaspace)(HEAP_CHANGE_FORMAT" "
 478                             HEAP_CHANGE_FORMAT" "
 479                             HEAP_CHANGE_FORMAT,
 480                             HEAP_CHANGE_FORMAT_ARGS("Metaspace",
 481                                                     pre_meta_values.used(),
 482                                                     pre_meta_values.committed(),
 483                                                     meta_values.used(),
 484                                                     meta_values.committed()),
 485                             HEAP_CHANGE_FORMAT_ARGS("NonClass",
 486                                                     pre_meta_values.non_class_used(),
 487                                                     pre_meta_values.non_class_committed(),
 488                                                     meta_values.non_class_used(),
 489                                                     meta_values.non_class_committed()),
 490                             HEAP_CHANGE_FORMAT_ARGS("Class",
 491                                                     pre_meta_values.class_used(),
 492                                                     pre_meta_values.class_committed(),
 493                                                     meta_values.class_used(),
 494                                                     meta_values.class_committed()));
 495   } else {
 496     log_info(gc, metaspace)(HEAP_CHANGE_FORMAT,
 497                             HEAP_CHANGE_FORMAT_ARGS("Metaspace",
 498                                                     pre_meta_values.used(),
 499                                                     pre_meta_values.committed(),
 500                                                     meta_values.used(),
 501                                                     meta_values.committed()));
 502   }
 503 }
 504 
 505 void MetaspaceUtils::print_on(outputStream* out) {
 506   Metaspace::MetadataType nct = Metaspace::NonClassType;
 507 
 508   out->print_cr(" Metaspace       "
 509                 "used "      SIZE_FORMAT "K, "
 510                 "capacity "  SIZE_FORMAT "K, "
 511                 "committed " SIZE_FORMAT "K, "
 512                 "reserved "  SIZE_FORMAT "K",
 513                 used_bytes()/K,
 514                 capacity_bytes()/K,
 515                 committed_bytes()/K,
 516                 reserved_bytes()/K);
 517 
 518   if (Metaspace::using_class_space()) {
 519     Metaspace::MetadataType ct = Metaspace::ClassType;
 520     out->print_cr("  class space    "
 521                   "used "      SIZE_FORMAT "K, "
 522                   "capacity "  SIZE_FORMAT "K, "
 523                   "committed " SIZE_FORMAT "K, "
 524                   "reserved "  SIZE_FORMAT "K",
 525                   used_bytes(ct)/K,
 526                   capacity_bytes(ct)/K,
 527                   committed_bytes(ct)/K,
 528                   reserved_bytes(ct)/K);
 529   }
 530 }
 531 
 532 
 533 void MetaspaceUtils::print_vs(outputStream* out, size_t scale) {
 534   const size_t reserved_nonclass_words = reserved_bytes(Metaspace::NonClassType) / sizeof(MetaWord);
 535   const size_t committed_nonclass_words = committed_bytes(Metaspace::NonClassType) / sizeof(MetaWord);
 536   {
 537     if (Metaspace::using_class_space()) {
 538       out->print("  Non-class space:  ");
 539     }
 540     print_scaled_words(out, reserved_nonclass_words, scale, 7);
 541     out->print(" reserved, ");
 542     print_scaled_words_and_percentage(out, committed_nonclass_words, reserved_nonclass_words, scale, 7);
 543     out->print_cr(" committed ");
 544 
 545     if (Metaspace::using_class_space()) {
 546       const size_t reserved_class_words = reserved_bytes(Metaspace::ClassType) / sizeof(MetaWord);
 547       const size_t committed_class_words = committed_bytes(Metaspace::ClassType) / sizeof(MetaWord);
 548       out->print("      Class space:  ");
 549       print_scaled_words(out, reserved_class_words, scale, 7);
 550       out->print(" reserved, ");
 551       print_scaled_words_and_percentage(out, committed_class_words, reserved_class_words, scale, 7);
 552       out->print_cr(" committed ");
 553 
 554       const size_t reserved_words = reserved_nonclass_words + reserved_class_words;
 555       const size_t committed_words = committed_nonclass_words + committed_class_words;
 556       out->print("             Both:  ");
 557       print_scaled_words(out, reserved_words, scale, 7);
 558       out->print(" reserved, ");
 559       print_scaled_words_and_percentage(out, committed_words, reserved_words, scale, 7);
 560       out->print_cr(" committed ");
 561     }
 562   }
 563 }
 564 
 565 static void print_basic_switches(outputStream* out, size_t scale) {
 566   out->print("MaxMetaspaceSize: ");
 567   if (MaxMetaspaceSize >= (max_uintx) - (2 * os::vm_page_size())) {
 568     // aka "very big". Default is max_uintx, but due to rounding in arg parsing the real
 569     // value is smaller.
 570     out->print("unlimited");
 571   } else {
 572     print_human_readable_size(out, MaxMetaspaceSize, scale);
 573   }
 574   out->cr();
 575   if (Metaspace::using_class_space()) {
 576     out->print("CompressedClassSpaceSize: ");
 577     print_human_readable_size(out, CompressedClassSpaceSize, scale);
 578   }
 579   out->cr();
 580 }
 581 
 582 // This will print out a basic metaspace usage report but
 583 // unlike print_report() is guaranteed not to lock or to walk the CLDG.
 584 void MetaspaceUtils::print_basic_report(outputStream* out, size_t scale) {
 585 
 586   if (!Metaspace::initialized()) {
 587     out->print_cr("Metaspace not yet initialized.");
 588     return;
 589   }
 590 
 591   out->cr();
 592   out->print_cr("Usage:");
 593 
 594   if (Metaspace::using_class_space()) {
 595     out->print("  Non-class:  ");
 596   }
 597 
 598   // In its most basic form, we do not require walking the CLDG. Instead, just print the running totals from
 599   // MetaspaceUtils.
 600   const size_t cap_nc = MetaspaceUtils::capacity_words(Metaspace::NonClassType);
 601   const size_t overhead_nc = MetaspaceUtils::overhead_words(Metaspace::NonClassType);
 602   const size_t used_nc = MetaspaceUtils::used_words(Metaspace::NonClassType);
 603   const size_t free_and_waste_nc = cap_nc - overhead_nc - used_nc;
 604 
 605   print_scaled_words(out, cap_nc, scale, 5);
 606   out->print(" capacity, ");
 607   print_scaled_words_and_percentage(out, used_nc, cap_nc, scale, 5);
 608   out->print(" used, ");
 609   print_scaled_words_and_percentage(out, free_and_waste_nc, cap_nc, scale, 5);
 610   out->print(" free+waste, ");
 611   print_scaled_words_and_percentage(out, overhead_nc, cap_nc, scale, 5);
 612   out->print(" overhead. ");
 613   out->cr();
 614 
 615   if (Metaspace::using_class_space()) {
 616     const size_t cap_c = MetaspaceUtils::capacity_words(Metaspace::ClassType);
 617     const size_t overhead_c = MetaspaceUtils::overhead_words(Metaspace::ClassType);
 618     const size_t used_c = MetaspaceUtils::used_words(Metaspace::ClassType);
 619     const size_t free_and_waste_c = cap_c - overhead_c - used_c;
 620     out->print("      Class:  ");
 621     print_scaled_words(out, cap_c, scale, 5);
 622     out->print(" capacity, ");
 623     print_scaled_words_and_percentage(out, used_c, cap_c, scale, 5);
 624     out->print(" used, ");
 625     print_scaled_words_and_percentage(out, free_and_waste_c, cap_c, scale, 5);
 626     out->print(" free+waste, ");
 627     print_scaled_words_and_percentage(out, overhead_c, cap_c, scale, 5);
 628     out->print(" overhead. ");
 629     out->cr();
 630 
 631     out->print("       Both:  ");
 632     const size_t cap = cap_nc + cap_c;
 633 
 634     print_scaled_words(out, cap, scale, 5);
 635     out->print(" capacity, ");
 636     print_scaled_words_and_percentage(out, used_nc + used_c, cap, scale, 5);
 637     out->print(" used, ");
 638     print_scaled_words_and_percentage(out, free_and_waste_nc + free_and_waste_c, cap, scale, 5);
 639     out->print(" free+waste, ");
 640     print_scaled_words_and_percentage(out, overhead_nc + overhead_c, cap, scale, 5);
 641     out->print(" overhead. ");
 642     out->cr();
 643   }
 644 
 645   out->cr();
 646   out->print_cr("Virtual space:");
 647 
 648   print_vs(out, scale);
 649 
 650   out->cr();
 651   out->print_cr("Chunk freelists:");
 652 
 653   if (Metaspace::using_class_space()) {
 654     out->print("   Non-Class:  ");
 655   }
 656   print_human_readable_size(out, Metaspace::chunk_manager_metadata()->free_chunks_total_bytes(), scale);
 657   out->cr();
 658   if (Metaspace::using_class_space()) {
 659     out->print("       Class:  ");
 660     print_human_readable_size(out, Metaspace::chunk_manager_class()->free_chunks_total_bytes(), scale);
 661     out->cr();
 662     out->print("        Both:  ");
 663     print_human_readable_size(out, Metaspace::chunk_manager_class()->free_chunks_total_bytes() +
 664                               Metaspace::chunk_manager_metadata()->free_chunks_total_bytes(), scale);
 665     out->cr();
 666   }
 667 
 668   out->cr();
 669 
 670   // Print basic settings
 671   print_basic_switches(out, scale);
 672 
 673   out->cr();
 674 
 675 }
 676 
 677 void MetaspaceUtils::print_report(outputStream* out, size_t scale, int flags) {
 678 
 679   if (!Metaspace::initialized()) {
 680     out->print_cr("Metaspace not yet initialized.");
 681     return;
 682   }
 683 
 684   const bool print_loaders = (flags & rf_show_loaders) > 0;
 685   const bool print_classes = (flags & rf_show_classes) > 0;
 686   const bool print_by_chunktype = (flags & rf_break_down_by_chunktype) > 0;
 687   const bool print_by_spacetype = (flags & rf_break_down_by_spacetype) > 0;
 688 
 689   // Some report options require walking the class loader data graph.
 690   PrintCLDMetaspaceInfoClosure cl(out, scale, print_loaders, print_classes, print_by_chunktype);
 691   if (print_loaders) {
 692     out->cr();
 693     out->print_cr("Usage per loader:");
 694     out->cr();
 695   }
 696 
 697   ClassLoaderDataGraph::loaded_cld_do(&cl); // collect data and optionally print
 698 
 699   // Print totals, broken up by space type.
 700   if (print_by_spacetype) {
 701     out->cr();
 702     out->print_cr("Usage per space type:");
 703     out->cr();
 704     for (int space_type = (int)Metaspace::ZeroMetaspaceType;
 705          space_type < (int)Metaspace::MetaspaceTypeCount; space_type ++)
 706     {
 707       uintx num_loaders = cl._num_loaders_by_spacetype[space_type];
 708       uintx num_classes = cl._num_classes_by_spacetype[space_type];
 709       out->print("%s - " UINTX_FORMAT " %s",
 710         space_type_name((Metaspace::MetaspaceType)space_type),
 711         num_loaders, loaders_plural(num_loaders));
 712       if (num_classes > 0) {
 713         out->print(", ");
 714         print_number_of_classes(out, num_classes, cl._num_classes_shared_by_spacetype[space_type]);
 715         out->print(":");
 716         cl._stats_by_spacetype[space_type].print_on(out, scale, print_by_chunktype);
 717       } else {
 718         out->print(".");
 719         out->cr();
 720       }
 721       out->cr();
 722     }
 723   }
 724 
 725   // Print totals for in-use data:
 726   out->cr();
 727   {
 728     uintx num_loaders = cl._num_loaders;
 729     out->print("Total Usage - " UINTX_FORMAT " %s, ",
 730       num_loaders, loaders_plural(num_loaders));
 731     print_number_of_classes(out, cl._num_classes, cl._num_classes_shared);
 732     out->print(":");
 733     cl._stats_total.print_on(out, scale, print_by_chunktype);
 734     out->cr();
 735   }
 736 
 737   // -- Print Virtual space.
 738   out->cr();
 739   out->print_cr("Virtual space:");
 740 
 741   print_vs(out, scale);
 742 
 743   // -- Print VirtualSpaceList details.
 744   if ((flags & rf_show_vslist) > 0) {
 745     out->cr();
 746     out->print_cr("Virtual space list%s:", Metaspace::using_class_space() ? "s" : "");
 747 
 748     if (Metaspace::using_class_space()) {
 749       out->print_cr("   Non-Class:");
 750     }
 751     Metaspace::space_list()->print_on(out, scale);
 752     if (Metaspace::using_class_space()) {
 753       out->print_cr("       Class:");
 754       Metaspace::class_space_list()->print_on(out, scale);
 755     }
 756   }
 757   out->cr();
 758 
 759   // -- Print VirtualSpaceList map.
 760   if ((flags & rf_show_vsmap) > 0) {
 761     out->cr();
 762     out->print_cr("Virtual space map:");
 763 
 764     if (Metaspace::using_class_space()) {
 765       out->print_cr("   Non-Class:");
 766     }
 767     Metaspace::space_list()->print_map(out);
 768     if (Metaspace::using_class_space()) {
 769       out->print_cr("       Class:");
 770       Metaspace::class_space_list()->print_map(out);
 771     }
 772   }
 773   out->cr();
 774 
 775   // -- Print Freelists (ChunkManager) details
 776   out->cr();
 777   out->print_cr("Chunk freelist%s:", Metaspace::using_class_space() ? "s" : "");
 778 
 779   ChunkManagerStatistics non_class_cm_stat;
 780   Metaspace::chunk_manager_metadata()->collect_statistics(&non_class_cm_stat);
 781 
 782   if (Metaspace::using_class_space()) {
 783     out->print_cr("   Non-Class:");
 784   }
 785   non_class_cm_stat.print_on(out, scale);
 786 
 787   if (Metaspace::using_class_space()) {
 788     ChunkManagerStatistics class_cm_stat;
 789     Metaspace::chunk_manager_class()->collect_statistics(&class_cm_stat);
 790     out->print_cr("       Class:");
 791     class_cm_stat.print_on(out, scale);
 792   }
 793 
 794   // As a convenience, print a summary of common waste.
 795   out->cr();
 796   out->print("Waste ");
  797   // For all waste categories, print percentages of the total. As the total, use the total size of memory committed for metaspace.
 798   const size_t committed_words = committed_bytes() / BytesPerWord;
 799 
 800   out->print("(percentages refer to total committed size ");
 801   print_scaled_words(out, committed_words, scale);
 802   out->print_cr("):");
 803 
 804   // Print space committed but not yet used by any class loader
 805   const size_t unused_words_in_vs = MetaspaceUtils::free_in_vs_bytes() / BytesPerWord;
 806   out->print("              Committed unused: ");
 807   print_scaled_words_and_percentage(out, unused_words_in_vs, committed_words, scale, 6);
 808   out->cr();
 809 
 810   // Print waste for in-use chunks.
 811   UsedChunksStatistics ucs_nonclass = cl._stats_total.nonclass_sm_stats().totals();
 812   UsedChunksStatistics ucs_class = cl._stats_total.class_sm_stats().totals();
 813   UsedChunksStatistics ucs_all;
 814   ucs_all.add(ucs_nonclass);
 815   ucs_all.add(ucs_class);
 816 
 817   out->print("        Waste in chunks in use: ");
 818   print_scaled_words_and_percentage(out, ucs_all.waste(), committed_words, scale, 6);
 819   out->cr();
 820   out->print("         Free in chunks in use: ");
 821   print_scaled_words_and_percentage(out, ucs_all.free(), committed_words, scale, 6);
 822   out->cr();
 823   out->print("     Overhead in chunks in use: ");
 824   print_scaled_words_and_percentage(out, ucs_all.overhead(), committed_words, scale, 6);
 825   out->cr();
 826 
 827   // Print waste in free chunks.
 828   const size_t total_capacity_in_free_chunks =
 829       Metaspace::chunk_manager_metadata()->free_chunks_total_words() +
 830      (Metaspace::using_class_space() ? Metaspace::chunk_manager_class()->free_chunks_total_words() : 0);
 831   out->print("                In free chunks: ");
 832   print_scaled_words_and_percentage(out, total_capacity_in_free_chunks, committed_words, scale, 6);
 833   out->cr();
 834 
 835   // Print waste in deallocated blocks.
 836   const uintx free_blocks_num =
 837       cl._stats_total.nonclass_sm_stats().free_blocks_num() +
 838       cl._stats_total.class_sm_stats().free_blocks_num();
 839   const size_t free_blocks_cap_words =
 840       cl._stats_total.nonclass_sm_stats().free_blocks_cap_words() +
 841       cl._stats_total.class_sm_stats().free_blocks_cap_words();
 842   out->print("Deallocated from chunks in use: ");
 843   print_scaled_words_and_percentage(out, free_blocks_cap_words, committed_words, scale, 6);
 844   out->print(" (" UINTX_FORMAT " blocks)", free_blocks_num);
 845   out->cr();
 846 
 847   // Print total waste.
 848   const size_t total_waste = ucs_all.waste() + ucs_all.free() + ucs_all.overhead() + total_capacity_in_free_chunks
 849       + free_blocks_cap_words + unused_words_in_vs;
 850   out->print("                       -total-: ");
 851   print_scaled_words_and_percentage(out, total_waste, committed_words, scale, 6);
 852   out->cr();
 853 
 854   // Print internal statistics
 855 #ifdef ASSERT
 856   out->cr();
 857   out->cr();
 858   out->print_cr("Internal statistics:");
 859   out->cr();
 860   out->print_cr("Number of allocations: " UINTX_FORMAT ".", g_internal_statistics.num_allocs);
 861   out->print_cr("Number of space births: " UINTX_FORMAT ".", g_internal_statistics.num_metaspace_births);
 862   out->print_cr("Number of space deaths: " UINTX_FORMAT ".", g_internal_statistics.num_metaspace_deaths);
 863   out->print_cr("Number of virtual space node births: " UINTX_FORMAT ".", g_internal_statistics.num_vsnodes_created);
 864   out->print_cr("Number of virtual space node deaths: " UINTX_FORMAT ".", g_internal_statistics.num_vsnodes_purged);
 865   out->print_cr("Number of times virtual space nodes were expanded: " UINTX_FORMAT ".", g_internal_statistics.num_committed_space_expanded);
 866   out->print_cr("Number of deallocations: " UINTX_FORMAT " (" UINTX_FORMAT " external).", g_internal_statistics.num_deallocs, g_internal_statistics.num_external_deallocs);
 867   out->print_cr("Allocations from deallocated blocks: " UINTX_FORMAT ".", g_internal_statistics.num_allocs_from_deallocated_blocks);
 868   out->print_cr("Number of chunks added to freelist: " UINTX_FORMAT ".",
 869                 g_internal_statistics.num_chunks_added_to_freelist);
 870   out->print_cr("Number of chunks removed from freelist: " UINTX_FORMAT ".",
 871                 g_internal_statistics.num_chunks_removed_from_freelist);
 872   out->print_cr("Number of chunk merges: " UINTX_FORMAT ", split-ups: " UINTX_FORMAT ".",
 873                 g_internal_statistics.num_chunk_merges, g_internal_statistics.num_chunk_splits);
 874 
 875   out->cr();
 876 #endif
 877 
 878   // Print some interesting settings
 879   out->cr();
 880   out->cr();
 881   print_basic_switches(out, scale);
 882 
 883   out->cr();
 884   out->print("InitialBootClassLoaderMetaspaceSize: ");
 885   print_human_readable_size(out, InitialBootClassLoaderMetaspaceSize, scale);
 886 
 887   out->cr();
 888   out->cr();
 889 
 890 } // MetaspaceUtils::print_report()
 891 
 892 // Prints an ASCII representation of the given space.
 893 void MetaspaceUtils::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
 894   MutexLocker cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
 895   const bool for_class = mdtype == Metaspace::ClassType ? true : false;
 896   VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
 897   if (vsl != NULL) {
 898     if (for_class) {
 899       if (!Metaspace::using_class_space()) {
 900         out->print_cr("No Class Space.");
 901         return;
 902       }
 903       out->print_raw("---- Metaspace Map (Class Space) ----");
 904     } else {
 905       out->print_raw("---- Metaspace Map (Non-Class Space) ----");
 906     }
 907     // Print legend:
 908     out->cr();
 909     out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
 910     out->cr();
 911     VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
 912     vsl->print_map(out);
 913     out->cr();
 914   }
 915 }
 916 
 917 void MetaspaceUtils::verify_free_chunks() {
 918 #ifdef ASSERT
 919   Metaspace::chunk_manager_metadata()->verify(false);
 920   if (Metaspace::using_class_space()) {
 921     Metaspace::chunk_manager_class()->verify(false);
 922   }
 923 #endif
 924 }
 925 
 926 void MetaspaceUtils::verify_metrics() {
 927 #ifdef ASSERT
 928   // Please note: there are time windows where the internal counters are out of sync with
 929   // reality. For example, when a newly created ClassLoaderMetaspace creates its first chunk -
 930   // the ClassLoaderMetaspace is not yet attached to its ClassLoaderData object and hence will
 931   // not be counted when iterating the CLDG. So be careful when you call this method.
 932   ClassLoaderMetaspaceStatistics total_stat;
 933   collect_statistics(&total_stat);
 934   UsedChunksStatistics nonclass_chunk_stat = total_stat.nonclass_sm_stats().totals();
 935   UsedChunksStatistics class_chunk_stat = total_stat.class_sm_stats().totals();
 936 
 937   bool mismatch = false;
 938   for (int i = 0; i < Metaspace::MetadataTypeCount; i ++) {
 939     Metaspace::MetadataType mdtype = (Metaspace::MetadataType)i;
 940     UsedChunksStatistics chunk_stat = total_stat.sm_stats(mdtype).totals();
 941     if (capacity_words(mdtype) != chunk_stat.cap() ||
 942         used_words(mdtype) != chunk_stat.used() ||
 943         overhead_words(mdtype) != chunk_stat.overhead()) {
 944       mismatch = true;
 945       tty->print_cr("MetaspaceUtils::verify_metrics: counter mismatch for mdtype=%u:", mdtype);
 946       tty->print_cr("Expected cap " SIZE_FORMAT ", used " SIZE_FORMAT ", overhead " SIZE_FORMAT ".",
 947                     capacity_words(mdtype), used_words(mdtype), overhead_words(mdtype));
 948       tty->print_cr("Got cap " SIZE_FORMAT ", used " SIZE_FORMAT ", overhead " SIZE_FORMAT ".",
 949                     chunk_stat.cap(), chunk_stat.used(), chunk_stat.overhead());
 950       tty->flush();
 951     }
 952   }
 953   assert(mismatch == false, "MetaspaceUtils::verify_metrics: counter mismatch.");
 954 #endif
 955 }
 956 
 957 // Metaspace methods
 958 
 959 size_t Metaspace::_first_chunk_word_size = 0;
 960 size_t Metaspace::_first_class_chunk_word_size = 0;
 961 
 962 size_t Metaspace::_commit_alignment = 0;
 963 size_t Metaspace::_reserve_alignment = 0;
 964 
 965 VirtualSpaceList* Metaspace::_space_list = NULL;
 966 VirtualSpaceList* Metaspace::_class_space_list = NULL;
 967 
 968 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
 969 ChunkManager* Metaspace::_chunk_manager_class = NULL;
 970 
 971 bool Metaspace::_initialized = false;
 972 
 973 #define VIRTUALSPACEMULTIPLIER 2
 974 
 975 #ifdef _LP64
 976 
 977 void Metaspace::print_compressed_class_space(outputStream* st) {
 978   if (_class_space_list != NULL) {
 979     address base = (address)_class_space_list->current_virtual_space()->bottom();
 980     address top = base + compressed_class_space_size();
 981     st->print("Compressed class space mapped at: " PTR_FORMAT "-" PTR_FORMAT ", size: " SIZE_FORMAT,
 982                p2i(base), p2i(top), top - base);

 983     st->cr();
 984   }
 985 }
 986 
 987 // Given a prereserved space, use that to set up the compressed class space list.
  988 void Metaspace::initialize_class_space(ReservedSpace rs) {
  989   assert(using_class_space(), "Must be using class space");
 990   assert(_class_space_list == NULL && _chunk_manager_class == NULL, "Only call once");
 991 
 992   assert(rs.size() == CompressedClassSpaceSize, SIZE_FORMAT " != " SIZE_FORMAT,
 993          rs.size(), CompressedClassSpaceSize);
 994   assert(is_aligned(rs.base(), Metaspace::reserve_alignment()) &&
 995          is_aligned(rs.size(), Metaspace::reserve_alignment()),
 996          "wrong alignment");
 997 
 998   _class_space_list = new VirtualSpaceList(rs);
 999   _chunk_manager_class = new ChunkManager(true/*is_class*/);
1000 
 1001 // This currently does not work because rs may be the result of a split
1002   // operation and NMT seems not to be able to handle splits.
1003   // Will be fixed with JDK-8243535.
1004   // MemTracker::record_virtual_memory_type((address)rs.base(), mtClass);
1005 
1006   if (!_class_space_list->initialization_succeeded()) {
1007     vm_exit_during_initialization("Failed to setup compressed class space virtual space list.");
1008   }
 1009 
 1010 }
1011 
1012 // Reserve a range of memory at an address suitable for en/decoding narrow
1013 // Klass pointers (see: CompressedClassPointers::is_valid_base()).
1014 // The returned address shall both be suitable as a compressed class pointers
1015 //  base, and aligned to Metaspace::reserve_alignment (which is equal to or a
1016 //  multiple of allocation granularity).
1017 // On error, returns an unreserved space.
1018 ReservedSpace Metaspace::reserve_address_space_for_compressed_classes(size_t size) {
1019 
1020 #ifdef AARCH64
1021   const size_t alignment = Metaspace::reserve_alignment();
1022 
1023   // AArch64: Try to align metaspace so that we can decode a compressed
1024   // klass with a single MOVK instruction. We can do this iff the
1025   // compressed class base is a multiple of 4G.
1026   // Additionally, above 32G, ensure the lower LogKlassAlignmentInBytes bits
1027   // of the upper 32-bits of the address are zero so we can handle a shift
1028   // when decoding.
1029 


1046       if (rs.is_reserved()) {
1047         assert(a == (address)rs.base(), "Sanity");
1048         return rs;
1049       }
1050       a +=  search_ranges[i].increment;
1051     }
1052   }
1053 
1054   // Note: on AARCH64, if the code above does not find any good placement, we
1055   // have no recourse. We return an empty space and the VM will exit.
1056   return ReservedSpace();
1057 #else
1058   // Default implementation: Just reserve anywhere.
1059   return ReservedSpace(size, Metaspace::reserve_alignment(), false, (char*)NULL);
1060 #endif // AARCH64
1061 }
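A minimal sketch of the placement predicate described in the AArch64 comment above: the base must be a multiple of 4G to be encodable with a single MOVK, and above 32G the lower LogKlassAlignmentInBytes bits of the upper 32 bits must be clear so a shift can be applied when decoding. The helper is illustrative only and not the patch's implementation; it only restates the comment.

// Sketch only: could `a` serve as a cheap-to-encode compressed class base on AArch64?
static bool is_aarch64_friendly_ccs_base_sketch(address a) {
  const uint64_t addr = (uint64_t)(uintptr_t)a;
  if ((addr & (4 * G - 1)) != 0) {
    return false;                 // base must be 4G-aligned for a single MOVK
  }
  if (addr >= 32 * G) {
    // A shift is needed for decoding, so the lower LogKlassAlignmentInBytes
    // bits of the upper 32 bits must be zero.
    const uint64_t upper_half = addr >> 32;
    return (upper_half & right_n_bits(LogKlassAlignmentInBytes)) == 0;
  }
  return true;
}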
1062 
1063 #endif // _LP64
1064 
1065 
1066 void Metaspace::ergo_initialize() {
1067   if (DumpSharedSpaces) {
1068     // Using large pages when dumping the shared archive is currently not implemented.
1069     FLAG_SET_ERGO(UseLargePagesInMetaspace, false);
1070   }
1071 
1072   size_t page_size = os::vm_page_size();
1073   if (UseLargePages && UseLargePagesInMetaspace) {
1074     page_size = os::large_page_size();
1075   }
1076 
1077   _commit_alignment  = page_size;
1078   _reserve_alignment = MAX2(page_size, (size_t)os::vm_allocation_granularity());
1079 
1080   // The upcoming Metaspace rewrite will impose a higher alignment granularity.
1081   // To prepare for that and to catch/prevent any misuse of Metaspace alignment
1082   // which may creep in, up the alignment a bit.
1083   if (_reserve_alignment == 4 * K) {
1084     _reserve_alignment *= 4;
1085   }
1086 
1087   // Do not use FLAG_SET_ERGO to update MaxMetaspaceSize, since this will
1088   // override if MaxMetaspaceSize was set on the command line or not.
1089   // This information is needed later to conform to the specification of the
1090   // java.lang.management.MemoryUsage API.
1091   //
1092   // Ideally, we would be able to set the default value of MaxMetaspaceSize in
1093   // globals.hpp to the aligned value, but this is not possible, since the
1094   // alignment depends on other flags being parsed.
 1095   MaxMetaspaceSize = align_down_bounded(MaxMetaspaceSize, _reserve_alignment);
 1096 
1097   if (MetaspaceSize > MaxMetaspaceSize) {
1098     MetaspaceSize = MaxMetaspaceSize;
1099   }
1100 
1101   MetaspaceSize = align_down_bounded(MetaspaceSize, _commit_alignment);
1102 
1103   assert(MetaspaceSize <= MaxMetaspaceSize, "MetaspaceSize should be limited by MaxMetaspaceSize");
1104 
1105   MinMetaspaceExpansion = align_down_bounded(MinMetaspaceExpansion, _commit_alignment);
1106   MaxMetaspaceExpansion = align_down_bounded(MaxMetaspaceExpansion, _commit_alignment);
1107 
1108   CompressedClassSpaceSize = align_down_bounded(CompressedClassSpaceSize, _reserve_alignment);
1109 
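Each of the flag adjustments above uses align_down_bounded(); the sketch below illustrates the intended effect, assuming the usual semantics suggested by the name (round down to the alignment, but never below one alignment unit). The _sketch suffix marks it as illustrative, not this file's implementation.

// Sketch only: round `value` down to a multiple of `alignment` (assumed to be
// a power of two), but never return less than `alignment` itself.
static size_t align_down_bounded_sketch(size_t value, size_t alignment) {
  const size_t aligned = value & ~(alignment - 1);
  return MAX2(aligned, alignment);
}
// Example: align_down_bounded_sketch(100 * K + 1, 64 * K) == 64 * K, and
// align_down_bounded_sketch(10, 64 * K) == 64 * K.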
1110   // Initial virtual space size will be calculated at global_initialize()
1111   size_t min_metaspace_sz =
1112       VIRTUALSPACEMULTIPLIER * InitialBootClassLoaderMetaspaceSize;
1113   if (UseCompressedClassPointers) {
1114     if ((min_metaspace_sz + CompressedClassSpaceSize) >  MaxMetaspaceSize) {
1115       if (min_metaspace_sz >= MaxMetaspaceSize) {
1116         vm_exit_during_initialization("MaxMetaspaceSize is too small.");
1117       } else {
1118         FLAG_SET_ERGO(CompressedClassSpaceSize,
1119                       MaxMetaspaceSize - min_metaspace_sz);
1120       }
1121     }
1122   } else if (min_metaspace_sz >= MaxMetaspaceSize) {
1123     FLAG_SET_ERGO(InitialBootClassLoaderMetaspaceSize,
1124                   min_metaspace_sz);
1125   }
1126 
1127   set_compressed_class_space_size(CompressedClassSpaceSize);
1128 }
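A worked example of the arbitration above, with illustrative values: if InitialBootClassLoaderMetaspaceSize were 4M, then min_metaspace_sz is 8M (VIRTUALSPACEMULTIPLIER is 2); with MaxMetaspaceSize at 512M and CompressedClassSpaceSize at 1G, the sum exceeds the limit, so CompressedClassSpaceSize is ergonomically reduced to 512M - 8M = 504M. The VM exits during initialization only if min_metaspace_sz alone already reaches MaxMetaspaceSize.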
1129 
1130 void Metaspace::global_initialize() {
 1131   MetaspaceGC::initialize();
 1132 
1133   // If UseCompressedClassPointers=1, we have two cases:
1134   // a) if CDS is active (either dump time or runtime), it will create the ccs
1135   //    for us, initialize it and set up CompressedKlassPointers encoding.
1136   //    Class space will be reserved above the mapped archives.
1137   // b) if CDS is not active, we will create the ccs on our own. It will be
1138   //    placed above the java heap, since we assume it has been placed in low
1139   //    address regions. We may rethink this (see JDK-8244943). Failing that,
1140   //    it will be placed anywhere.
1141 
1142 #if INCLUDE_CDS
1143   // case (a)
1144   if (DumpSharedSpaces) {
1145     MetaspaceShared::initialize_dumptime_shared_and_meta_spaces();
1146   } else if (UseSharedSpaces) {
1147     // If any of the archived space fails to map, UseSharedSpaces
1148     // is reset to false.
1149     MetaspaceShared::initialize_runtime_shared_and_meta_spaces();
1150   }
1151 


1171     address base = UseCompressedOops ? CompressedOops::end() : (address)HeapBaseMinAddress;
1172     base = align_up(base, Metaspace::reserve_alignment());
1173 
1174     const size_t size = align_up(CompressedClassSpaceSize, Metaspace::reserve_alignment());
1175     if (base != NULL) {
1176       if (CompressedKlassPointers::is_valid_base(base)) {
1177         rs = ReservedSpace(size, Metaspace::reserve_alignment(),
1178                            false /* large */, (char*)base);
1179       }
1180     }
1181 
1182     // ...failing that, reserve anywhere, but let platform do optimized placement:
1183     if (!rs.is_reserved()) {
1184       rs = Metaspace::reserve_address_space_for_compressed_classes(size);
1185     }
1186 
1187     // ...failing that, give up.
1188     if (!rs.is_reserved()) {
1189       vm_exit_during_initialization(
1190           err_msg("Could not allocate compressed class space: " SIZE_FORMAT " bytes",
1191                    compressed_class_space_size()));
1192     }
1193 
1194     // Initialize space
1195     Metaspace::initialize_class_space(rs);
1196 
1197     // Set up compressed class pointer encoding.
1198     CompressedKlassPointers::initialize((address)rs.base(), rs.size());
1199   }
1200 
1201 #endif
1202 
1203   // Initialize these before initializing the VirtualSpaceList
1204   _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
1205   _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
1206   // Make the first class chunk bigger than a medium chunk so it's not put
1207   // on the medium chunk list.   The next chunk will be small and progress
 1208 // from there.  This size was calculated by running -version.
1209   _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
1210                                      (CompressedClassSpaceSize/BytesPerWord)*2);
1211   _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
1212   // Arbitrarily set the initial virtual space to a multiple
1213   // of the boot class loader size.
1214   size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
1215   word_size = align_up(word_size, Metaspace::reserve_alignment_words());
1216 
1217   // Initialize the list of virtual spaces.
1218   _space_list = new VirtualSpaceList(word_size);
1219   _chunk_manager_metadata = new ChunkManager(false/*metaspace*/);
1220 
1221   if (!_space_list->initialization_succeeded()) {
1222     vm_exit_during_initialization("Unable to setup metadata virtual space list.", NULL);
1223   }
1224 
1225   _tracer = new MetaspaceTracer();
1226 
 1227   _initialized = true;
 1228 
1229 #ifdef _LP64
1230   if (UseCompressedClassPointers) {
1231     // Note: "cds" would be a better fit but keep this for backward compatibility.
1232     LogTarget(Info, gc, metaspace) lt;
1233     if (lt.is_enabled()) {
1234       ResourceMark rm;
1235       LogStream ls(lt);
1236       CDS_ONLY(MetaspaceShared::print_on(&ls);)
1237       Metaspace::print_compressed_class_space(&ls);
1238       CompressedKlassPointers::print_mode(&ls);
1239     }
1240   }
1241 #endif
1242 
1243 }
1244 
1245 void Metaspace::post_initialize() {
1246   MetaspaceGC::post_initialize();
1247 }
1248 
1249 void Metaspace::verify_global_initialization() {
1250   assert(space_list() != NULL, "Metadata VirtualSpaceList has not been initialized");
1251   assert(chunk_manager_metadata() != NULL, "Metadata ChunkManager has not been initialized");
1252 
1253   if (using_class_space()) {
1254     assert(class_space_list() != NULL, "Class VirtualSpaceList has not been initialized");
1255     assert(chunk_manager_class() != NULL, "Class ChunkManager has not been initialized");
1256   }
1257 }
1258 
1259 size_t Metaspace::align_word_size_up(size_t word_size) {
1260   size_t byte_size = word_size * wordSize;
1261   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
1262 }
1263 
1264 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
 1265                               MetaspaceObj::Type type, TRAPS) {
 1266   assert(!_frozen, "sanity");
1267   assert(!(DumpSharedSpaces && THREAD->is_VM_thread()), "sanity");
1268 
1269   if (HAS_PENDING_EXCEPTION) {
1270     assert(false, "Should not allocate with exception pending");
1271     return NULL;  // caller does a CHECK_NULL too
1272   }
1273 
1274   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
1275         "ClassLoaderData::the_null_class_loader_data() should have been used.");
1276 
1277   MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
1278 
1279   // Try to allocate metadata.
1280   MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
1281 
1282   if (result == NULL) {
1283     tracer()->report_metaspace_allocation_failure(loader_data, word_size, type, mdtype);
1284 
1285     // Allocation failed.
1286     if (is_init_completed()) {
1287       // Only start a GC if the bootstrapping has completed.
1288       // Try to clean out some heap memory and retry. This can prevent premature
1289       // expansion of the metaspace.
1290       result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data, word_size, mdtype);
1291     }
1292   }
1293 
1294   if (result == NULL) {
1295     if (DumpSharedSpaces) {
1296       // CDS dumping keeps loading classes, so if we hit an OOM we probably will keep hitting OOM.
1297       // We should abort to avoid generating a potentially bad archive.
1298       vm_exit_during_cds_dumping(err_msg("Failed allocating metaspace object type %s of size " SIZE_FORMAT ". CDS dump aborted.",
1299           MetaspaceObj::type_name(type), word_size * BytesPerWord),
1300         err_msg("Please increase MaxMetaspaceSize (currently " SIZE_FORMAT " bytes).", MaxMetaspaceSize));
1301     }
1302     report_metadata_oome(loader_data, word_size, type, mdtype, THREAD);
1303     assert(HAS_PENDING_EXCEPTION, "sanity");
1304     return NULL;
1305   }
1306 
1307   // Zero initialize.
1308   Copy::fill_to_words((HeapWord*)result, word_size, 0);
 1309 
 1310   return result;
1311 }
1312 
1313 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
1314   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
1315 
1316   // If result is still null, we are out of memory.
1317   Log(gc, metaspace, freelist, oom) log;
1318   if (log.is_info()) {
1319     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
1320              is_class_space_allocation(mdtype) ? "class" : "data", word_size);
1321     ResourceMark rm;
1322     if (log.is_debug()) {
1323       if (loader_data->metaspace_or_null() != NULL) {
1324         LogStream ls(log.debug());
1325         loader_data->print_value_on(&ls);
1326       }
1327     }
1328     LogStream ls(log.info());
1329     // In case of an OOM, log out a short but still useful report.
1330     MetaspaceUtils::print_basic_report(&ls, 0);
1331   }
1332 

1333   bool out_of_compressed_class_space = false;
1334   if (is_class_space_allocation(mdtype)) {
1335     ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
1336     out_of_compressed_class_space =
1337       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
 1338       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
 1339       CompressedClassSpaceSize;
1340   }
1341 
1342   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
1343   const char* space_string = out_of_compressed_class_space ?
1344     "Compressed class space" : "Metaspace";
1345 
1346   report_java_out_of_memory(space_string);
1347 
1348   if (JvmtiExport::should_post_resource_exhausted()) {
1349     JvmtiExport::post_resource_exhausted(
1350         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
1351         space_string);
1352   }
1353 
1354   if (!is_init_completed()) {
1355     vm_exit_during_initialization("OutOfMemoryError", space_string);
1356   }
1357 
1358   if (out_of_compressed_class_space) {
1359     THROW_OOP(Universe::out_of_memory_error_class_metaspace());
1360   } else {
1361     THROW_OOP(Universe::out_of_memory_error_metaspace());
1362   }
1363 }
1364 
1365 const char* Metaspace::metadata_type_name(Metaspace::MetadataType mdtype) {
1366   switch (mdtype) {
1367     case Metaspace::ClassType: return "Class";
1368     case Metaspace::NonClassType: return "Metadata";
1369     default:
1370       assert(false, "Got bad mdtype: %d", (int) mdtype);
1371       return NULL;
1372   }
1373 }
1374 
1375 void Metaspace::purge(MetadataType mdtype) {
1376   get_space_list(mdtype)->purge(get_chunk_manager(mdtype));
1377 }
1378 
1379 void Metaspace::purge() {
1380   MutexLocker cl(MetaspaceExpand_lock,
1381                  Mutex::_no_safepoint_check_flag);
 1382   purge(NonClassType);
 1383   if (using_class_space()) {
 1384     purge(ClassType);
 1385   }
1386 }
1387 
1388 bool Metaspace::contains(const void* ptr) {
1389   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
1390     return true;
1391   }
1392   return contains_non_shared(ptr);
1393 }
1394 
1395 bool Metaspace::contains_non_shared(const void* ptr) {
1396   if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
1397      return true;
1398   }
1399 
1400   return get_space_list(NonClassType)->contains(ptr);
1401 }
1402 
1403 // ClassLoaderMetaspace
1404 
1405 ClassLoaderMetaspace::ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type)
1406   : _space_type(type)
1407   , _lock(lock)
1408   , _vsm(NULL)
1409   , _class_vsm(NULL)
1410 {
1411   initialize(lock, type);
1412 }
1413 
1414 ClassLoaderMetaspace::~ClassLoaderMetaspace() {
1415   Metaspace::assert_not_frozen();
1416   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_metaspace_deaths));
1417   delete _vsm;
1418   if (Metaspace::using_class_space()) {
1419     delete _class_vsm;
1420   }
1421 }
1422 
1423 void ClassLoaderMetaspace::initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
1424   Metachunk* chunk = get_initialization_chunk(type, mdtype);
1425   if (chunk != NULL) {
1426     // Add to this manager's list of chunks in use and make it the current_chunk().
1427     get_space_manager(mdtype)->add_chunk(chunk, true);
1428   }
1429 }
1430 
1431 Metachunk* ClassLoaderMetaspace::get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
1432   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
1433 
1434   // Get a chunk from the chunk freelist
1435   Metachunk* chunk = Metaspace::get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
1436 
1437   if (chunk == NULL) {
1438     chunk = Metaspace::get_space_list(mdtype)->get_new_chunk(chunk_word_size,
1439                                                   get_space_manager(mdtype)->medium_chunk_bunch());
1440   }
1441 
1442   return chunk;
1443 }
1444 
1445 void ClassLoaderMetaspace::initialize(Mutex* lock, Metaspace::MetaspaceType type) {
1446   Metaspace::verify_global_initialization();
1447 
1448   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_metaspace_births));
1449 
1450   // Allocate SpaceManager for metadata objects.
1451   _vsm = new SpaceManager(Metaspace::NonClassType, type, lock);
1452 
1453   if (Metaspace::using_class_space()) {
1454     // Allocate SpaceManager for classes.
1455     _class_vsm = new SpaceManager(Metaspace::ClassType, type, lock);
1456   }
1457 
1458   MutexLocker cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
1459 
1460   // Allocate chunk for metadata objects
1461   initialize_first_chunk(type, Metaspace::NonClassType);
1462 
1463   // Allocate chunk for class metadata objects
1464   if (Metaspace::using_class_space()) {
1465     initialize_first_chunk(type, Metaspace::ClassType);
1466   }
1467 }
1468 
1469 MetaWord* ClassLoaderMetaspace::allocate(size_t word_size, Metaspace::MetadataType mdtype) {
1470   Metaspace::assert_not_frozen();
1471 
1472   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_allocs));
1473 
1474   // Don't use class_vsm() unless UseCompressedClassPointers is true.
1475   if (Metaspace::is_class_space_allocation(mdtype)) {
1476     return  class_vsm()->allocate(word_size);
1477   } else {
1478     return  vsm()->allocate(word_size);
1479   }
1480 }
1481 
1482 MetaWord* ClassLoaderMetaspace::expand_and_allocate(size_t word_size, Metaspace::MetadataType mdtype) {
1483   Metaspace::assert_not_frozen();
1484   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
1485   assert(delta_bytes > 0, "Must be");
1486 
1487   size_t before = 0;
1488   size_t after = 0;
1489   bool can_retry = true;
1490   MetaWord* res;
1491   bool incremented;
1492 
1493   // Each thread increments the HWM at most once. Even if the thread fails to increment
1494   // the HWM, an allocation is still attempted. This is because another thread must then
1495   // have incremented the HWM and therefore the allocation might still succeed.
1496   do {
1497     incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before, &can_retry);
1498     res = allocate(word_size, mdtype);
1499   } while (!incremented && res == NULL && can_retry);
1500 
1501   if (incremented) {
1502     Metaspace::tracer()->report_gc_threshold(before, after,
1503                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
1504     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
1505   }
1506 
1507   return res;
1508 }
1509 
1510 size_t ClassLoaderMetaspace::allocated_blocks_bytes() const {
1511   return (vsm()->used_words() +
1512       (Metaspace::using_class_space() ? class_vsm()->used_words() : 0)) * BytesPerWord;
1513 }
1514 
1515 size_t ClassLoaderMetaspace::allocated_chunks_bytes() const {
1516   return (vsm()->capacity_words() +
1517       (Metaspace::using_class_space() ? class_vsm()->capacity_words() : 0)) * BytesPerWord;
1518 }
1519 
1520 void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
1521   Metaspace::assert_not_frozen();
1522   assert(!SafepointSynchronize::is_at_safepoint()
1523          || Thread::current()->is_VM_thread(), "should be the VM thread");
1524 
1525   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_external_deallocs));
1526 
1527   MutexLocker ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
1528 
1529   if (is_class && Metaspace::using_class_space()) {
1530     class_vsm()->deallocate(ptr, word_size);
1531   } else {
1532     vsm()->deallocate(ptr, word_size);
1533   }
1534 }
1535 
1536 size_t ClassLoaderMetaspace::class_chunk_size(size_t word_size) {
1537   assert(Metaspace::using_class_space(), "Has to use class space");
1538   return class_vsm()->calc_chunk_size(word_size);
1539 }
1540 
1541 void ClassLoaderMetaspace::print_on(outputStream* out) const {
1542   // Print both class virtual space counts and metaspace.
1543   if (Verbose) {
1544     vsm()->print_on(out);
1545     if (Metaspace::using_class_space()) {
1546       class_vsm()->print_on(out);
1547     }
1548   }
1549 }
1550 
1551 void ClassLoaderMetaspace::verify() {
1552   vsm()->verify();
1553   if (Metaspace::using_class_space()) {
1554     class_vsm()->verify();
1555   }
1556 }
1557 
1558 void ClassLoaderMetaspace::add_to_statistics_locked(ClassLoaderMetaspaceStatistics* out) const {
1559   assert_lock_strong(lock());
1560   vsm()->add_to_statistics_locked(&out->nonclass_sm_stats());
1561   if (Metaspace::using_class_space()) {
1562     class_vsm()->add_to_statistics_locked(&out->class_sm_stats());
1563   }
1564 }
1565 
1566 void ClassLoaderMetaspace::add_to_statistics(ClassLoaderMetaspaceStatistics* out) const {
1567   MutexLocker cl(lock(), Mutex::_no_safepoint_check_flag);
1568   add_to_statistics_locked(out);
1569 }
1570 
1571 /////////////// Unit tests ///////////////
1572 
1573 struct chunkmanager_statistics_t {
1574   int num_specialized_chunks;
1575   int num_small_chunks;
1576   int num_medium_chunks;
1577   int num_humongous_chunks;
1578 };
1579 
1580 extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out) {
1581   ChunkManager* const chunk_manager = Metaspace::get_chunk_manager(mdType);
1582   ChunkManagerStatistics stat;
1583   chunk_manager->collect_statistics(&stat);
1584   out->num_specialized_chunks = (int)stat.chunk_stats(SpecializedIndex).num();
1585   out->num_small_chunks = (int)stat.chunk_stats(SmallIndex).num();
1586   out->num_medium_chunks = (int)stat.chunk_stats(MediumIndex).num();
1587   out->num_humongous_chunks = (int)stat.chunk_stats(HumongousIndex).num();
1588 }
1589 
1590 struct chunk_geometry_t {
1591   size_t specialized_chunk_word_size;
1592   size_t small_chunk_word_size;
1593   size_t medium_chunk_word_size;
1594 };
1595 
1596 extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out) {
1597   if (mdType == Metaspace::NonClassType) {
1598     out->specialized_chunk_word_size = SpecializedChunk;
1599     out->small_chunk_word_size = SmallChunk;
1600     out->medium_chunk_word_size = MediumChunk;
1601   } else {
1602     out->specialized_chunk_word_size = ClassSpecializedChunk;
1603     out->small_chunk_word_size = ClassSmallChunk;
1604     out->medium_chunk_word_size = ClassMediumChunk;
1605   }
1606 }


   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 
  27 #include "aot/aotLoader.hpp"

  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "logging/log.hpp"
  30 #include "logging/logStream.hpp"
  31 #include "memory/filemap.hpp"
  32 #include "memory/metaspace.hpp"
  33 #include "memory/metaspaceShared.hpp"
  34 #include "memory/metaspaceTracer.hpp"
  35 #include "memory/metaspace/chunkHeaderPool.hpp"
  36 #include "memory/metaspace/chunkManager.hpp"
  37 #include "memory/metaspace/commitLimiter.hpp"
  38 #include "memory/metaspace/metaspaceCommon.hpp"
  39 #include "memory/metaspace/metaspaceContext.hpp"
  40 #include "memory/metaspace/metaspaceEnums.hpp"
  41 #include "memory/metaspace/metaspaceReport.hpp"
  42 #include "memory/metaspace/metaspaceSizesSnapshot.hpp"
  43 #include "memory/metaspace/runningCounters.hpp"
  44 #include "memory/metaspace/settings.hpp"
  45 #include "memory/metaspace/virtualSpaceList.hpp"


  46 #include "memory/universe.hpp"
  47 #include "oops/compressedOops.hpp"
  48 #include "runtime/atomic.hpp"
  49 #include "runtime/init.hpp"
  50 #include "runtime/java.hpp"
  51 #include "services/memTracker.hpp"
  52 #include "utilities/copy.hpp"
  53 #include "utilities/debug.hpp"
  54 #include "utilities/formatBuffer.hpp"
  55 #include "utilities/globalDefinitions.hpp"

  56 
  57 
  58 using metaspace::ChunkManager;
  59 using metaspace::CommitLimiter;
  60 using metaspace::MetaspaceContext;
  61 using metaspace::MetaspaceReporter;
  62 using metaspace::RunningCounters;
  63 using metaspace::VirtualSpaceList;
  64 

  65 
  66 size_t MetaspaceUtils::used_words() {
  67   return RunningCounters::used_words();
  68 }
  69 
  70 size_t MetaspaceUtils::used_words(Metaspace::MetadataType mdtype) {
  71   return metaspace::is_class(mdtype) ? RunningCounters::used_words_class() : RunningCounters::used_words_nonclass();
  72 }
  73 
  74 size_t MetaspaceUtils::reserved_words() {
  75   return RunningCounters::reserved_words();
  76 }
  77 
  78 size_t MetaspaceUtils::reserved_words(Metaspace::MetadataType mdtype) {
  79   return metaspace::is_class(mdtype) ? RunningCounters::reserved_words_class() : RunningCounters::reserved_words_nonclass();
  80 }
  81 
  82 size_t MetaspaceUtils::committed_words() {
  83   return RunningCounters::committed_words();
  84 }
  85 
  86 size_t MetaspaceUtils::committed_words(Metaspace::MetadataType mdtype) {
  87   return metaspace::is_class(mdtype) ? RunningCounters::committed_words_class() : RunningCounters::committed_words_nonclass();
  88 }
  89 
  90 
  91 
  92 void MetaspaceUtils::print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values) {
  93   const metaspace::MetaspaceSizesSnapshot meta_values;
  94 
  95   // We print used and committed since these are the most useful at-a-glance vitals for Metaspace:
  96   // - used tells you how much memory is actually used for metadata
  97   // - committed tells you how much memory is committed for the purpose of metadata
  98   // The difference between those two would be waste, which can have various forms (freelists,
  99   //   unused parts of committed chunks etc)
 100   //
 101   // Left out is reserved, since this is not as exciting as the first two values: for class space,
 102   // it is a constant (to uninformed users, often confusingly large). For non-class space, it would
 103   // be interesting since free chunks can be uncommitted, but for now it is left out.
 104 
 105   if (Metaspace::using_class_space()) {
 106     log_info(gc, metaspace)(HEAP_CHANGE_FORMAT" "
 107                             HEAP_CHANGE_FORMAT" "
 108                             HEAP_CHANGE_FORMAT,
 109                             HEAP_CHANGE_FORMAT_ARGS("Metaspace",
 110                                                     pre_meta_values.used(),
 111                                                     pre_meta_values.committed(),
 112                                                     meta_values.used(),
 113                                                     meta_values.committed()),
 114                             HEAP_CHANGE_FORMAT_ARGS("NonClass",
 115                                                     pre_meta_values.non_class_used(),
 116                                                     pre_meta_values.non_class_committed(),
 117                                                     meta_values.non_class_used(),
 118                                                     meta_values.non_class_committed()),
 119                             HEAP_CHANGE_FORMAT_ARGS("Class",
 120                                                     pre_meta_values.class_used(),
 121                                                     pre_meta_values.class_committed(),
 122                                                     meta_values.class_used(),
 123                                                     meta_values.class_committed()));
 124   } else {
 125     log_info(gc, metaspace)(HEAP_CHANGE_FORMAT,
 126                             HEAP_CHANGE_FORMAT_ARGS("Metaspace",
 127                                                     pre_meta_values.used(),
 128                                                     pre_meta_values.committed(),
 129                                                     meta_values.used(),
 130                                                     meta_values.committed()));
 131   }

 132 }
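     // An illustrative sample of the line logged above (hedged: the exact rendering
     // comes from HEAP_CHANGE_FORMAT, which prints "name: used(committed)" pairs as
     // before->after; the numbers here are made up):
     //   Metaspace: 1024K(1152K)->1024K(1152K) NonClass: 960K(1024K)->960K(1024K) Class: 64K(128K)->64K(128K)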
 133 
 134 // This will print out a basic metaspace usage report but,
 135 // unlike print_report(), it is guaranteed not to lock or to walk the CLDG.
 136 void MetaspaceUtils::print_basic_report(outputStream* out, size_t scale) {
 137   MetaspaceReporter::print_basic_report(out, scale);
 138 }
 139 
 140 // Prints a report about the current metaspace state.
 141 // Optional parts can be enabled via flags.
 142 // Function will walk the CLDG and will lock the expand lock; if that is not
 143 // convenient, use print_basic_report() instead.
 144 void MetaspaceUtils::print_report(outputStream* out, size_t scale) {
 145   const int flags =
 146       MetaspaceReporter::rf_show_loaders |
 147       MetaspaceReporter::rf_break_down_by_chunktype |
 148       MetaspaceReporter::rf_show_classes;
 149   MetaspaceReporter::print_report(out, scale, flags);
 150 }
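     // A minimal usage sketch for the two report entry points (illustrative only;
     // assumes the usual tty output stream and a fixed scale of K for KB output):
     //
     //   MetaspaceUtils::print_basic_report(tty, K);   // no locks, no CLDG walk
     //   MetaspaceUtils::print_report(tty, K);         // detailed, locks and walks the CLDG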
 151 
 152 void MetaspaceUtils::print_on(outputStream* out) {
 153 
 154   // Used from all GCs. It first prints out totals, then, separately, the class space portion.
 155 
 156   out->print_cr(" Metaspace       "
 157                 "used "      SIZE_FORMAT "K, "
 158                 "committed " SIZE_FORMAT "K, "
 159                 "reserved "  SIZE_FORMAT "K",
 160                 used_bytes()/K,
 161                 committed_bytes()/K,
 162                 reserved_bytes()/K);
 163 
 164   if (Metaspace::using_class_space()) {
 165     const Metaspace::MetadataType ct = Metaspace::ClassType;
 166     out->print_cr("  class space    "
 167                   "used "      SIZE_FORMAT "K, "
 168                   "committed " SIZE_FORMAT "K, "
 169                   "reserved "  SIZE_FORMAT "K",
 170                   used_bytes(ct)/K,
 171                   committed_bytes(ct)/K,
 172                   reserved_bytes(ct)/K);
 173   }
 174 }
 175 
 176 #ifdef ASSERT
 177 void MetaspaceUtils::verify(bool slow) {
 178   if (Metaspace::initialized()) {
 179 
 180     // Verify non-class chunkmanager...
 181     ChunkManager* cm = ChunkManager::chunkmanager_nonclass();
 182     cm->verify(slow);
 183 
 184     // ... and space list.
 185     VirtualSpaceList* vsl = VirtualSpaceList::vslist_nonclass();
 186     vsl->verify(slow);
 187 
 188     if (Metaspace::using_class_space()) {
 189       // If we use compressed class pointers, verify class chunkmanager...
 190       cm = ChunkManager::chunkmanager_class();
 191       assert(cm != NULL, "Sanity");
 192       cm->verify(slow);
 193 
 194       // ... and class spacelist.
 195       VirtualSpaceList* vsl = VirtualSpaceList::vslist_class();
 196       assert(vsl != NULL, "Sanity");
 197       vsl->verify(slow);
 198     }
 199 
 200   }
 201 }
 202 #endif
 203 
 204 ////////////////////////////////
 205 // MetaspaceGC methods
 206 
 207 volatile size_t MetaspaceGC::_capacity_until_GC = 0;
 208 uint MetaspaceGC::_shrink_factor = 0;
 209 
 210 // VM_CollectForMetadataAllocation is the vm operation used to GC.
 211 // Within the VM operation after the GC the attempt to allocate the metadata
 212 // should succeed.  If the GC did not free enough space for the metaspace
 213 // allocation, the HWM is increased so that another virtualspace will be
 214 // allocated for the metadata.  With perm gen the increase in the perm
 215 // gen had bounds, MinMetaspaceExpansion and MaxMetaspaceExpansion.  The
 216 // metaspace policy uses those as the small and large steps for the HWM.
 217 //
 218 // After the GC the compute_new_size() for MetaspaceGC is called to
 219 // resize the capacity of the metaspaces.  The current implementation
 220 // is based on the flags MinMetaspaceFreeRatio and MaxMetaspaceFreeRatio used
 221 // to resize the Java heap by some GC's.  New flags can be implemented
 222 // if really needed.  MinMetaspaceFreeRatio is used to calculate how much
 223 // free space is desirable in the metaspace capacity to decide how much
 224 // to increase the HWM.  MaxMetaspaceFreeRatio is used to decide how much
 225 // free space is desirable in the metaspace capacity before decreasing
 226 // the HWM.
 227 
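     // A numeric sketch of the policy above (illustrative only; the authoritative
     // computation is in MetaspaceGC::compute_new_size()):
     //   With MinMetaspaceFreeRatio=40 and 60 MB used after GC, the minimum desired
     //   capacity is 60 MB / (1 - 0.40) = 100 MB; if _capacity_until_GC is below that,
     //   the HWM grows by at least MinMetaspaceExpansion.
     //   With MaxMetaspaceFreeRatio=70 and the same 60 MB used, the maximum desired
     //   capacity is 60 MB / (1 - 0.70) = 200 MB; only capacity beyond that is a
     //   candidate for shrinking, and shrink steps below MinMetaspaceExpansion are skipped.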
 228 // Calculate the amount to increase the high water mark (HWM).
 229 // Increase by a minimum amount (MinMetaspaceExpansion) so that


 461       } else {
 462         _shrink_factor = MIN2(current_shrink_factor * 4, (uint) 100);
 463       }
 464       log_trace(gc, metaspace)("    shrinking:  initThreshold: %.1fK  maximum_desired_capacity: %.1fK",
 465                                MetaspaceSize / (double) K, maximum_desired_capacity / (double) K);
 466       log_trace(gc, metaspace)("    shrink_bytes: %.1fK  current_shrink_factor: %d  new shrink factor: %d  MinMetaspaceExpansion: %.1fK",
 467                                shrink_bytes / (double) K, current_shrink_factor, _shrink_factor, MinMetaspaceExpansion / (double) K);
 468     }
 469   }
 470 
 471   // Don't shrink unless it's significant
 472   if (shrink_bytes >= MinMetaspaceExpansion &&
 473       ((capacity_until_GC - shrink_bytes) >= MetaspaceSize)) {
 474     size_t new_capacity_until_GC = MetaspaceGC::dec_capacity_until_GC(shrink_bytes);
 475     Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
 476                                              new_capacity_until_GC,
 477                                              MetaspaceGCThresholdUpdater::ComputeNewSize);
 478   }
 479 }
 480 
 481 
 482 
 483 //////  Metaspace methods /////
 484 
 485 const MetaspaceTracer* Metaspace::_tracer = NULL;
 486 
 487 DEBUG_ONLY(bool Metaspace::_frozen = false;)
 488 
 489 bool Metaspace::initialized() {
 490   return metaspace::MetaspaceContext::context_nonclass() != NULL &&
 491       (using_class_space() ? metaspace::MetaspaceContext::context_class() != NULL : true);
 492 }
 493 
 494 #ifdef _LP64
 495 
 496 void Metaspace::print_compressed_class_space(outputStream* st) {
 497   if (VirtualSpaceList::vslist_class() != NULL) {
 498     MetaWord* base = VirtualSpaceList::vslist_class()->base_of_first_node();
 499     size_t size = VirtualSpaceList::vslist_class()->word_size_of_first_node();
 500     MetaWord* top = base + size;
 501     st->print("Compressed class space mapped at: " PTR_FORMAT "-" PTR_FORMAT ", reserved size: " SIZE_FORMAT,
 502                p2i(base), p2i(top), (top - base) * BytesPerWord);
 503     st->cr();
 504   }
 505 }
 506 
 507 // Given a prereserved space, use that to set up the compressed class space list.
 508 void Metaspace::initialize_class_space(ReservedSpace rs) {
 509   assert(rs.size() >= CompressedClassSpaceSize,
 510          SIZE_FORMAT " != " SIZE_FORMAT, rs.size(), CompressedClassSpaceSize);
 511   assert(using_class_space(), "Must be using class space");

 512 
 513   assert(rs.size() == CompressedClassSpaceSize, SIZE_FORMAT " != " SIZE_FORMAT,
 514          rs.size(), CompressedClassSpaceSize);
 515   assert(is_aligned(rs.base(), Metaspace::reserve_alignment()) &&
 516          is_aligned(rs.size(), Metaspace::reserve_alignment()),
 517          "wrong alignment");
 518 
 519   MetaspaceContext::initialize_class_space_context(rs);

 520 
 521   // This currently does not work because rs may be the result of a split
 522   // operation and NMT seems not to be able to handle splits.
 523   // Will be fixed with JDK-8243535.
 524   // MemTracker::record_virtual_memory_type((address)rs.base(), mtClass);
 525 
 526 }


 527 
 528 // Returns true if class space has been setup (initialize_class_space).
 529 bool Metaspace::class_space_is_initialized() {
 530   return MetaspaceContext::context_class() != NULL;
 531 }
 532 
 533 // Reserve a range of memory at an address suitable for en/decoding narrow
 534 // Klass pointers (see: CompressedClassPointers::is_valid_base()).
 535 // The returned address shall both be suitable as a compressed class pointers
 536 //  base, and aligned to Metaspace::reserve_alignment (which is equal to or a
 537 //  multiple of allocation granularity).
 538 // On error, returns an unreserved space.
 539 ReservedSpace Metaspace::reserve_address_space_for_compressed_classes(size_t size) {
 540 
 541 #ifdef AARCH64
 542   const size_t alignment = Metaspace::reserve_alignment();
 543 
 544   // AArch64: Try to align metaspace so that we can decode a compressed
 545   // klass with a single MOVK instruction. We can do this iff the
 546   // compressed class base is a multiple of 4G.
 547   // Additionally, above 32G, ensure the lower LogKlassAlignmentInBytes bits
 548   // of the upper 32-bits of the address are zero so we can handle a shift
 549   // when decoding.
 550 


 567       if (rs.is_reserved()) {
 568         assert(a == (address)rs.base(), "Sanity");
 569         return rs;
 570       }
 571       a +=  search_ranges[i].increment;
 572     }
 573   }
 574 
 575   // Note: on AARCH64, if the code above does not find any good placement, we
 576   // have no recourse. We return an empty space and the VM will exit.
 577   return ReservedSpace();
 578 #else
 579   // Default implementation: Just reserve anywhere.
 580   return ReservedSpace(size, Metaspace::reserve_alignment(), false, (char*)NULL);
 581 #endif // AARCH64
 582 }
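     // An illustrative placement (hedged; these addresses are examples, not what the
     // search ranges above actually try): a ccs base such as 0x0000000100000000 (4 GB)
     // or 0x0000000800000000 (32 GB) has all-zero low 32 bits, so on AArch64 the
     // compressed class base can be materialized with a single MOVK into bits 32..47.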
 583 
 584 #endif // _LP64
 585 
 586 
 587 size_t Metaspace::reserve_alignment_words() {
 588   return metaspace::Settings::virtual_space_node_reserve_alignment_words();
 589 }


 590 
 591 size_t Metaspace::commit_alignment_words() {
 592   return metaspace::Settings::commit_granule_words();
 593 }

 594 
 595 void Metaspace::ergo_initialize() {

 596 
 597   // Must happen before using any setting from Settings::---
 598   metaspace::Settings::ergo_initialize();
 599 
 600   // MaxMetaspaceSize and CompressedClassSpaceSize:



 601   //
 602   // MaxMetaspaceSize is the maximum size, in bytes, of memory we are allowed
 603   //  to commit for the Metaspace.
 604   //  It is just a number; a limit we compare against before committing. It
 605   //  does not have to be aligned to anything.
 606   //  It gets used as compare value in class CommitLimiter.
 607   //  It is set to max_uintx in globals.hpp by default, so by default it does
 608   //  not limit anything.
 609   //
 610   // CompressedClassSpaceSize is the size, in bytes, of the address range we
 611   //  pre-reserve for the compressed class space (if we use class space).
 612   //  This size has to be aligned to the metaspace reserve alignment (to the
 613   //  size of a root chunk). It gets aligned up from whatever value the caller
 614   //  gave us to the next multiple of root chunk size.
 615   //
 616   // Note: Strictly speaking MaxMetaspaceSize and CompressedClassSpaceSize have
 617   //  very little to do with each other. The notion often encountered:
 618   //  MaxMetaspaceSize = CompressedClassSpaceSize + <non-class metadata size>
 619   //  is subtly wrong: MaxMetaspaceSize can be smaller than CompressedClassSpaceSize,
 620   //  in which case we just would not be able to fully commit the class space range.
 621   //
 622   // We still adjust CompressedClassSpaceSize to reasonable limits, mainly to
 623   //  save on reserved space, and to make ergonomics less confusing.
 624 
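     // A worked example of the adjustment below (illustrative; assumes the JEP 387
     // default of a 4 MB root chunk, which is what reserve_alignment() returns):
     //   MaxMetaspaceSize = 256 MB  ->  max_ccs_size = 0.8 * 256 MB ~= 205 MB
     //   CompressedClassSpaceSize (default 1 GB) is capped to ~205 MB and then
     //   aligned up to the root chunk size, yielding 208 MB.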
 625   // (aligned just for cleanliness:)
 626   MaxMetaspaceSize = MAX2(align_down(MaxMetaspaceSize, commit_alignment()), commit_alignment());
 627 
 628   if (UseCompressedClassPointers) {
 629     // Let CCS size not be larger than 80% of MaxMetaspaceSize. Note that this is
 630     // grossly over-dimensioned for most usage scenarios; typical ratio of
 631     // class space : non class space usage is about 1:6. With many small classes,
 632     // it can get as low as 1:2. It is not a big deal though since ccs is only
 633     // reserved and will be committed on demand only.
 634     size_t max_ccs_size = MaxMetaspaceSize * 0.8;
 635     size_t adjusted_ccs_size = MIN2(CompressedClassSpaceSize, max_ccs_size);
 636 
 637     // CCS must be aligned to root chunk size, and be at least the size of one
 638     //  root chunk.
 639     adjusted_ccs_size = align_up(adjusted_ccs_size, reserve_alignment());
 640     adjusted_ccs_size = MAX2(adjusted_ccs_size, reserve_alignment());
 641 
 642     // Note: re-adjusting may have us left with a CompressedClassSpaceSize
 643     //  larger than MaxMetaspaceSize for very small values of MaxMetaspaceSize.
 644     //  Let's just live with that, it's not a big deal.
 645 
 646     if (adjusted_ccs_size != CompressedClassSpaceSize) {
 647       FLAG_SET_ERGO(CompressedClassSpaceSize, adjusted_ccs_size);
 648       log_info(metaspace)("Setting CompressedClassSpaceSize to " SIZE_FORMAT ".",
 649                           CompressedClassSpaceSize);
 650     }
 651   }
 652 
 653   // Set MetaspaceSize, MinMetaspaceExpansion and MaxMetaspaceExpansion
 654   if (MetaspaceSize > MaxMetaspaceSize) {
 655     MetaspaceSize = MaxMetaspaceSize;
 656   }
 657 
 658   MetaspaceSize = align_down_bounded(MetaspaceSize, commit_alignment());
 659 
 660   assert(MetaspaceSize <= MaxMetaspaceSize, "MetaspaceSize should be limited by MaxMetaspaceSize");
 661 
 662   MinMetaspaceExpansion = align_down_bounded(MinMetaspaceExpansion, commit_alignment());
 663   MaxMetaspaceExpansion = align_down_bounded(MaxMetaspaceExpansion, commit_alignment());
 664 

 665 }
 666 
 667 void Metaspace::global_initialize() {
 668   MetaspaceGC::initialize(); // <- since we do not prealloc init chunks anymore is this still needed?
 669 
 670   metaspace::ChunkHeaderPool::initialize();
 671 
 672   // If UseCompressedClassPointers=1, we have two cases:
 673   // a) if CDS is active (either dump time or runtime), it will create the ccs
 674   //    for us, initialize it and set up CompressedKlassPointers encoding.
 675   //    Class space will be reserved above the mapped archives.
 676   // b) if CDS is not active, we will create the ccs on our own. It will be
 677   //    placed above the java heap, since we assume it has been placed in low
 678   //    address regions. We may rethink this (see JDK-8244943). Failing that,
 679   //    it will be placed anywhere.
 680 
 681 #if INCLUDE_CDS
 682   // case (a)
 683   if (DumpSharedSpaces) {
 684     MetaspaceShared::initialize_dumptime_shared_and_meta_spaces();
 685   } else if (UseSharedSpaces) {
 686     // If any of the archived space fails to map, UseSharedSpaces
 687     // is reset to false.
 688     MetaspaceShared::initialize_runtime_shared_and_meta_spaces();
 689   }
 690 


 710     address base = UseCompressedOops ? CompressedOops::end() : (address)HeapBaseMinAddress;
 711     base = align_up(base, Metaspace::reserve_alignment());
 712 
 713     const size_t size = align_up(CompressedClassSpaceSize, Metaspace::reserve_alignment());
 714     if (base != NULL) {
 715       if (CompressedKlassPointers::is_valid_base(base)) {
 716         rs = ReservedSpace(size, Metaspace::reserve_alignment(),
 717                            false /* large */, (char*)base);
 718       }
 719     }
 720 
 721     // ...failing that, reserve anywhere, but let platform do optimized placement:
 722     if (!rs.is_reserved()) {
 723       rs = Metaspace::reserve_address_space_for_compressed_classes(size);
 724     }
 725 
 726     // ...failing that, give up.
 727     if (!rs.is_reserved()) {
 728       vm_exit_during_initialization(
 729           err_msg("Could not allocate compressed class space: " SIZE_FORMAT " bytes",
 730                    CompressedClassSpaceSize));
 731     }
 732 
 733     // Initialize space
 734     Metaspace::initialize_class_space(rs);
 735 
 736     // Set up compressed class pointer encoding.
 737     CompressedKlassPointers::initialize((address)rs.base(), rs.size());
 738   }
 739 
 740 #endif
 741 
 742   // Initialize non-class virtual space list, and its chunk manager:
 743   MetaspaceContext::initialize_nonclass_space_context();
 744 
 745   _tracer = new MetaspaceTracer();
 746 
 747   // We must prevent the very first address of the ccs from being used to store
 748   // metadata, since that address would translate to a narrow pointer of 0, and the
 749   // VM does not distinguish between "narrow 0 as in NULL" and "narrow 0 as in start
 750   //  of ccs".
 751   // Before Elastic Metaspace that did not happen due to the fact that every Metachunk
 752   // had a header and therefore could not allocate anything at offset 0.
 753 #ifdef _LP64
 754   if (using_class_space()) {
 755     // The simplest way to fix this is to allocate a tiny dummy chunk right at the
 756     // start of ccs and do not use it for anything.
 757     MetaspaceContext::context_class()->cm()->get_chunk(metaspace::chunklevel::HIGHEST_CHUNK_LEVEL);
 758   }
 759 #endif
 760 
 761 #ifdef _LP64
 762   if (UseCompressedClassPointers) {
 763     // Note: "cds" would be a better fit but keep this for backward compatibility.
 764     LogTarget(Info, gc, metaspace) lt;
 765     if (lt.is_enabled()) {
 766       ResourceMark rm;
 767       LogStream ls(lt);
 768       CDS_ONLY(MetaspaceShared::print_on(&ls);)
 769       Metaspace::print_compressed_class_space(&ls);
 770       CompressedKlassPointers::print_mode(&ls);
 771     }
 772   }
 773 #endif
 774 
 775 }
 776 
 777 void Metaspace::post_initialize() {
 778   MetaspaceGC::post_initialize();
 779 }
 780 
 781 size_t Metaspace::max_allocation_word_size() {
 782   const size_t max_overhead_words = metaspace::get_raw_word_size_for_requested_word_size(1);
 783   return metaspace::chunklevel::MAX_CHUNK_WORD_SIZE - max_overhead_words;
 784 }
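     // Rough magnitude (hedged; assumes the JEP 387 default root chunk size of 4 MB):
     // MAX_CHUNK_WORD_SIZE corresponds to one root chunk, so a single metadata
     // allocation is capped at just under 4 MB, minus the small per-allocation
     // overhead returned by get_raw_word_size_for_requested_word_size(1).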
 785 
 786 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
 787                               MetaspaceObj::Type type, TRAPS) {
 788   assert(word_size <= Metaspace::max_allocation_word_size(),
 789          "allocation size too large (" SIZE_FORMAT ")", word_size);
 790   assert(!_frozen, "sanity");
 791   assert(!(DumpSharedSpaces && THREAD->is_VM_thread()), "sanity");
 792 
 793   if (HAS_PENDING_EXCEPTION) {
 794     assert(false, "Should not allocate with exception pending");
 795     return NULL;  // caller does a CHECK_NULL too
 796   }
 797 
 798   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
 799         "ClassLoaderData::the_null_class_loader_data() should have been used.");
 800 
 801   Metaspace::MetadataType mdtype = (type == MetaspaceObj::ClassType) ? Metaspace::ClassType : Metaspace::NonClassType;
 802 
 803   // Try to allocate metadata.
 804   MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
 805 
 806   if (result == NULL) {
 807     tracer()->report_metaspace_allocation_failure(loader_data, word_size, type, mdtype);
 808 
 809     // Allocation failed.
 810     if (is_init_completed()) {
 811       // Only start a GC if the bootstrapping has completed.
 812       // Try to clean out some heap memory and retry. This can prevent premature
 813       // expansion of the metaspace.
 814       result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data, word_size, mdtype);
 815     }
 816   }
 817 
 818   if (result == NULL) {
 819     if (DumpSharedSpaces) {
 820       // CDS dumping keeps loading classes, so if we hit an OOM we probably will keep hitting OOM.
 821       // We should abort to avoid generating a potentially bad archive.
 822       vm_exit_during_cds_dumping(err_msg("Failed allocating metaspace object type %s of size " SIZE_FORMAT ". CDS dump aborted.",
 823           MetaspaceObj::type_name(type), word_size * BytesPerWord),
 824         err_msg("Please increase MaxMetaspaceSize (currently " SIZE_FORMAT " bytes).", MaxMetaspaceSize));
 825     }
 826     report_metadata_oome(loader_data, word_size, type, mdtype, THREAD);
 827     assert(HAS_PENDING_EXCEPTION, "sanity");
 828     return NULL;
 829   }
 830 
 831   // Zero initialize.
 832   Copy::fill_to_words((HeapWord*)result, word_size, 0);
 833 
 834   log_trace(metaspace)("Metaspace::allocate: type %d return " PTR_FORMAT ".", (int)type, p2i(result));
 835 
 836   return result;
 837 }
 838 
 839 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
 840   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
 841 
 842   // If result is still null, we are out of memory.
 843   Log(gc, metaspace, freelist, oom) log;
 844   if (log.is_info()) {
 845     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
 846              metaspace::is_class(mdtype) ? "class" : "data", word_size);
 847     ResourceMark rm;
 848     if (log.is_debug()) {
 849       if (loader_data->metaspace_or_null() != NULL) {
 850         LogStream ls(log.debug());
 851         loader_data->print_value_on(&ls);
 852       }
 853     }
 854     LogStream ls(log.info());
 855     // In case of an OOM, log out a short but still useful report.
 856     MetaspaceUtils::print_basic_report(&ls, 0);
 857   }
 858 
 859   // Which limit did we hit? CompressedClassSpaceSize or MaxMetaspaceSize?
 860   bool out_of_compressed_class_space = false;
 861   if (metaspace::is_class(mdtype)) {
 862     ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
 863     out_of_compressed_class_space =
 864       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
 865       // TODO: Okay this is just cheesy.
 866       // Of course this may fail and return incorrect results.
 867       // Think this over - we need some clean way to remember which limit
 868       // exactly we hit during an allocation. Some sort of allocation context structure?
 869       align_up(word_size * BytesPerWord, 4 * M) >
 870       CompressedClassSpaceSize;
 871   }
 872 
 873   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
 874   const char* space_string = out_of_compressed_class_space ?
 875     "Compressed class space" : "Metaspace";
 876 
 877   report_java_out_of_memory(space_string);
 878 
 879   if (JvmtiExport::should_post_resource_exhausted()) {
 880     JvmtiExport::post_resource_exhausted(
 881         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
 882         space_string);
 883   }
 884 
 885   if (!is_init_completed()) {
 886     vm_exit_during_initialization("OutOfMemoryError", space_string);
 887   }
 888 
 889   if (out_of_compressed_class_space) {
 890     THROW_OOP(Universe::out_of_memory_error_class_metaspace());
 891   } else {
 892     THROW_OOP(Universe::out_of_memory_error_metaspace());
 893   }
 894 }
 895 
 896 void Metaspace::purge() {
 897   ChunkManager* cm = ChunkManager::chunkmanager_nonclass();
 898   if (cm != NULL) {
 899     cm->purge();
 900   }
 901   if (using_class_space()) {
 902     cm = ChunkManager::chunkmanager_class();
 903     if (cm != NULL) {
 904       cm->purge();
 905     }
 906   }
 907 }
 908 
 909 bool Metaspace::contains(const void* ptr) {
 910   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
 911     return true;
 912   }
 913   return contains_non_shared(ptr);
 914 }
 915 
 916 bool Metaspace::contains_non_shared(const void* ptr) {
 917   if (using_class_space() && VirtualSpaceList::vslist_class()->contains((MetaWord*)ptr)) {
 918      return true;
 919   }
 920 
 921   return VirtualSpaceList::vslist_nonclass()->contains((MetaWord*)ptr);
 922 }