< prev index next >

src/hotspot/share/memory/metaspaceShared.cpp

Print this page


   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "classfile/classLoaderDataGraph.hpp"

  28 #include "classfile/classListParser.hpp"
  29 #include "classfile/classLoaderExt.hpp"
  30 #include "classfile/dictionary.hpp"
  31 #include "classfile/loaderConstraints.hpp"
  32 #include "classfile/javaClasses.inline.hpp"
  33 #include "classfile/placeholders.hpp"
  34 #include "classfile/symbolTable.hpp"
  35 #include "classfile/stringTable.hpp"
  36 #include "classfile/systemDictionary.hpp"
  37 #include "classfile/systemDictionaryShared.hpp"
  38 #include "code/codeCache.hpp"
  39 #include "gc/shared/softRefPolicy.hpp"
  40 #include "interpreter/bytecodeStream.hpp"
  41 #include "interpreter/bytecodes.hpp"
  42 #include "logging/log.hpp"
  43 #include "logging/logMessage.hpp"
  44 #include "memory/archiveUtils.inline.hpp"
  45 #include "memory/dynamicArchive.hpp"
  46 #include "memory/filemap.hpp"
  47 #include "memory/heapShared.inline.hpp"


  73 #include "utilities/defaultStream.hpp"
  74 #include "utilities/hashtable.inline.hpp"
  75 #if INCLUDE_G1GC
  76 #include "gc/g1/g1CollectedHeap.hpp"
  77 #endif
  78 
  79 ReservedSpace MetaspaceShared::_shared_rs;
  80 VirtualSpace MetaspaceShared::_shared_vs;
  81 ReservedSpace MetaspaceShared::_symbol_rs;
  82 VirtualSpace MetaspaceShared::_symbol_vs;
  83 MetaspaceSharedStats MetaspaceShared::_stats;
  84 bool MetaspaceShared::_has_error_classes;
  85 bool MetaspaceShared::_archive_loading_failed = false;
  86 bool MetaspaceShared::_remapped_readwrite = false;
  87 address MetaspaceShared::_i2i_entry_code_buffers = NULL;
  88 size_t MetaspaceShared::_i2i_entry_code_buffers_size = 0;
  89 void* MetaspaceShared::_shared_metaspace_static_top = NULL;
  90 intx MetaspaceShared::_relocation_delta;
  91 char* MetaspaceShared::_requested_base_address;
  92 bool MetaspaceShared::_use_optimized_module_handling = true;

  93 
  94 // The CDS archive is divided into the following regions:
  95 //     mc  - misc code (the method entry trampolines, c++ vtables)
  96 //     rw  - read-write metadata
  97 //     ro  - read-only metadata and read-only tables
  98 //
  99 //     ca0 - closed archive heap space #0
 100 //     ca1 - closed archive heap space #1 (may be empty)
 101 //     oa0 - open archive heap space #0
 102 //     oa1 - open archive heap space #1 (may be empty)
 103 //
 104 // The mc, rw, and ro regions are linearly allocated, starting from
 105 // SharedBaseAddress, in the order of mc->rw->ro. The size of these 3 regions
 106 // are page-aligned, and there's no gap between any consecutive regions.
 107 //
 108 // These 3 regions are populated in the following steps:
 109 // [1] All classes are loaded in MetaspaceShared::preload_classes(). All metadata are
 110 //     temporarily allocated outside of the shared regions. Only the method entry
 111 //     trampolines are written into the mc region.
 112 // [2] C++ vtables are copied into the mc region.


 223   return &_ro_region;
 224 }
 225 
// Close off 'current' and set up 'next' to continue where it ends,
// via DumpRegion::pack().
// NOTE(review): the ReservedSpace argument is unused in this body —
// confirm whether 'rs' can be dropped from the signature.
void MetaspaceShared::pack_dump_space(DumpRegion* current, DumpRegion* next,
                                      ReservedSpace* rs) {
  current->pack(next);
}
 230 
// Allocate num_bytes from the symbol dump region.
char* MetaspaceShared::symbol_space_alloc(size_t num_bytes) {
  return _symbol_region.allocate(num_bytes);
}
 234 
// Allocate num_bytes from the mc (misc code) dump region.
char* MetaspaceShared::misc_code_space_alloc(size_t num_bytes) {
  return _mc_region.allocate(num_bytes);
}
 238 
// Allocate num_bytes from the ro (read-only) dump region.
char* MetaspaceShared::read_only_space_alloc(size_t num_bytes) {
  return _ro_region.allocate(num_bytes);
}
 242 




// Alignment required when reserving the shared space: the OS allocation granularity.
size_t MetaspaceShared::reserved_space_alignment() { return os::vm_allocation_granularity(); }
 244 
// Returns true if 'shared_base' is an acceptable base address for mapping the
// shared archive. On 64-bit it must also be a valid base for the compressed
// Klass pointer encoding; on 32-bit any address is acceptable.
static bool shared_base_valid(char* shared_base) {
#ifdef _LP64
  return CompressedKlassPointers::is_valid_base((address)shared_base);
#else
  return true;
#endif
}
 252 
 253 static bool shared_base_too_high(char* shared_base, size_t cds_total) {
 254   if (SharedBaseAddress != 0 && shared_base < (char*)SharedBaseAddress) {
 255     // SharedBaseAddress is very high (e.g., 0xffffffffffffff00) so
 256     // align_up(SharedBaseAddress, MetaspaceShared::reserved_space_alignment()) has wrapped around.
 257     return true;
 258   }
 259   if (max_uintx - uintx(shared_base) < uintx(cds_total)) {
 260     // The end of the archive will wrap around
 261     return true;
 262   }


 572 
 573   // Dump/restore references to commonly used names and signatures.
 574   vmSymbols::serialize(soc);
 575   soc->do_tag(--tag);
 576 
 577   // Dump/restore the symbol/string/subgraph_info tables
 578   SymbolTable::serialize_shared_table_header(soc);
 579   StringTable::serialize_shared_table_header(soc);
 580   HeapShared::serialize_subgraph_info_table_header(soc);
 581   SystemDictionaryShared::serialize_dictionary_headers(soc);
 582 
 583   InstanceMirrorKlass::serialize_offsets(soc);
 584 
 585   // Dump/restore well known classes (pointers)
 586   SystemDictionaryShared::serialize_well_known_klasses(soc);
 587   soc->do_tag(--tag);
 588 
 589   serialize_cloned_cpp_vtptrs(soc);
 590   soc->do_tag(--tag);
 591 


 592   soc->do_tag(666);
 593 }
 594 
 595 address MetaspaceShared::i2i_entry_code_buffers(size_t total_size) {
 596   if (DumpSharedSpaces) {
 597     if (_i2i_entry_code_buffers == NULL) {
 598       _i2i_entry_code_buffers = (address)misc_code_space_alloc(total_size);
 599       _i2i_entry_code_buffers_size = total_size;
 600     }
 601   } else if (UseSharedSpaces) {
 602     assert(_i2i_entry_code_buffers != NULL, "must already been initialized");
 603   } else {
 604     return NULL;
 605   }
 606 
 607   assert(_i2i_entry_code_buffers_size == total_size, "must not change");
 608   return _i2i_entry_code_buffers;
 609 }
 610 
 611 uintx MetaspaceShared::object_delta_uintx(void* obj) {


1066   do_tag((int)size);
1067   while (size > 0) {
1068     _dump_region->append_intptr_t(*(intptr_t*)start, true);
1069     start += sizeof(intptr_t);
1070     size -= sizeof(intptr_t);
1071   }
1072 }
1073 
1074 // This is for dumping detailed statistics for the allocations
1075 // in the shared spaces.
class DumpAllocStats : public ResourceObj {
public:

  // Here's poor man's enum inheritance: extend MetaspaceObj's object-type
  // list with CDS-specific categories -- the hash entries/buckets of the
  // archived symbol and string tables, plus a catch-all "Other".
#define SHAREDSPACE_OBJ_TYPES_DO(f) \
  METASPACE_OBJ_TYPES_DO(f) \
  f(SymbolHashentry) \
  f(SymbolBucket) \
  f(StringHashentry) \
  f(StringBucket) \
  f(Other)

  enum Type {
    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
    SHAREDSPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
    _number_of_types
  };

  // Human-readable name of a Type, for the printed report.
  static const char * type_name(Type type) {
    switch(type) {
    SHAREDSPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }

public:
  // First index of _counts/_bytes: destination region of the allocation.
  enum { RO = 0, RW = 1 };

  int _counts[2][_number_of_types];  // object count per region and type
  int _bytes [2][_number_of_types];  // byte total per region and type

  DumpAllocStats() {
    memset(_counts, 0, sizeof(_counts));
    memset(_bytes,  0, sizeof(_bytes));
  };

  // Account one allocation of a base MetaspaceObj type. The CDS-specific
  // types are filled in directly by print_stats() instead.
  void record(MetaspaceObj::Type type, int byte_size, bool read_only) {
    assert(int(type) >= 0 && type < MetaspaceObj::_number_of_types, "sanity");
    int which = (read_only) ? RO : RW;
    _counts[which][type] ++;
    _bytes [which][type] += byte_size;
  }

  // Account bytes with no specific type; no per-object count is kept.
  void record_other_type(int byte_size, bool read_only) {
    int which = (read_only) ? RO : RW;
    _bytes [which][OtherType] += byte_size;
  }
  void print_stats(int ro_all, int rw_all, int mc_all);
};
1127 
1128 void DumpAllocStats::print_stats(int ro_all, int rw_all, int mc_all) {
1129   // Calculate size of data that was not allocated by Metaspace::allocate()
1130   MetaspaceSharedStats *stats = MetaspaceShared::stats();
1131 
1132   // symbols
1133   _counts[RO][SymbolHashentryType] = stats->symbol.hashentry_count;
1134   _bytes [RO][SymbolHashentryType] = stats->symbol.hashentry_bytes;
1135 
1136   _counts[RO][SymbolBucketType] = stats->symbol.bucket_count;
1137   _bytes [RO][SymbolBucketType] = stats->symbol.bucket_bytes;
1138 
1139   // strings
1140   _counts[RO][StringHashentryType] = stats->string.hashentry_count;


1390                "must be relocated to point to CDS archive");
1391       }
1392       return false; // Do not recurse.
1393     }
1394   };
1395 #endif
1396 
1397 public:
1398   static void copy_and_compact() {
1399     ResourceMark rm;
1400 
1401     log_info(cds)("Scanning all metaspace objects ... ");
1402     {
1403       // allocate and shallow-copy RW objects, immediately following the MC region
1404       log_info(cds)("Allocating RW objects ... ");
1405       _mc_region.pack(&_rw_region);
1406 
1407       ResourceMark rm;
1408       ShallowCopier rw_copier(false);
1409       iterate_roots(&rw_copier);







1410     }
1411     {
1412       // allocate and shallow-copy of RO object, immediately following the RW region
1413       log_info(cds)("Allocating RO objects ... ");
1414       _rw_region.pack(&_ro_region);
1415 
1416       ResourceMark rm;
1417       ShallowCopier ro_copier(true);
1418       iterate_roots(&ro_copier);





1419     }
1420     {
1421       log_info(cds)("Relocating embedded pointers ... ");
1422       ResourceMark rm;
1423       ShallowCopyEmbeddedRefRelocator emb_reloc;
1424       iterate_roots(&emb_reloc);
1425     }
1426     {
1427       log_info(cds)("Relocating external roots ... ");
1428       ResourceMark rm;
1429       RefRelocator ext_reloc;
1430       iterate_roots(&ext_reloc);
1431     }
1432     {
1433       log_info(cds)("Fixing symbol identity hash ... ");
1434       os::init_random(0x12345678);
1435       GrowableArray<Symbol*>* all_symbols = MetaspaceShared::collected_symbols();
1436       all_symbols->sort(compare_symbols_by_address);
1437       for (int i = 0; i < all_symbols->length(); i++) {
1438         assert(all_symbols->at(i)->is_permanent(), "archived symbols must be permanent");


1493         it->push(_global_klass_objects->adr_at(i));
1494       }
1495     }
1496     FileMapInfo::metaspace_pointers_do(it, false);
1497     SystemDictionaryShared::dumptime_classes_do(it);
1498     Universe::metaspace_pointers_do(it);
1499     SymbolTable::metaspace_pointers_do(it);
1500     vmSymbols::metaspace_pointers_do(it);
1501 
1502     it->finish();
1503   }
1504 
  // Dump time only: map a Klass* in its original location to the address of
  // its shallow copy inside the shared regions, via _new_loc_table.
  static Klass* get_relocated_klass(Klass* orig_klass) {
    assert(DumpSharedSpaces, "dump time only");
    address* pp = _new_loc_table->lookup((address)orig_klass);
    assert(pp != NULL, "must be");
    Klass* klass = (Klass*)(*pp);
    assert(klass->is_klass(), "must be");
    return klass;
  }







1513 };
1514 
1515 DumpAllocStats* ArchiveCompactor::_alloc_stats;
1516 ArchiveCompactor::RelocationTable* ArchiveCompactor::_new_loc_table;
1517 
// Write the symbol table into the shared archive.
void VM_PopulateDumpSharedSpace::dump_symbols() {
  log_info(cds)("Dumping symbol table ...");

  NOT_PRODUCT(SymbolTable::verify());  // sanity-check the table in debug builds
  SymbolTable::write_to_archive();
}
1524 
1525 char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
1526   ArchiveCompactor::OtherROAllocMark mark;
1527 
1528   log_info(cds)("Removing java_mirror ... ");
1529   if (!HeapShared::is_heap_object_archiving_allowed()) {
1530     Universe::clear_basic_type_mirrors();
1531   }
1532   remove_java_mirror_in_classes();


1797   mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
1798 }
1799 
// Update a Java object to point its Klass* to the new location after
// shared archive has been compacted. Dump time only.
void MetaspaceShared::relocate_klass_ptr(oop o) {
  assert(DumpSharedSpaces, "sanity");
  Klass* k = ArchiveCompactor::get_relocated_klass(o->klass());
  o->set_klass(k);
}
1807 
1808 Klass* MetaspaceShared::get_relocated_klass(Klass *k, bool is_final) {
1809   assert(DumpSharedSpaces, "sanity");
1810   k = ArchiveCompactor::get_relocated_klass(k);
1811   if (is_final) {
1812     k = (Klass*)(address(k) + final_delta());
1813   }
1814   return k;
1815 }
1816 




1817 class LinkSharedClassesClosure : public KlassClosure {
1818   Thread* THREAD;
1819   bool    _made_progress;
1820  public:
1821   LinkSharedClassesClosure(Thread* thread) : THREAD(thread), _made_progress(false) {}
1822 
1823   void reset()               { _made_progress = false; }
1824   bool made_progress() const { return _made_progress; }
1825 
1826   void do_klass(Klass* k) {
1827     if (k->is_instance_klass()) {
1828       InstanceKlass* ik = InstanceKlass::cast(k);
1829       // For dynamic CDS dump, only link classes loaded by the builtin class loaders.
1830       bool do_linking = DumpSharedSpaces ? true : !ik->is_shared_unregistered_class();
1831       if (do_linking) {
1832         // Link the class to cause the bytecodes to be rewritten and the
1833         // cpcache to be created. Class verification is done according
1834         // to -Xverify setting.
1835         _made_progress |= MetaspaceShared::try_link_class(ik, THREAD);
1836         guarantee(!HAS_PENDING_EXCEPTION, "exception in link_class");


1911     log_info(cds)("Shared spaces: preloaded %d classes", class_count);
1912 
1913     if (SharedArchiveConfigFile) {
1914       log_info(cds)("Reading extra data from %s ...", SharedArchiveConfigFile);
1915       read_extra_data(SharedArchiveConfigFile, THREAD);
1916     }
1917     log_info(cds)("Reading extra data: done.");
1918 
1919     HeapShared::init_subgraph_entry_fields(THREAD);
1920 
1921     // Rewrite and link classes
1922     log_info(cds)("Rewriting and linking classes ...");
1923 
1924     // Link any classes which got missed. This would happen if we have loaded classes that
1925     // were not explicitly specified in the classlist. E.g., if an interface implemented by class K
1926     // fails verification, all other interfaces that were not specified in the classlist but
1927     // are implemented by K are not verified.
1928     link_and_cleanup_shared_classes(CATCH);
1929     log_info(cds)("Rewriting and linking classes: done");
1930 






1931     VM_PopulateDumpSharedSpace op;
1932     MutexLocker ml(THREAD, HeapShared::is_heap_object_archiving_allowed() ?
1933                    Heap_lock : NULL);     // needed by HeapShared::run_gc()
1934     VMThread::execute(&op);
1935   }
1936 }
1937 
1938 
1939 int MetaspaceShared::preload_classes(const char* class_list_path, TRAPS) {
1940   ClassListParser parser(class_list_path);
1941   int class_count = 0;
1942 
1943   while (parser.parse_one_line()) {
1944     Klass* klass = parser.load_current_class(THREAD);
1945     if (HAS_PENDING_EXCEPTION) {
1946       if (klass == NULL &&
1947           (PENDING_EXCEPTION->klass()->name() == vmSymbols::java_lang_ClassNotFoundException())) {
1948         // print a warning only when the pending exception is class not found
1949         log_warning(cds)("Preload Warning: Cannot find %s", parser.current_class_name());
1950       }


2328   }
2329 
2330   if (result == MAP_ARCHIVE_SUCCESS) {
2331     SharedBaseAddress = (size_t)mapped_base_address;
2332     LP64_ONLY({
2333         if (Metaspace::using_class_space()) {
2334           // Set up ccs in metaspace.
2335           Metaspace::initialize_class_space(class_space_rs);
2336 
2337           // Set up compressed Klass pointer encoding: the encoding range must
2338           //  cover both archive and class space.
2339           address cds_base = (address)static_mapinfo->mapped_base();
2340           address ccs_end = (address)class_space_rs.end();
2341           CompressedKlassPointers::initialize(cds_base, ccs_end - cds_base);
2342 
2343           // map_heap_regions() compares the current narrow oop and klass encodings
2344           // with the archived ones, so it must be done after all encodings are determined.
2345           static_mapinfo->map_heap_regions();
2346         }
2347       });
2348     log_info(cds)("Using optimized module handling %s", MetaspaceShared::use_optimized_module_handling() ? "enabled" : "disabled");

2349   } else {
2350     unmap_archive(static_mapinfo);
2351     unmap_archive(dynamic_mapinfo);
2352     release_reserved_spaces(archive_space_rs, class_space_rs);
2353   }
2354 
2355   return result;
2356 }
2357 
2358 
2359 // This will reserve two address spaces suitable to house Klass structures, one
2360 //  for the cds archives (static archive and optionally dynamic archive) and
2361 //  optionally one more for ccs.
2362 //
2363 // Since both spaces must fall within the compressed class pointer encoding
2364 //  range, they are allocated close to each other.
2365 //
2366 // Space for archives will be reserved first, followed by a potential gap,
2367 //  followed by the space for ccs:
2368 //


2653 
// Report which dump region ran out of space, print the usage of all three
// core regions, and abort VM initialization.
void MetaspaceShared::report_out_of_space(const char* name, size_t needed_bytes) {
  // This is highly unlikely to happen on 64-bits because we have reserved a 4GB space.
  // On 32-bit we reserve only 256MB so you could run out of space with 100,000 classes
  // or so.
  _mc_region.print_out_of_space_msg(name, needed_bytes);
  _rw_region.print_out_of_space_msg(name, needed_bytes);
  _ro_region.print_out_of_space_msg(name, needed_bytes);

  vm_exit_during_initialization(err_msg("Unable to allocate from '%s' region", name),
                                "Please reduce the number of shared classes.");
}
2665 
// This is used to relocate the pointers so that the base archive can be mapped at
// MetaspaceShared::requested_base_address() without runtime relocation.
// Adding this delta to a dump-time pointer yields the address the same data
// will have when the archive is mapped at the requested base at runtime.
intx MetaspaceShared::final_delta() {
  return intx(MetaspaceShared::requested_base_address())  // We want the base archive to be mapped to here at runtime
       - intx(SharedBaseAddress);                         // .. but the base archive is mapped at here at dump time
}
2672 





// Print the CDS mapping state to 'st': the mapped address range at runtime,
// the reserved range at dump time, or "CDS disabled." otherwise.
void MetaspaceShared::print_on(outputStream* st) {
  if (UseSharedSpaces || DumpSharedSpaces) {
    st->print("CDS archive(s) mapped at: ");
    address base;
    address top;
    if (UseSharedSpaces) { // Runtime
      base = (address)MetaspaceObj::shared_metaspace_base();
      address static_top = (address)_shared_metaspace_static_top;
      top = (address)MetaspaceObj::shared_metaspace_top();
      // Three-part range: [base - end of static archive - end of all archives)
      st->print("[" PTR_FORMAT "-" PTR_FORMAT "-" PTR_FORMAT "), ", p2i(base), p2i(static_top), p2i(top));
    } else if (DumpSharedSpaces) { // Dump Time
      base = (address)_shared_rs.base();
      top = (address)_shared_rs.end();
      st->print("[" PTR_FORMAT "-" PTR_FORMAT "), ", p2i(base), p2i(top));
    }
    // Exactly one of the two branches above ran (outer condition), so
    // base/top are initialized here.
    st->print("size " SIZE_FORMAT ", ", top - base);
    st->print("SharedBaseAddress: " PTR_FORMAT ", ArchiveRelocationMode: %d.", SharedBaseAddress, (int)ArchiveRelocationMode);
  } else {
    st->print("CDS disabled.");
  }
  st->cr();
}
2695 
2696 
2697 
2698 
2699 


   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "classfile/classLoaderDataGraph.hpp"
  28 #include "classfile/classLoaderDataShared.hpp"
  29 #include "classfile/classListParser.hpp"
  30 #include "classfile/classLoaderExt.hpp"
  31 #include "classfile/dictionary.hpp"
  32 #include "classfile/loaderConstraints.hpp"
  33 #include "classfile/javaClasses.inline.hpp"
  34 #include "classfile/placeholders.hpp"
  35 #include "classfile/symbolTable.hpp"
  36 #include "classfile/stringTable.hpp"
  37 #include "classfile/systemDictionary.hpp"
  38 #include "classfile/systemDictionaryShared.hpp"
  39 #include "code/codeCache.hpp"
  40 #include "gc/shared/softRefPolicy.hpp"
  41 #include "interpreter/bytecodeStream.hpp"
  42 #include "interpreter/bytecodes.hpp"
  43 #include "logging/log.hpp"
  44 #include "logging/logMessage.hpp"
  45 #include "memory/archiveUtils.inline.hpp"
  46 #include "memory/dynamicArchive.hpp"
  47 #include "memory/filemap.hpp"
  48 #include "memory/heapShared.inline.hpp"


  74 #include "utilities/defaultStream.hpp"
  75 #include "utilities/hashtable.inline.hpp"
  76 #if INCLUDE_G1GC
  77 #include "gc/g1/g1CollectedHeap.hpp"
  78 #endif
  79 
  80 ReservedSpace MetaspaceShared::_shared_rs;
  81 VirtualSpace MetaspaceShared::_shared_vs;
  82 ReservedSpace MetaspaceShared::_symbol_rs;
  83 VirtualSpace MetaspaceShared::_symbol_vs;
  84 MetaspaceSharedStats MetaspaceShared::_stats;
  85 bool MetaspaceShared::_has_error_classes;
  86 bool MetaspaceShared::_archive_loading_failed = false;
  87 bool MetaspaceShared::_remapped_readwrite = false;
  88 address MetaspaceShared::_i2i_entry_code_buffers = NULL;
  89 size_t MetaspaceShared::_i2i_entry_code_buffers_size = 0;
  90 void* MetaspaceShared::_shared_metaspace_static_top = NULL;
  91 intx MetaspaceShared::_relocation_delta;
  92 char* MetaspaceShared::_requested_base_address;
  93 bool MetaspaceShared::_use_optimized_module_handling = true;
  94 bool MetaspaceShared::_use_full_module_graph = true;
  95 
  96 // The CDS archive is divided into the following regions:
  97 //     mc  - misc code (the method entry trampolines, c++ vtables)
  98 //     rw  - read-write metadata
  99 //     ro  - read-only metadata and read-only tables
 100 //
 101 //     ca0 - closed archive heap space #0
 102 //     ca1 - closed archive heap space #1 (may be empty)
 103 //     oa0 - open archive heap space #0
 104 //     oa1 - open archive heap space #1 (may be empty)
 105 //
 106 // The mc, rw, and ro regions are linearly allocated, starting from
 107 // SharedBaseAddress, in the order of mc->rw->ro. The size of these 3 regions
 108 // are page-aligned, and there's no gap between any consecutive regions.
 109 //
 110 // These 3 regions are populated in the following steps:
 111 // [1] All classes are loaded in MetaspaceShared::preload_classes(). All metadata are
 112 //     temporarily allocated outside of the shared regions. Only the method entry
 113 //     trampolines are written into the mc region.
 114 // [2] C++ vtables are copied into the mc region.


 225   return &_ro_region;
 226 }
 227 
// Close off 'current' and set up 'next' to continue where it ends,
// via DumpRegion::pack().
// NOTE(review): the ReservedSpace argument is unused in this body —
// confirm whether 'rs' can be dropped from the signature.
void MetaspaceShared::pack_dump_space(DumpRegion* current, DumpRegion* next,
                                      ReservedSpace* rs) {
  current->pack(next);
}
 232 
// Allocate num_bytes from the symbol dump region.
char* MetaspaceShared::symbol_space_alloc(size_t num_bytes) {
  return _symbol_region.allocate(num_bytes);
}
 236 
// Allocate num_bytes from the mc (misc code) dump region.
char* MetaspaceShared::misc_code_space_alloc(size_t num_bytes) {
  return _mc_region.allocate(num_bytes);
}
 240 
// Allocate num_bytes from the ro (read-only) dump region.
char* MetaspaceShared::read_only_space_alloc(size_t num_bytes) {
  return _ro_region.allocate(num_bytes);
}
 244 
// Allocate num_bytes from the rw (read-write) dump region.
char* MetaspaceShared::read_write_space_alloc(size_t num_bytes) {
  return _rw_region.allocate(num_bytes);
}
 248 
// Alignment required when reserving the shared space: the OS allocation granularity.
size_t MetaspaceShared::reserved_space_alignment() { return os::vm_allocation_granularity(); }
 250 
// Returns true if 'shared_base' is an acceptable base address for mapping the
// shared archive. On 64-bit it must also be a valid base for the compressed
// Klass pointer encoding; on 32-bit any address is acceptable.
static bool shared_base_valid(char* shared_base) {
#ifdef _LP64
  return CompressedKlassPointers::is_valid_base((address)shared_base);
#else
  return true;
#endif
}
 258 
 259 static bool shared_base_too_high(char* shared_base, size_t cds_total) {
 260   if (SharedBaseAddress != 0 && shared_base < (char*)SharedBaseAddress) {
 261     // SharedBaseAddress is very high (e.g., 0xffffffffffffff00) so
 262     // align_up(SharedBaseAddress, MetaspaceShared::reserved_space_alignment()) has wrapped around.
 263     return true;
 264   }
 265   if (max_uintx - uintx(shared_base) < uintx(cds_total)) {
 266     // The end of the archive will wrap around
 267     return true;
 268   }


 578 
 579   // Dump/restore references to commonly used names and signatures.
 580   vmSymbols::serialize(soc);
 581   soc->do_tag(--tag);
 582 
 583   // Dump/restore the symbol/string/subgraph_info tables
 584   SymbolTable::serialize_shared_table_header(soc);
 585   StringTable::serialize_shared_table_header(soc);
 586   HeapShared::serialize_subgraph_info_table_header(soc);
 587   SystemDictionaryShared::serialize_dictionary_headers(soc);
 588 
 589   InstanceMirrorKlass::serialize_offsets(soc);
 590 
 591   // Dump/restore well known classes (pointers)
 592   SystemDictionaryShared::serialize_well_known_klasses(soc);
 593   soc->do_tag(--tag);
 594 
 595   serialize_cloned_cpp_vtptrs(soc);
 596   soc->do_tag(--tag);
 597 
 598   CDS_JAVA_HEAP_ONLY(ClassLoaderDataShared::serialize(soc));
 599 
 600   soc->do_tag(666);
 601 }
 602 
// Return the buffer holding the interpreter-to-interpreter method entry
// trampolines. Returns NULL when CDS is neither being dumped nor used.
address MetaspaceShared::i2i_entry_code_buffers(size_t total_size) {
  if (DumpSharedSpaces) {
    // Dump time: lazily carve the buffer out of the mc region on first use.
    if (_i2i_entry_code_buffers == NULL) {
      _i2i_entry_code_buffers = (address)misc_code_space_alloc(total_size);
      _i2i_entry_code_buffers_size = total_size;
    }
  } else if (UseSharedSpaces) {
    // Runtime: must already have been set up when the archive was mapped.
    assert(_i2i_entry_code_buffers != NULL, "must already been initialized");
  } else {
    return NULL;
  }

  // Callers must always request the same size as the initial allocation.
  assert(_i2i_entry_code_buffers_size == total_size, "must not change");
  return _i2i_entry_code_buffers;
}
 618 
 619 uintx MetaspaceShared::object_delta_uintx(void* obj) {


1074   do_tag((int)size);
1075   while (size > 0) {
1076     _dump_region->append_intptr_t(*(intptr_t*)start, true);
1077     start += sizeof(intptr_t);
1078     size -= sizeof(intptr_t);
1079   }
1080 }
1081 
1082 // This is for dumping detailed statistics for the allocations
1083 // in the shared spaces.
class DumpAllocStats : public ResourceObj {
public:

  // Here's poor man's enum inheritance: extend MetaspaceObj's object-type
  // list with CDS-specific categories -- the hash entries/buckets of the
  // archived symbol and string tables, the archived module/package native
  // data, plus a catch-all "Other".
#define SHAREDSPACE_OBJ_TYPES_DO(f) \
  METASPACE_OBJ_TYPES_DO(f) \
  f(SymbolHashentry) \
  f(SymbolBucket) \
  f(StringHashentry) \
  f(StringBucket) \
  f(ModulesNatives) \
  f(Other)

  enum Type {
    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
    SHAREDSPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
    _number_of_types
  };

  // Human-readable name of a Type, for the printed report.
  static const char * type_name(Type type) {
    switch(type) {
    SHAREDSPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }

public:
  // First index of _counts/_bytes: destination region of the allocation.
  enum { RO = 0, RW = 1 };

  int _counts[2][_number_of_types];  // object count per region and type
  int _bytes [2][_number_of_types];  // byte total per region and type

  DumpAllocStats() {
    memset(_counts, 0, sizeof(_counts));
    memset(_bytes,  0, sizeof(_bytes));
  };

  // Account one allocation of a base MetaspaceObj type. The CDS-specific
  // types are filled in by record_modules() and print_stats() instead.
  void record(MetaspaceObj::Type type, int byte_size, bool read_only) {
    assert(int(type) >= 0 && type < MetaspaceObj::_number_of_types, "sanity");
    int which = (read_only) ? RO : RW;
    _counts[which][type] ++;
    _bytes [which][type] += byte_size;
  }

  // Account bytes used by the archived module/package tables
  // (no per-object count is kept).
  void record_modules(int byte_size, bool read_only) {
    int which = (read_only) ? RO : RW;
    _bytes [which][ModulesNativesType] += byte_size;
  }

  // Account bytes with no specific type; no per-object count is kept.
  void record_other_type(int byte_size, bool read_only) {
    int which = (read_only) ? RO : RW;
    _bytes [which][OtherType] += byte_size;
  }
  void print_stats(int ro_all, int rw_all, int mc_all);
};
1141 
1142 void DumpAllocStats::print_stats(int ro_all, int rw_all, int mc_all) {
1143   // Calculate size of data that was not allocated by Metaspace::allocate()
1144   MetaspaceSharedStats *stats = MetaspaceShared::stats();
1145 
1146   // symbols
1147   _counts[RO][SymbolHashentryType] = stats->symbol.hashentry_count;
1148   _bytes [RO][SymbolHashentryType] = stats->symbol.hashentry_bytes;
1149 
1150   _counts[RO][SymbolBucketType] = stats->symbol.bucket_count;
1151   _bytes [RO][SymbolBucketType] = stats->symbol.bucket_bytes;
1152 
1153   // strings
1154   _counts[RO][StringHashentryType] = stats->string.hashentry_count;


1404                "must be relocated to point to CDS archive");
1405       }
1406       return false; // Do not recurse.
1407     }
1408   };
1409 #endif
1410 
1411 public:
1412   static void copy_and_compact() {
1413     ResourceMark rm;
1414 
1415     log_info(cds)("Scanning all metaspace objects ... ");
1416     {
1417       // allocate and shallow-copy RW objects, immediately following the MC region
1418       log_info(cds)("Allocating RW objects ... ");
1419       _mc_region.pack(&_rw_region);
1420 
1421       ResourceMark rm;
1422       ShallowCopier rw_copier(false);
1423       iterate_roots(&rw_copier);
1424 
1425 #if INCLUDE_CDS_JAVA_HEAP
1426       // Archive the ModuleEntry's and PackageEntry's of the 3 built-in loaders
1427       char* start = _rw_region.top();
1428       ClassLoaderDataShared::allocate_archived_tables();
1429       ArchiveCompactor::alloc_stats()->record_modules(_rw_region.top() - start, /*read_only*/false);
1430 #endif
1431     }
1432     {
1433       // allocate and shallow-copy of RO object, immediately following the RW region
1434       log_info(cds)("Allocating RO objects ... ");
1435       _rw_region.pack(&_ro_region);
1436 
1437       ResourceMark rm;
1438       ShallowCopier ro_copier(true);
1439       iterate_roots(&ro_copier);
1440 #if INCLUDE_CDS_JAVA_HEAP
1441       char* start = _ro_region.top();
1442       ClassLoaderDataShared::init_archived_tables();
1443       ArchiveCompactor::alloc_stats()->record_modules(_ro_region.top() - start, /*read_only*/true);
1444 #endif
1445     }
1446     {
1447       log_info(cds)("Relocating embedded pointers ... ");
1448       ResourceMark rm;
1449       ShallowCopyEmbeddedRefRelocator emb_reloc;
1450       iterate_roots(&emb_reloc);
1451     }
1452     {
1453       log_info(cds)("Relocating external roots ... ");
1454       ResourceMark rm;
1455       RefRelocator ext_reloc;
1456       iterate_roots(&ext_reloc);
1457     }
1458     {
1459       log_info(cds)("Fixing symbol identity hash ... ");
1460       os::init_random(0x12345678);
1461       GrowableArray<Symbol*>* all_symbols = MetaspaceShared::collected_symbols();
1462       all_symbols->sort(compare_symbols_by_address);
1463       for (int i = 0; i < all_symbols->length(); i++) {
1464         assert(all_symbols->at(i)->is_permanent(), "archived symbols must be permanent");


1519         it->push(_global_klass_objects->adr_at(i));
1520       }
1521     }
1522     FileMapInfo::metaspace_pointers_do(it, false);
1523     SystemDictionaryShared::dumptime_classes_do(it);
1524     Universe::metaspace_pointers_do(it);
1525     SymbolTable::metaspace_pointers_do(it);
1526     vmSymbols::metaspace_pointers_do(it);
1527 
1528     it->finish();
1529   }
1530 
1531   static Klass* get_relocated_klass(Klass* orig_klass) {
1532     assert(DumpSharedSpaces, "dump time only");
1533     address* pp = _new_loc_table->lookup((address)orig_klass);
1534     assert(pp != NULL, "must be");
1535     Klass* klass = (Klass*)(*pp);
1536     assert(klass->is_klass(), "must be");
1537     return klass;
1538   }
1539 
1540   static Symbol* get_relocated_symbol(Symbol* orig_symbol) {
1541     assert(DumpSharedSpaces, "dump time only");
1542     address* pp = _new_loc_table->lookup((address)orig_symbol);
1543     assert(pp != NULL, "must be");
1544     return (Symbol*)(*pp);
1545   }
1546 };
1547 
// Out-of-line definitions of ArchiveCompactor's static members.
DumpAllocStats* ArchiveCompactor::_alloc_stats;
ArchiveCompactor::RelocationTable* ArchiveCompactor::_new_loc_table;
1550 
// Write the SymbolTable into the archive. In non-product builds the table is
// verified first; in product builds NOT_PRODUCT() compiles the check away.
void VM_PopulateDumpSharedSpace::dump_symbols() {
  log_info(cds)("Dumping symbol table ...");

  NOT_PRODUCT(SymbolTable::verify());
  SymbolTable::write_to_archive();
}
1557 
1558 char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
1559   ArchiveCompactor::OtherROAllocMark mark;
1560 
1561   log_info(cds)("Removing java_mirror ... ");
1562   if (!HeapShared::is_heap_object_archiving_allowed()) {
1563     Universe::clear_basic_type_mirrors();
1564   }
1565   remove_java_mirror_in_classes();


1830   mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
1831 }
1832 
1833 // Update a Java object to point its Klass* to the new location after
1834 // shared archive has been compacted.
1835 void MetaspaceShared::relocate_klass_ptr(oop o) {
1836   assert(DumpSharedSpaces, "sanity");
1837   Klass* k = ArchiveCompactor::get_relocated_klass(o->klass());
1838   o->set_klass(k);
1839 }
1840 
1841 Klass* MetaspaceShared::get_relocated_klass(Klass *k, bool is_final) {
1842   assert(DumpSharedSpaces, "sanity");
1843   k = ArchiveCompactor::get_relocated_klass(k);
1844   if (is_final) {
1845     k = (Klass*)(address(k) + final_delta());
1846   }
1847   return k;
1848 }
1849 
// Thin forwarder so code outside this file can reach ArchiveCompactor's
// dump-time symbol relocation table.
Symbol* MetaspaceShared::get_relocated_symbol(Symbol* orig_symbol) {
  return ArchiveCompactor::get_relocated_symbol(orig_symbol);
}
1853 
1854 class LinkSharedClassesClosure : public KlassClosure {
1855   Thread* THREAD;
1856   bool    _made_progress;
1857  public:
1858   LinkSharedClassesClosure(Thread* thread) : THREAD(thread), _made_progress(false) {}
1859 
1860   void reset()               { _made_progress = false; }
1861   bool made_progress() const { return _made_progress; }
1862 
1863   void do_klass(Klass* k) {
1864     if (k->is_instance_klass()) {
1865       InstanceKlass* ik = InstanceKlass::cast(k);
1866       // For dynamic CDS dump, only link classes loaded by the builtin class loaders.
1867       bool do_linking = DumpSharedSpaces ? true : !ik->is_shared_unregistered_class();
1868       if (do_linking) {
1869         // Link the class to cause the bytecodes to be rewritten and the
1870         // cpcache to be created. Class verification is done according
1871         // to -Xverify setting.
1872         _made_progress |= MetaspaceShared::try_link_class(ik, THREAD);
1873         guarantee(!HAS_PENDING_EXCEPTION, "exception in link_class");


1948     log_info(cds)("Shared spaces: preloaded %d classes", class_count);
1949 
1950     if (SharedArchiveConfigFile) {
1951       log_info(cds)("Reading extra data from %s ...", SharedArchiveConfigFile);
1952       read_extra_data(SharedArchiveConfigFile, THREAD);
1953     }
1954     log_info(cds)("Reading extra data: done.");
1955 
1956     HeapShared::init_subgraph_entry_fields(THREAD);
1957 
1958     // Rewrite and link classes
1959     log_info(cds)("Rewriting and linking classes ...");
1960 
1961     // Link any classes which got missed. This would happen if we have loaded classes that
1962     // were not explicitly specified in the classlist. E.g., if an interface implemented by class K
1963     // fails verification, all other interfaces that were not specified in the classlist but
1964     // are implemented by K are not verified.
1965     link_and_cleanup_shared_classes(CATCH);
1966     log_info(cds)("Rewriting and linking classes: done");
1967 
1968 #if INCLUDE_CDS_JAVA_HEAP
1969     if (use_full_module_graph()) {
1970       HeapShared::reset_archived_object_states(THREAD);
1971     }
1972 #endif
1973 
1974     VM_PopulateDumpSharedSpace op;
1975     MutexLocker ml(THREAD, HeapShared::is_heap_object_archiving_allowed() ?
1976                    Heap_lock : NULL);     // needed by HeapShared::run_gc()
1977     VMThread::execute(&op);
1978   }
1979 }
1980 
1981 
1982 int MetaspaceShared::preload_classes(const char* class_list_path, TRAPS) {
1983   ClassListParser parser(class_list_path);
1984   int class_count = 0;
1985 
1986   while (parser.parse_one_line()) {
1987     Klass* klass = parser.load_current_class(THREAD);
1988     if (HAS_PENDING_EXCEPTION) {
1989       if (klass == NULL &&
1990           (PENDING_EXCEPTION->klass()->name() == vmSymbols::java_lang_ClassNotFoundException())) {
1991         // print a warning only when the pending exception is class not found
1992         log_warning(cds)("Preload Warning: Cannot find %s", parser.current_class_name());
1993       }


2371   }
2372 
2373   if (result == MAP_ARCHIVE_SUCCESS) {
2374     SharedBaseAddress = (size_t)mapped_base_address;
2375     LP64_ONLY({
2376         if (Metaspace::using_class_space()) {
2377           // Set up ccs in metaspace.
2378           Metaspace::initialize_class_space(class_space_rs);
2379 
2380           // Set up compressed Klass pointer encoding: the encoding range must
2381           //  cover both archive and class space.
2382           address cds_base = (address)static_mapinfo->mapped_base();
2383           address ccs_end = (address)class_space_rs.end();
2384           CompressedKlassPointers::initialize(cds_base, ccs_end - cds_base);
2385 
2386           // map_heap_regions() compares the current narrow oop and klass encodings
2387           // with the archived ones, so it must be done after all encodings are determined.
2388           static_mapinfo->map_heap_regions();
2389         }
2390       });
2391     log_info(cds)("optimized module handling: %s", MetaspaceShared::use_optimized_module_handling() ? "enabled" : "disabled");
2392     log_info(cds)("full module graph: %s", MetaspaceShared::use_full_module_graph() ? "enabled" : "disabled");
2393   } else {
2394     unmap_archive(static_mapinfo);
2395     unmap_archive(dynamic_mapinfo);
2396     release_reserved_spaces(archive_space_rs, class_space_rs);
2397   }
2398 
2399   return result;
2400 }
2401 
2402 
2403 // This will reserve two address spaces suitable to house Klass structures, one
2404 //  for the cds archives (static archive and optionally dynamic archive) and
//  optionally one more for ccs.
2406 //
2407 // Since both spaces must fall within the compressed class pointer encoding
2408 //  range, they are allocated close to each other.
2409 //
2410 // Space for archives will be reserved first, followed by a potential gap,
2411 //  followed by the space for ccs:
2412 //


2697 
// Called when a dump region runs out of space while writing the archive.
// Prints diagnostic state for the MC/RW/RO regions and then terminates the
// VM with an error message naming the exhausted region.
void MetaspaceShared::report_out_of_space(const char* name, size_t needed_bytes) {
  // This is highly unlikely to happen on 64-bits because we have reserved a 4GB space.
  // On 32-bit we reserve only 256MB so you could run out of space with 100,000 classes
  // or so.
  _mc_region.print_out_of_space_msg(name, needed_bytes);
  _rw_region.print_out_of_space_msg(name, needed_bytes);
  _ro_region.print_out_of_space_msg(name, needed_bytes);

  // Does not return.
  vm_exit_during_initialization(err_msg("Unable to allocate from '%s' region", name),
                                "Please reduce the number of shared classes.");
}
2709 
2710 // This is used to relocate the pointers so that the base archive can be mapped at
2711 // MetaspaceShared::requested_base_address() without runtime relocation.
2712 intx MetaspaceShared::final_delta() {
2713   return intx(MetaspaceShared::requested_base_address())  // We want the base archive to be mapped to here at runtime
2714        - intx(SharedBaseAddress);                         // .. but the base archive is mapped at here at dump time
2715 }
2716 
2717 bool MetaspaceShared::use_full_module_graph() {
2718   return _use_optimized_module_handling && _use_full_module_graph &&
2719     (UseSharedSpaces || DumpSharedSpaces) && HeapShared::is_heap_object_archiving_allowed();
2720 }
2721 
// Print the CDS mapping state to st: at runtime, the mapped range of the
// shared metaspace (with the boundary between the static archive and any
// dynamic archive); at dump time, the reserved output range.
void MetaspaceShared::print_on(outputStream* st) {
  if (UseSharedSpaces || DumpSharedSpaces) {
    st->print("CDS archive(s) mapped at: ");
    address base;
    address top;
    if (UseSharedSpaces) { // Runtime
      base = (address)MetaspaceObj::shared_metaspace_base();
      address static_top = (address)_shared_metaspace_static_top;
      top = (address)MetaspaceObj::shared_metaspace_top();
      // [base - end of static archive - end of all archives)
      st->print("[" PTR_FORMAT "-" PTR_FORMAT "-" PTR_FORMAT "), ", p2i(base), p2i(static_top), p2i(top));
    } else if (DumpSharedSpaces) { // Dump Time
      base = (address)_shared_rs.base();
      top = (address)_shared_rs.end();
      st->print("[" PTR_FORMAT "-" PTR_FORMAT "), ", p2i(base), p2i(top));
    }
    // base/top are always assigned here: exactly one branch above runs
    // because the enclosing condition requires UseSharedSpaces || DumpSharedSpaces.
    st->print("size " SIZE_FORMAT ", ", top - base);
    st->print("SharedBaseAddress: " PTR_FORMAT ", ArchiveRelocationMode: %d.", SharedBaseAddress, (int)ArchiveRelocationMode);
  } else {
    st->print("CDS disabled.");
  }
  st->cr();
}





< prev index next >