47 #include "memory/resourceArea.hpp"
48 #include "oops/compressedOops.inline.hpp"
49 #include "oops/instanceClassLoaderKlass.hpp"
50 #include "oops/instanceMirrorKlass.hpp"
51 #include "oops/instanceRefKlass.hpp"
52 #include "oops/objArrayKlass.hpp"
53 #include "oops/objArrayOop.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "oops/typeArrayKlass.hpp"
56 #include "prims/jvmtiRedefineClasses.hpp"
57 #include "runtime/handles.inline.hpp"
58 #include "runtime/os.hpp"
59 #include "runtime/safepointVerifiers.hpp"
60 #include "runtime/signature.hpp"
61 #include "runtime/timerTrace.hpp"
62 #include "runtime/vmThread.hpp"
63 #include "runtime/vm_operations.hpp"
64 #include "utilities/align.hpp"
65 #include "utilities/bitMap.hpp"
66 #include "utilities/defaultStream.hpp"
67 #if INCLUDE_G1GC
68 #include "gc/g1/g1CollectedHeap.hpp"
69 #endif
70
// Out-of-line definitions for MetaspaceShared's static state (declared in
// the corresponding header). Fields without an initializer here rely on
// static zero-initialization.
ReservedSpace MetaspaceShared::_shared_rs;
VirtualSpace MetaspaceShared::_shared_vs;
MetaspaceSharedStats MetaspaceShared::_stats;
bool MetaspaceShared::_has_error_classes;
bool MetaspaceShared::_archive_loading_failed = false;
bool MetaspaceShared::_remapped_readwrite = false;
address MetaspaceShared::_cds_i2i_entry_code_buffers = NULL;
size_t MetaspaceShared::_cds_i2i_entry_code_buffers_size = 0;
size_t MetaspaceShared::_core_spaces_size = 0;
80
81 // The CDS archive is divided into the following regions:
82 // mc - misc code (the method entry trampolines)
83 // rw - read-write metadata
84 // ro - read-only metadata and read-only tables
85 // md - misc data (the c++ vtables)
86 // od - optional data (original class files)
1053 public:
1054 SortedSymbolClosure() {
1055 SymbolTable::symbols_do(this);
1056 _symbols.sort(compare_symbols_by_address);
1057 }
1058 GrowableArray<Symbol*>* get_sorted_symbols() {
1059 return &_symbols;
1060 }
1061 };
1062
1063 // ArchiveCompactor --
1064 //
1065 // This class is the central piece of shared archive compaction -- all metaspace data are
1066 // initially allocated outside of the shared regions. ArchiveCompactor copies the
1067 // metaspace data into their final location in the shared regions.
1068
1069 class ArchiveCompactor : AllStatic {
1070 static DumpAllocStats* _alloc_stats;
1071 static SortedSymbolClosure* _ssc;
1072
  // Hash function for the RelocationTable typedef below. A thin named
  // wrapper is needed because the Solaris compiler rejects
  // primitive_hash<address> used directly as a template argument.
  static unsigned my_hash(const address& a) {
    return primitive_hash<address>(a);
  }
  // Equality function for the RelocationTable typedef below. A thin named
  // wrapper is needed because the Solaris compiler rejects
  // primitive_equals<address> used directly as a template argument.
  static bool my_equals(const address& a0, const address& a1) {
    return primitive_equals<address>(a0, a1);
  }
1079 typedef ResourceHashtable<
1080 address, address,
1081 ArchiveCompactor::my_hash, // solaris compiler doesn't like: primitive_hash<address>
1082 ArchiveCompactor::my_equals, // solaris compiler doesn't like: primitive_equals<address>
1083 16384, ResourceObj::C_HEAP> RelocationTable;
1084 static RelocationTable* _new_loc_table;
1085
1086 public:
  // Creates the C-heap allocated bookkeeping structures used during archive
  // compaction: the allocation statistics collector and the table mapping
  // each original object address to its relocated copy.
  static void initialize() {
    _alloc_stats = new(ResourceObj::C_HEAP, mtInternal)DumpAllocStats;
    _new_loc_table = new(ResourceObj::C_HEAP, mtInternal)RelocationTable;
  }
1091 static DumpAllocStats* alloc_stats() {
1092 return _alloc_stats;
1093 }
1094
  // Use this when you allocate space with MetaspaceShared::read_only_space_alloc()
  // outside of ArchiveCompactor::allocate(). These are usually for misc tables
  // that are allocated in the RO space.
  class OtherROAllocMark {
    char* _oldtop;  // _ro_region.top() captured when the mark was created
  public:
    OtherROAllocMark() {
      _oldtop = _ro_region.top();
    }
    // On scope exit, attribute all RO-region bytes allocated during this
    // mark's lifetime to the "other" category of the dump statistics.
    ~OtherROAllocMark() {
      char* newtop = _ro_region.top();
      ArchiveCompactor::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
    }
  };
1109
1119 oldtop = _ro_region.top();
1120 p = _ro_region.allocate(bytes, alignment);
1121 newtop = _ro_region.top();
1122 } else {
1123 oldtop = _rw_region.top();
1124 if (ref->msotype() == MetaspaceObj::ClassType) {
1125 // Save a pointer immediate in front of an InstanceKlass, so
1126 // we can do a quick lookup from InstanceKlass* -> RunTimeSharedClassInfo*
1127 // without building another hashtable. See RunTimeSharedClassInfo::get_for()
1128 // in systemDictionaryShared.cpp.
1129 Klass* klass = (Klass*)obj;
1130 if (klass->is_instance_klass()) {
1131 SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
1132 _rw_region.allocate(sizeof(address), BytesPerWord);
1133 }
1134 }
1135 p = _rw_region.allocate(bytes, alignment);
1136 newtop = _rw_region.top();
1137 }
1138 memcpy(p, obj, bytes);
1139 bool isnew = _new_loc_table->put(obj, (address)p);
1140 log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(obj), p2i(p), bytes);
1141 assert(isnew, "must be");
1142
1143 _alloc_stats->record(ref->msotype(), int(newtop - oldtop), read_only);
1144 }
1145
1146 static address get_new_loc(MetaspaceClosure::Ref* ref) {
1147 address* pp = _new_loc_table->get(ref->obj());
1148 assert(pp != NULL, "must be");
1149 return *pp;
1150 }
1151
1152 private:
1153 // Makes a shallow copy of visited MetaspaceObj's
1154 class ShallowCopier: public UniqueMetaspaceClosure {
1155 bool _read_only;
1156 public:
1157 ShallowCopier(bool read_only) : _read_only(read_only) {}
1158
1159 virtual void do_unique_ref(Ref* ref, bool read_only) {
1160 if (read_only == _read_only) {
1161 allocate(ref, read_only);
1162 }
1163 }
1164 };
1165
1166 // Relocate embedded pointers within a MetaspaceObj's shallow copy
1167 class ShallowCopyEmbeddedRefRelocator: public UniqueMetaspaceClosure {
1271 GrowableArray<Symbol*>* symbols = _ssc->get_sorted_symbols();
1272 for (int i=0; i<symbols->length(); i++) {
1273 it->push(symbols->adr_at(i));
1274 }
1275 if (_global_klass_objects != NULL) {
1276 // Need to fix up the pointers
1277 for (int i = 0; i < _global_klass_objects->length(); i++) {
1278 // NOTE -- this requires that the vtable is NOT yet patched, or else we are hosed.
1279 it->push(_global_klass_objects->adr_at(i));
1280 }
1281 }
1282 FileMapInfo::metaspace_pointers_do(it);
1283 SystemDictionaryShared::dumptime_classes_do(it);
1284 Universe::metaspace_pointers_do(it);
1285 SymbolTable::metaspace_pointers_do(it);
1286 vmSymbols::metaspace_pointers_do(it);
1287 }
1288
1289 static Klass* get_relocated_klass(Klass* orig_klass) {
1290 assert(DumpSharedSpaces, "dump time only");
1291 address* pp = _new_loc_table->get((address)orig_klass);
1292 assert(pp != NULL, "must be");
1293 Klass* klass = (Klass*)(*pp);
1294 assert(klass->is_klass(), "must be");
1295 return klass;
1296 }
1297 };
1298
// Out-of-line storage for ArchiveCompactor's static members.
DumpAllocStats* ArchiveCompactor::_alloc_stats;
SortedSymbolClosure* ArchiveCompactor::_ssc;
ArchiveCompactor::RelocationTable* ArchiveCompactor::_new_loc_table;
1302
1303 void VM_PopulateDumpSharedSpace::write_region(FileMapInfo* mapinfo, int region_idx,
1304 DumpRegion* dump_region, bool read_only, bool allow_exec) {
1305 mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
1306 }
1307
1308 void VM_PopulateDumpSharedSpace::dump_symbols() {
1309 tty->print_cr("Dumping symbol table ...");
1310
1311 NOT_PRODUCT(SymbolTable::verify());
|
47 #include "memory/resourceArea.hpp"
48 #include "oops/compressedOops.inline.hpp"
49 #include "oops/instanceClassLoaderKlass.hpp"
50 #include "oops/instanceMirrorKlass.hpp"
51 #include "oops/instanceRefKlass.hpp"
52 #include "oops/objArrayKlass.hpp"
53 #include "oops/objArrayOop.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "oops/typeArrayKlass.hpp"
56 #include "prims/jvmtiRedefineClasses.hpp"
57 #include "runtime/handles.inline.hpp"
58 #include "runtime/os.hpp"
59 #include "runtime/safepointVerifiers.hpp"
60 #include "runtime/signature.hpp"
61 #include "runtime/timerTrace.hpp"
62 #include "runtime/vmThread.hpp"
63 #include "runtime/vm_operations.hpp"
64 #include "utilities/align.hpp"
65 #include "utilities/bitMap.hpp"
66 #include "utilities/defaultStream.hpp"
67 #include "utilities/hashtable.inline.hpp"
68 #if INCLUDE_G1GC
69 #include "gc/g1/g1CollectedHeap.hpp"
70 #endif
71
// Out-of-line definitions for MetaspaceShared's static state (declared in
// the corresponding header). Fields without an initializer here rely on
// static zero-initialization.
ReservedSpace MetaspaceShared::_shared_rs;
VirtualSpace MetaspaceShared::_shared_vs;
MetaspaceSharedStats MetaspaceShared::_stats;
bool MetaspaceShared::_has_error_classes;
bool MetaspaceShared::_archive_loading_failed = false;
bool MetaspaceShared::_remapped_readwrite = false;
address MetaspaceShared::_cds_i2i_entry_code_buffers = NULL;
size_t MetaspaceShared::_cds_i2i_entry_code_buffers_size = 0;
size_t MetaspaceShared::_core_spaces_size = 0;
81
82 // The CDS archive is divided into the following regions:
83 // mc - misc code (the method entry trampolines)
84 // rw - read-write metadata
85 // ro - read-only metadata and read-only tables
86 // md - misc data (the c++ vtables)
87 // od - optional data (original class files)
1054 public:
1055 SortedSymbolClosure() {
1056 SymbolTable::symbols_do(this);
1057 _symbols.sort(compare_symbols_by_address);
1058 }
1059 GrowableArray<Symbol*>* get_sorted_symbols() {
1060 return &_symbols;
1061 }
1062 };
1063
1064 // ArchiveCompactor --
1065 //
1066 // This class is the central piece of shared archive compaction -- all metaspace data are
1067 // initially allocated outside of the shared regions. ArchiveCompactor copies the
1068 // metaspace data into their final location in the shared regions.
1069
1070 class ArchiveCompactor : AllStatic {
1071 static DumpAllocStats* _alloc_stats;
1072 static SortedSymbolClosure* _ssc;
1073
1074 typedef KVHashtable<address, address, mtInternal> RelocationTable;
1075 static RelocationTable* _new_loc_table;
1076
1077 public:
  // Creates the C-heap allocated bookkeeping structures used during archive
  // compaction. The relocation table starts at a prime size of 8087 buckets
  // and is expanded on demand via maybe_grow() in allocate().
  static void initialize() {
    _alloc_stats = new(ResourceObj::C_HEAP, mtInternal)DumpAllocStats;
    _new_loc_table = new RelocationTable(8087);
  }
1082 static DumpAllocStats* alloc_stats() {
1083 return _alloc_stats;
1084 }
1085
  // Use this when you allocate space with MetaspaceShared::read_only_space_alloc()
  // outside of ArchiveCompactor::allocate(). These are usually for misc tables
  // that are allocated in the RO space.
  class OtherROAllocMark {
    char* _oldtop;  // _ro_region.top() captured when the mark was created
  public:
    OtherROAllocMark() {
      _oldtop = _ro_region.top();
    }
    // On scope exit, attribute all RO-region bytes allocated during this
    // mark's lifetime to the "other" category of the dump statistics.
    ~OtherROAllocMark() {
      char* newtop = _ro_region.top();
      ArchiveCompactor::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
    }
  };
1100
1110 oldtop = _ro_region.top();
1111 p = _ro_region.allocate(bytes, alignment);
1112 newtop = _ro_region.top();
1113 } else {
1114 oldtop = _rw_region.top();
1115 if (ref->msotype() == MetaspaceObj::ClassType) {
1116 // Save a pointer immediate in front of an InstanceKlass, so
1117 // we can do a quick lookup from InstanceKlass* -> RunTimeSharedClassInfo*
1118 // without building another hashtable. See RunTimeSharedClassInfo::get_for()
1119 // in systemDictionaryShared.cpp.
1120 Klass* klass = (Klass*)obj;
1121 if (klass->is_instance_klass()) {
1122 SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
1123 _rw_region.allocate(sizeof(address), BytesPerWord);
1124 }
1125 }
1126 p = _rw_region.allocate(bytes, alignment);
1127 newtop = _rw_region.top();
1128 }
1129 memcpy(p, obj, bytes);
1130 assert(_new_loc_table->lookup(obj) == NULL, "each object can be relocated at most once");
1131 _new_loc_table->add(obj, (address)p);
1132 log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(obj), p2i(p), bytes);
1133 if (_new_loc_table->maybe_grow()) {
1134 log_info(cds, hashtables)("Expanded _new_loc_table to %d", _new_loc_table->table_size());
1135 }
1136 _alloc_stats->record(ref->msotype(), int(newtop - oldtop), read_only);
1137 }
1138
1139 static address get_new_loc(MetaspaceClosure::Ref* ref) {
1140 address* pp = _new_loc_table->lookup(ref->obj());
1141 assert(pp != NULL, "must be");
1142 return *pp;
1143 }
1144
1145 private:
1146 // Makes a shallow copy of visited MetaspaceObj's
1147 class ShallowCopier: public UniqueMetaspaceClosure {
1148 bool _read_only;
1149 public:
1150 ShallowCopier(bool read_only) : _read_only(read_only) {}
1151
1152 virtual void do_unique_ref(Ref* ref, bool read_only) {
1153 if (read_only == _read_only) {
1154 allocate(ref, read_only);
1155 }
1156 }
1157 };
1158
1159 // Relocate embedded pointers within a MetaspaceObj's shallow copy
1160 class ShallowCopyEmbeddedRefRelocator: public UniqueMetaspaceClosure {
1264 GrowableArray<Symbol*>* symbols = _ssc->get_sorted_symbols();
1265 for (int i=0; i<symbols->length(); i++) {
1266 it->push(symbols->adr_at(i));
1267 }
1268 if (_global_klass_objects != NULL) {
1269 // Need to fix up the pointers
1270 for (int i = 0; i < _global_klass_objects->length(); i++) {
1271 // NOTE -- this requires that the vtable is NOT yet patched, or else we are hosed.
1272 it->push(_global_klass_objects->adr_at(i));
1273 }
1274 }
1275 FileMapInfo::metaspace_pointers_do(it);
1276 SystemDictionaryShared::dumptime_classes_do(it);
1277 Universe::metaspace_pointers_do(it);
1278 SymbolTable::metaspace_pointers_do(it);
1279 vmSymbols::metaspace_pointers_do(it);
1280 }
1281
1282 static Klass* get_relocated_klass(Klass* orig_klass) {
1283 assert(DumpSharedSpaces, "dump time only");
1284 address* pp = _new_loc_table->lookup((address)orig_klass);
1285 assert(pp != NULL, "must be");
1286 Klass* klass = (Klass*)(*pp);
1287 assert(klass->is_klass(), "must be");
1288 return klass;
1289 }
1290 };
1291
// Out-of-line storage for ArchiveCompactor's static members.
DumpAllocStats* ArchiveCompactor::_alloc_stats;
SortedSymbolClosure* ArchiveCompactor::_ssc;
ArchiveCompactor::RelocationTable* ArchiveCompactor::_new_loc_table;
1295
1296 void VM_PopulateDumpSharedSpace::write_region(FileMapInfo* mapinfo, int region_idx,
1297 DumpRegion* dump_region, bool read_only, bool allow_exec) {
1298 mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
1299 }
1300
1301 void VM_PopulateDumpSharedSpace::dump_symbols() {
1302 tty->print_cr("Dumping symbol table ...");
1303
1304 NOT_PRODUCT(SymbolTable::verify());
|