src/share/vm/memory/metaspace.cpp (hs-gc-g1-mmap sdiff: old version first, new version below)

1125   bool succeeded = class_entry->initialize();
1126   _chunk_manager.free_chunks(SpecializedIndex)->set_size(SpecializedChunk);
1127   _chunk_manager.free_chunks(SmallIndex)->set_size(ClassSmallChunk);
1128   _chunk_manager.free_chunks(MediumIndex)->set_size(ClassMediumChunk);
1129   assert(succeeded, " VirtualSpaceList initialization should not fail");
1130   link_vs(class_entry);
1131 }
1132 
1133 size_t VirtualSpaceList::free_bytes() {
1134   return virtual_space_list()->free_words_in_vs() * BytesPerWord;
1135 }
1136 
1137 // Allocate another meta virtual space and add it to the list.
1138 bool VirtualSpaceList::grow_vs(size_t vs_word_size) {
1139   assert_lock_strong(SpaceManager::expand_lock());
1140   if (vs_word_size == 0) {
1141     return false;
1142   }
1143   // Reserve the space
1144   size_t vs_byte_size = vs_word_size * BytesPerWord;
1145   assert(vs_byte_size % os::vm_page_size() == 0, "Not aligned");
1146 
1147   // Allocate the meta virtual space and initialize it.
1148   VirtualSpaceNode* new_entry = new VirtualSpaceNode(vs_byte_size);
1149   if (!new_entry->initialize()) {
1150     delete new_entry;
1151     return false;
1152   } else {
1153     assert(new_entry->reserved_words() == vs_word_size, "Must be");
1154     // ensure lock-free iteration sees fully initialized node
1155     OrderAccess::storestore();
1156     link_vs(new_entry);
1157     return true;
1158   }
1159 }
1160 
1161 void VirtualSpaceList::link_vs(VirtualSpaceNode* new_entry) {
1162   if (virtual_space_list() == NULL) {
1163       set_virtual_space_list(new_entry);
1164   } else {
1165     current_virtual_space()->set_next(new_entry);


1204     // Allocate a chunk out of the current virtual space.
1205     next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1206   }
1207 
1208   if (next == NULL) {
1209     // Not enough room in current virtual space.  Try to commit
1210     // more space.
1211     size_t expand_vs_by_words = MAX2(medium_chunk_bunch,
1212                                      grow_chunks_by_words);
1213     size_t page_size_words = os::vm_page_size() / BytesPerWord;
1214     size_t aligned_expand_vs_by_words = align_size_up(expand_vs_by_words,
1215                                                         page_size_words);
1216     bool vs_expanded =
1217       expand_by(current_virtual_space(), aligned_expand_vs_by_words);
1218     if (!vs_expanded) {
1219       // Should the capacity of the metaspaces be expanded for
1220       // this allocation?  If it's the virtual space for classes and is
1221       // being used for CompressedHeaders, don't allocate a new virtualspace.
1222       if (can_grow() && MetaspaceGC::should_expand(this, word_size)) {
1223         // Get another virtual space.


1224           size_t grow_vs_words =
1225             MAX2((size_t)VirtualSpaceSize, aligned_expand_vs_by_words);
1226         if (grow_vs(grow_vs_words)) {
1227           // Got it.  It's on the list now.  Get a chunk from it.
1228           assert(current_virtual_space()->expanded_words() == 0,
1229               "New virtuals space nodes should not have expanded");
1230 
1231           size_t grow_chunks_by_words_aligned = align_size_up(grow_chunks_by_words,
1232                                                               page_size_words);
1233           // We probably want to expand by aligned_expand_vs_by_words here.
1234           expand_by(current_virtual_space(), grow_chunks_by_words_aligned);
1235           next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1236         }
1237       } else {
1238         // Allocation will fail and induce a GC
1239         if (TraceMetadataChunkAllocation && Verbose) {
1240           gclog_or_tty->print_cr("VirtualSpaceList::get_new_chunk():"
1241             " Fail instead of expand the metaspace");
1242         }
1243       }
1244     } else {
1245       // The virtual space expanded, get a new chunk
1246       next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1247       assert(next != NULL, "Just expanded, should succeed");
1248     }
1249   }
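
The hunk above sizes the expansion as MAX2(medium_chunk_bunch, grow_chunks_by_words), rounded up to whole pages. The standalone sketch below is not part of the webrev; it assumes an 8-byte word, a 4K page and a 64K allocation granularity (typical Windows values, chosen only for illustration) and walks through that arithmetic to show that a page-aligned word count is not necessarily allocation-granularity aligned, which is the gap the new version of this code closes before calling grow_vs().

    #include <cassert>
    #include <cstddef>

    // Worked example (a standalone sketch, not HotSpot code) of the sizing
    // arithmetic in get_new_chunk(); the names mirror its locals.
    int main() {
      const size_t BytesPerWord      = 8;
      const size_t page_size_words   = 4096  / BytesPerWord;   // 512 words
      const size_t granularity_words = 65536 / BytesPerWord;   // 8192 words

      size_t grow_chunks_by_words = 70000;                     // a large, odd-sized request
      size_t medium_chunk_bunch   = 8 * 1024;
      size_t expand_vs_by_words   = grow_chunks_by_words > medium_chunk_bunch
                                  ? grow_chunks_by_words : medium_chunk_bunch;  // MAX2
      // align_size_up(expand_vs_by_words, page_size_words), power-of-two alignment:
      size_t aligned_expand_vs_by_words =
          (expand_vs_by_words + page_size_words - 1) & ~(page_size_words - 1);  // 70144 words

      // Page-aligned is not necessarily allocation-granularity aligned.
      assert(aligned_expand_vs_by_words % page_size_words   == 0);
      assert(aligned_expand_vs_by_words % granularity_words != 0);
      return 0;
    }
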


3409 
3410   static void test_committed() {
3411     size_t committed = MetaspaceAux::committed_bytes();
3412 
3413     assert(committed > 0, "assert");
3414 
3415     size_t reserved  = MetaspaceAux::reserved_bytes();
3416     assert(committed <= reserved, "assert");
3417 
3418     size_t committed_metadata = MetaspaceAux::committed_bytes(Metaspace::NonClassType);
3419     assert(committed_metadata > 0, "assert");
3420     assert(committed_metadata <= committed, "assert");
3421 
3422     if (UseCompressedClassPointers) {
3423       size_t committed_class    = MetaspaceAux::committed_bytes(Metaspace::ClassType);
3424       assert(committed_class > 0, "assert");
3425       assert(committed_class < committed, "assert");
3426     }
3427   }
3428 
3429   static void test() {
3430     test_reserved();
3431     test_committed();

3432   }
3433 };
3434 
3435 void MetaspaceAux_test() {
3436   MetaspaceAuxTest::test();
3437 }
3438 
3439 #endif


New version (with the hs-gc-g1-mmap changes applied):

1125   bool succeeded = class_entry->initialize();
1126   _chunk_manager.free_chunks(SpecializedIndex)->set_size(SpecializedChunk);
1127   _chunk_manager.free_chunks(SmallIndex)->set_size(ClassSmallChunk);
1128   _chunk_manager.free_chunks(MediumIndex)->set_size(ClassMediumChunk);
1129   assert(succeeded, " VirtualSpaceList initialization should not fail");
1130   link_vs(class_entry);
1131 }
1132 
1133 size_t VirtualSpaceList::free_bytes() {
1134   return virtual_space_list()->free_words_in_vs() * BytesPerWord;
1135 }
1136 
1137 // Allocate another meta virtual space and add it to the list.
1138 bool VirtualSpaceList::grow_vs(size_t vs_word_size) {
1139   assert_lock_strong(SpaceManager::expand_lock());
1140   if (vs_word_size == 0) {
1141     return false;
1142   }
1143   // Reserve the space
1144   size_t vs_byte_size = vs_word_size * BytesPerWord;
1145   assert(vs_byte_size % os::vm_allocation_granularity() == 0, "Not aligned");
1146 
1147   // Allocate the meta virtual space and initialize it.
1148   VirtualSpaceNode* new_entry = new VirtualSpaceNode(vs_byte_size);
1149   if (!new_entry->initialize()) {
1150     delete new_entry;
1151     return false;
1152   } else {
1153     assert(new_entry->reserved_words() == vs_word_size, "Must be");
1154     // ensure lock-free iteration sees fully initialized node
1155     OrderAccess::storestore();
1156     link_vs(new_entry);
1157     return true;
1158   }
1159 }
1160 
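
grow_vs() above fully initializes the new VirtualSpaceNode, issues OrderAccess::storestore(), and only then hands it to link_vs() (shown next), so a reader iterating the list without holding expand_lock() never sees a half-initialized node. The self-contained sketch below illustrates the same publish-then-link idea; it is an analogy only, not HotSpot code: it prepends at the head instead of appending at the tail and substitutes C++11 atomics for the OrderAccess primitives.

    #include <atomic>
    #include <cstddef>

    struct Node {
      size_t reserved_words;
      Node*  next;
    };

    struct List {
      std::atomic<Node*> head{nullptr};

      void link(Node* n) {                        // writers are serialized by a lock,
        n->next = head.load(std::memory_order_relaxed);   // as grow_vs() is by expand_lock()
        // The release store plays the role of storestore() + the plain pointer store:
        // all initializing stores become visible before the node is published.
        head.store(n, std::memory_order_release);
      }

      size_t reserved_total() const {             // lock-free reader
        size_t total = 0;
        for (Node* n = head.load(std::memory_order_acquire); n != nullptr; n = n->next) {
          total += n->reserved_words;
        }
        return total;
      }
    };

    int main() {
      List list;
      Node n1 = { 1024, nullptr };
      list.link(&n1);
      return list.reserved_total() == 1024 ? 0 : 1;
    }
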
1161 void VirtualSpaceList::link_vs(VirtualSpaceNode* new_entry) {
1162   if (virtual_space_list() == NULL) {
1163       set_virtual_space_list(new_entry);
1164   } else {
1165     current_virtual_space()->set_next(new_entry);


1204     // Allocate a chunk out of the current virtual space.
1205     next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1206   }
1207 
1208   if (next == NULL) {
1209     // Not enough room in current virtual space.  Try to commit
1210     // more space.
1211     size_t expand_vs_by_words = MAX2(medium_chunk_bunch,
1212                                      grow_chunks_by_words);
1213     size_t page_size_words = os::vm_page_size() / BytesPerWord;
1214     size_t aligned_expand_vs_by_words = align_size_up(expand_vs_by_words,
1215                                                         page_size_words);
1216     bool vs_expanded =
1217       expand_by(current_virtual_space(), aligned_expand_vs_by_words);
1218     if (!vs_expanded) {
1219       // Should the capacity of the metaspaces be expanded for
1220       // this allocation?  If it's the virtual space for classes and is
1221       // being used for CompressedHeaders, don't allocate a new virtualspace.
1222       if (can_grow() && MetaspaceGC::should_expand(this, word_size)) {
1223         // Get another virtual space.
1224         size_t allocation_aligned_expand_words =
1225             align_size_up(aligned_expand_vs_by_words, os::vm_allocation_granularity() / BytesPerWord);
1226         size_t grow_vs_words =
1227             MAX2((size_t)VirtualSpaceSize, allocation_aligned_expand_words);
1228         if (grow_vs(grow_vs_words)) {
1229           // Got it.  It's on the list now.  Get a chunk from it.
1230           assert(current_virtual_space()->expanded_words() == 0,
1231               "New virtual space nodes should not have expanded");
1232 
1233           size_t grow_chunks_by_words_aligned = align_size_up(grow_chunks_by_words,
1234                                                               page_size_words);
1235           // We probably want to expand by aligned_expand_vs_by_words here.
1236           expand_by(current_virtual_space(), grow_chunks_by_words_aligned);
1237           next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1238         }
1239       } else {
1240         // Allocation will fail and induce a GC
1241         if (TraceMetadataChunkAllocation && Verbose) {
1242           gclog_or_tty->print_cr("VirtualSpaceList::get_new_chunk():"
1243             " Fail instead of expand the metaspace");
1244         }
1245       }
1246     } else {
1247       // The virtual space expanded, get a new chunk
1248       next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1249       assert(next != NULL, "Just expanded, should succeed");
1250     }
1251   }
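
The lines added at 1224-1225 round the page-aligned expansion size up once more, to the OS allocation granularity, before it can become the size of a brand-new reservation in grow_vs(). Below is a small sketch of just that step, again with assumed values (8-byte word, 64K granularity), not part of the webrev.

    #include <cassert>
    #include <cstddef>

    static size_t align_up(size_t value, size_t alignment) {   // power-of-two alignment
      return (value + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      const size_t BytesPerWord      = 8;
      const size_t granularity_words = 65536 / BytesPerWord;          // 8192 words

      size_t aligned_expand_vs_by_words = 70144;                      // page-aligned request
      size_t allocation_aligned_expand_words =
          align_up(aligned_expand_vs_by_words, granularity_words);    // -> 73728 words

      // The word count handed to grow_vs() now satisfies the
      // os::vm_allocation_granularity() alignment asserted there.
      assert(allocation_aligned_expand_words % granularity_words == 0);
      return 0;
    }
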


3411 
3412   static void test_committed() {
3413     size_t committed = MetaspaceAux::committed_bytes();
3414 
3415     assert(committed > 0, "assert");
3416 
3417     size_t reserved  = MetaspaceAux::reserved_bytes();
3418     assert(committed <= reserved, "assert");
3419 
3420     size_t committed_metadata = MetaspaceAux::committed_bytes(Metaspace::NonClassType);
3421     assert(committed_metadata > 0, "assert");
3422     assert(committed_metadata <= committed, "assert");
3423 
3424     if (UseCompressedClassPointers) {
3425       size_t committed_class    = MetaspaceAux::committed_bytes(Metaspace::ClassType);
3426       assert(committed_class > 0, "assert");
3427       assert(committed_class < committed, "assert");
3428     }
3429   }
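
test_committed() above checks a few accounting invariants. The toy model below (my own illustration, not MetaspaceAux) restates the relations it relies on: committed space is the sum of the non-class and class parts, so each part is bounded by the total, and the total by the reservation; the numbers are made up.

    #include <cassert>
    #include <cstddef>

    struct Usage {
      size_t reserved;
      size_t committed_nonclass;
      size_t committed_class;      // stays 0 when UseCompressedClassPointers is off
      size_t committed() const { return committed_nonclass + committed_class; }
    };

    int main() {
      Usage u = { 8u * 1024 * 1024, 1024 * 1024, 256 * 1024 };
      assert(u.committed() <= u.reserved);
      assert(u.committed_nonclass <= u.committed());
      assert(u.committed_class    <  u.committed());   // strict, as asserted in the test
      return 0;
    }
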
3430 
3431   static void test_virtual_space_list_large_chunk() {
3432     VirtualSpaceList* vs_list = new VirtualSpaceList(os::vm_allocation_granularity());
3433     MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
3434     // A size larger than VirtualSpaceSize (256k) and add one page to make it _not_ be
3435     // vm_allocation_granularity aligned on Windows.
3436     size_t large_size = (size_t)(2*256*K + (os::vm_page_size()/BytesPerWord));
3437     large_size += (os::vm_page_size()/BytesPerWord);
3438     vs_list->get_new_chunk(large_size, large_size, 0);
3439   }
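
This appears to be the regression test for the rounding added at lines 1224-1225: the requested chunk is larger than VirtualSpaceSize and deliberately not allocation-granularity aligned, so without the extra align_size_up() the size handed to grow_vs() could trip the os::vm_allocation_granularity() alignment assert on Windows.
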
3440 
3441   static void test() {
3442     test_reserved();
3443     test_committed();
3444     test_virtual_space_list_large_chunk();
3445   }
3446 };
3447 
3448 void MetaspaceAux_test() {
3449   MetaspaceAuxTest::test();
3450 }
3451 
3452 #endif