
src/hotspot/share/memory/metaspace.cpp

rev 49736 : [mq]: metaspace-additions-to-jcmd


  26 #include "gc/shared/collectedHeap.hpp"
  27 #include "gc/shared/collectorPolicy.hpp"
  28 #include "logging/log.hpp"
  29 #include "logging/logStream.hpp"
  30 #include "memory/allocation.hpp"
  31 #include "memory/binaryTreeDictionary.inline.hpp"
  32 #include "memory/filemap.hpp"
  33 #include "memory/freeList.inline.hpp"
  34 #include "memory/metachunk.hpp"
  35 #include "memory/metaspace.hpp"
  36 #include "memory/metaspaceGCThresholdUpdater.hpp"
  37 #include "memory/metaspaceShared.hpp"
  38 #include "memory/metaspaceTracer.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "runtime/atomic.hpp"
  42 #include "runtime/globals.hpp"
  43 #include "runtime/init.hpp"
  44 #include "runtime/java.hpp"
  45 #include "runtime/mutex.hpp"

  46 #include "runtime/orderAccess.inline.hpp"
  47 #include "services/memTracker.hpp"
  48 #include "services/memoryService.hpp"
  49 #include "utilities/align.hpp"
  50 #include "utilities/copy.hpp"
  51 #include "utilities/debug.hpp"
  52 #include "utilities/macros.hpp"
  53 
  54 typedef BinaryTreeDictionary<Metablock, FreeList<Metablock> > BlockTreeDictionary;
  55 typedef BinaryTreeDictionary<Metachunk, FreeList<Metachunk> > ChunkTreeDictionary;
  56 
  57 // Helper function that does a bunch of checks for a chunk.
  58 DEBUG_ONLY(static void do_verify_chunk(Metachunk* chunk);)
  59 
  60 // Given a Metachunk, update its in-use information (both in the
  61 // chunk and the occupancy map).
  62 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse);
  63 
  64 size_t const allocation_from_dictionary_limit = 4 * K;
  65 
  66 MetaWord* last_allocated = 0;
  67 
  68 size_t Metaspace::_compressed_class_space_size;
  69 const MetaspaceTracer* Metaspace::_tracer = NULL;
  70 
  71 DEBUG_ONLY(bool Metaspace::_frozen = false;)
  72 
  73 enum ChunkSizes {    // in words.
  74   ClassSpecializedChunk = 128,
  75   SpecializedChunk = 128,
  76   ClassSmallChunk = 256,
  77   SmallChunk = 512,
  78   ClassMediumChunk = 4 * K,
  79   MediumChunk = 8 * K
  80 };
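// Illustrative sketch, assuming a 64-bit VM (BytesPerWord == 8): the word sizes
// above correspond to the following byte sizes.
//   SpecializedChunk / ClassSpecializedChunk:  128 words ->  1 KB
//   ClassSmallChunk:                           256 words ->  2 KB
//   SmallChunk:                                512 words ->  4 KB
//   ClassMediumChunk:                        4 * K words -> 32 KB
//   MediumChunk:                             8 * K words -> 64 KB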
  81 
  82 // Returns size of this chunk type.
  83 size_t get_size_for_nonhumongous_chunktype(ChunkIndex chunktype, bool is_class) {
  84   assert(is_valid_nonhumongous_chunktype(chunktype), "invalid chunk type.");
  85   size_t size = 0;
  86   if (is_class) {
  87     switch(chunktype) {
  88       case SpecializedIndex: size = ClassSpecializedChunk; break;
  89       case SmallIndex: size = ClassSmallChunk; break;
  90       case MediumIndex: size = ClassMediumChunk; break;
  91       default:
  92         ShouldNotReachHere();


 127       // A valid humongous chunk size is a multiple of the smallest chunk size.
 128       assert(is_aligned(size, SpecializedChunk), "Invalid chunk size");
 129       return HumongousIndex;
 130     }
 131   }
 132   ShouldNotReachHere();
 133   return (ChunkIndex)-1;
 134 }
 135 
 136 
 137 static ChunkIndex next_chunk_index(ChunkIndex i) {
 138   assert(i < NumberOfInUseLists, "Out of bound");
 139   return (ChunkIndex) (i+1);
 140 }
 141 
 142 static ChunkIndex prev_chunk_index(ChunkIndex i) {
 143   assert(i > ZeroIndex, "Out of bound");
 144   return (ChunkIndex) (i-1);
 145 }
 146 
 147 static const char* scale_unit(size_t scale) {
 148   switch(scale) {
 149     case 1: return "BYTES";

 150     case K: return "KB";
 151     case M: return "MB";
 152     case G: return "GB";
 153     default:
 154       ShouldNotReachHere();
 155       return NULL;
 156   }
 157 }
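// Minimal usage sketch, assuming hypothetical locals 'out' and 'bytes'; this mirrors
// how ChunkManager::print_statistics() further down pairs a scaled value with scale_unit():
//   size_t scale = M;
//   out->print_cr("committed: %.2f%s", (float)bytes / scale, scale_unit(scale));
//   // prints e.g. "committed: 12.50MB"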
 158 
 159 volatile intptr_t MetaspaceGC::_capacity_until_GC = 0;
 160 uint MetaspaceGC::_shrink_factor = 0;
 161 bool MetaspaceGC::_should_concurrent_collect = false;
 162 
 163 typedef class FreeList<Metachunk> ChunkList;
 164 
 165 // Manages the global free lists of chunks.
 166 class ChunkManager : public CHeapObj<mtInternal> {
 167   friend class TestVirtualSpaceNodeTest;
 168 
 169   // Free list of chunks of different sizes.
 170   //   SpecializedChunk
 171   //   SmallChunk
 172   //   MediumChunk
 173   ChunkList _free_chunks[NumberOfFreeLists];
 174 
 175   // Whether or not this is the class chunkmanager.
 176   const bool _is_class;
 177 
 178   // Return non-humongous chunk list by its index.
 179   ChunkList* free_chunks(ChunkIndex index);
 180 
 181   // Returns non-humongous chunk list for the given chunk word size.
 182   ChunkList* find_free_chunks_list(size_t word_size);


 223 
 224   // Helper for chunk merging:
 225   //  Given an address range with 1-n chunks which are all supposed to be
 226   //  free and hence currently managed by this ChunkManager, remove them
 227   //  from this ChunkManager and mark them as invalid.
 228   // - This does not correct the occupancy map.
 229   // - This does not adjust the counters in ChunkManager.
 230   // - This does not adjust the container count in the containing VirtualSpaceNode.
 231   // Returns number of chunks removed.
 232   int remove_chunks_in_area(MetaWord* p, size_t word_size);
 233 
 234   // Helper for chunk splitting: given a target chunk size and a larger free chunk,
 235   // split up the larger chunk into n smaller chunks, at least one of which should be
 236   // the target chunk of target chunk size. The smaller chunks, including the target
 237   // chunk, are returned to the freelist. The pointer to the target chunk is returned.
 238   // Note that this chunk is supposed to be removed from the freelist right away.
 239   Metachunk* split_chunk(size_t target_chunk_word_size, Metachunk* chunk);
 240 
 241  public:
 242 
 243   struct ChunkManagerStatistics {
 244     size_t num_by_type[NumberOfFreeLists];
 245     size_t single_size_by_type[NumberOfFreeLists];
 246     size_t total_size_by_type[NumberOfFreeLists];
 247     size_t num_humongous_chunks;
 248     size_t total_size_humongous_chunks;
 249   };
 250 
 251   void locked_get_statistics(ChunkManagerStatistics* stat) const;
 252   void get_statistics(ChunkManagerStatistics* stat) const;
 253   static void print_statistics(const ChunkManagerStatistics* stat, outputStream* out, size_t scale);
 254 
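  // Usage sketch for the statistics API above, assuming a hypothetical caller with
  // a ChunkManager* cm and an outputStream* out:
  //   ChunkManager::ChunkManagerStatistics stat;
  //   cm->get_statistics(&stat);                      // takes MetaspaceExpand_lock internally
  //   ChunkManager::print_statistics(&stat, out, K);  // scale must be 1, K, M or G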
 255 
 256   ChunkManager(bool is_class)
 257       : _is_class(is_class), _free_chunks_total(0), _free_chunks_count(0) {
 258     _free_chunks[SpecializedIndex].set_size(get_size_for_nonhumongous_chunktype(SpecializedIndex, is_class));
 259     _free_chunks[SmallIndex].set_size(get_size_for_nonhumongous_chunktype(SmallIndex, is_class));
 260     _free_chunks[MediumIndex].set_size(get_size_for_nonhumongous_chunktype(MediumIndex, is_class));
 261   }
 262 
 263   // Add or delete (return) a chunk to the global freelist.
 264   Metachunk* chunk_freelist_allocate(size_t word_size);
 265 
 266   // Map a size to a list index assuming that there are lists
 267   // for special, small, medium, and humongous chunks.
 268   ChunkIndex list_index(size_t size);
 269 
 270   // Map a given index to the chunk size.
 271   size_t size_by_index(ChunkIndex index) const;
 272 
 273   bool is_class() const { return _is_class; }
 274 
 275   // Convenience accessors.


 341   // Debug support
 342   void verify();
 343   void slow_verify() {
 344     if (VerifyMetaspace) {
 345       verify();
 346     }
 347   }
 348   void locked_verify();
 349   void slow_locked_verify() {
 350     if (VerifyMetaspace) {
 351       locked_verify();
 352     }
 353   }
 354   void verify_free_chunks_total();
 355 
 356   void locked_print_free_chunks(outputStream* st);
 357   void locked_print_sum_free_chunks(outputStream* st);
 358 
 359   void print_on(outputStream* st) const;
 360 
 361   // Prints composition for both non-class and (if available)
 362   // class chunk manager.
 363   static void print_all_chunkmanagers(outputStream* out, size_t scale = 1);
 364 };
 365 
 366 class SmallBlocks : public CHeapObj<mtClass> {
 367   const static uint _small_block_max_size = sizeof(TreeChunk<Metablock,  FreeList<Metablock> >)/HeapWordSize;
 368   const static uint _small_block_min_size = sizeof(Metablock)/HeapWordSize;
 369 
 370  private:
 371   FreeList<Metablock> _small_lists[_small_block_max_size - _small_block_min_size];
 372 
 373   FreeList<Metablock>& list_at(size_t word_size) {
 374     assert(word_size >= _small_block_min_size, "There are no metaspace objects less than %u words", _small_block_min_size);
 375     return _small_lists[word_size - _small_block_min_size];
 376   }
 377 
 378  public:
 379   SmallBlocks() {
 380     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 381       uint k = i - _small_block_min_size;
 382       _small_lists[k].set_size(i);
 383     }
 384   }
 385 
 386   size_t total_size() const {
 387     size_t result = 0;
 388     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 389       uint k = i - _small_block_min_size;
 390       result = result + _small_lists[k].count() * _small_lists[k].size();
 391     }
 392     return result;
 393   }
 394 
 395   static uint small_block_max_size() { return _small_block_max_size; }
 396   static uint small_block_min_size() { return _small_block_min_size; }
 397 
 398   MetaWord* get_block(size_t word_size) {
 399     if (list_at(word_size).count() > 0) {
 400       MetaWord* new_block = (MetaWord*) list_at(word_size).get_chunk_at_head();
 401       return new_block;
 402     } else {
 403       return NULL;
 404     }
 405   }
 406   void return_block(Metablock* free_chunk, size_t word_size) {
 407     list_at(word_size).return_chunk_at_head(free_chunk, false);
 408     assert(list_at(word_size).count() > 0, "Should have a chunk");
 409   }
 410 
 411   void print_on(outputStream* st) const {
 412     st->print_cr("SmallBlocks:");
 413     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 414       uint k = i - _small_block_min_size;


 435     }
 436     return _small_blocks;
 437   }
 438 
 439  public:
 440   BlockFreelist();
 441   ~BlockFreelist();
 442 
 443   // Get and return a block to the free list
 444   MetaWord* get_block(size_t word_size);
 445   void return_block(MetaWord* p, size_t word_size);
 446 
 447   size_t total_size() const  {
 448     size_t result = dictionary()->total_size();
 449     if (_small_blocks != NULL) {
 450       result = result + _small_blocks->total_size();
 451     }
 452     return result;
 453   }
 454 
 455   static size_t min_dictionary_size()   { return TreeChunk<Metablock, FreeList<Metablock> >::min_size(); }
 456   void print_on(outputStream* st) const;
 457 };
 458 
 459 // Helper for Occupancy Bitmap. A type trait to give an all-bits-are-one-unsigned constant.
 460 template <typename T> struct all_ones  { static const T value; };
 461 template <> struct all_ones <uint64_t> { static const uint64_t value = 0xFFFFFFFFFFFFFFFFULL; };
 462 template <> struct all_ones <uint32_t> { static const uint32_t value = 0xFFFFFFFF; };
 463 
 464 // The OccupancyMap is a bitmap which, for a given VirtualSpaceNode,
 465 // keeps information about
 466 // - where a chunk starts
 467 // - whether a chunk is in-use or free
 468 // A bit in this bitmap represents one range of memory in the smallest
 469 // chunk size (SpecializedChunk or ClassSpecializedChunk).
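// Sizing sketch, assuming a class-space node of 4 MB and ClassSpecializedChunk == 128
// words (1 KB on 64-bit): the node covers 4 M / 1 K = 4096 smallest-chunk ranges, so
// the OccupancyMap needs 4096 bits (512 bytes) per kind of information it tracks.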
 470 class OccupancyMap : public CHeapObj<mtInternal> {
 471 
 472   // The address range this map covers.
 473   const MetaWord* const _reference_address;
 474   const size_t _word_size;


 841   // Allocate a chunk from the virtual space and return it.
 842   Metachunk* get_chunk_vs(size_t chunk_word_size);
 843 
 844   // Expands/shrinks the committed space in a virtual space.  Delegates
 845   // to VirtualSpace
 846   bool expand_by(size_t min_words, size_t preferred_words);
 847 
 848   // In preparation for deleting this node, remove all the chunks
 849   // in the node from any freelist.
 850   void purge(ChunkManager* chunk_manager);
 851 
 852   // If an allocation doesn't fit in the current node a new node is created.
 853   // Allocate chunks out of the remaining committed space in this node
 854   // to avoid wasting that memory.
 855   // This always adds up because all the chunk sizes are multiples of
 856   // the smallest chunk size.
 857   void retire(ChunkManager* chunk_manager);
 858 
 859 
 860   void print_on(outputStream* st) const;

 861   void print_map(outputStream* st, bool is_class) const;
 862 
 863   // Debug support
 864   DEBUG_ONLY(void mangle();)
 865   // Verify counters, all chunks in this list node and the occupancy map.
 866   DEBUG_ONLY(void verify();)
 867   // Verify that all free chunks in this node are ideally merged
 868   // (there should not be multiple small chunks where a large chunk could exist.)
 869   DEBUG_ONLY(void verify_free_chunks_are_ideally_merged();)
 870 
 871 };
 872 
 873 #define assert_is_aligned(value, alignment)                  \
 874   assert(is_aligned((value), (alignment)),                   \
 875          SIZE_FORMAT_HEX " is not aligned to "               \
 876          SIZE_FORMAT, (size_t)(uintptr_t)value, (alignment))
 877 
 878 // Decide if large pages should be committed when the memory is reserved.
 879 static bool should_commit_large_pages_when_reserving(size_t bytes) {
 880   if (UseLargePages && UseLargePagesInMetaspace && !os::can_commit_large_page_memory()) {
 881     size_t words = bytes / BytesPerWord;
 882     bool is_class = false; // We never reserve large pages for the class space.
 883     if (MetaspaceGC::can_expand(words, is_class) &&
 884         MetaspaceGC::allowed_expansion() >= words) {
 885       return true;
 886     }
 887   }
 888 
 889   return false;
 890 }
 891 
 892 // bytes is the size of the associated virtual space.
 893 VirtualSpaceNode::VirtualSpaceNode(bool is_class, size_t bytes) :
 894   _is_class(is_class), _top(NULL), _next(NULL), _rs(), _container_count(0), _occupancy_map(NULL) {
 895   assert_is_aligned(bytes, Metaspace::reserve_alignment());
 896   bool large_pages = should_commit_large_pages_when_reserving(bytes);
 897   _rs = ReservedSpace(bytes, Metaspace::reserve_alignment(), large_pages);


1165   bool initialization_succeeded() { return _virtual_space_list != NULL; }
1166 
1167   size_t reserved_words()  { return _reserved_words; }
1168   size_t reserved_bytes()  { return reserved_words() * BytesPerWord; }
1169   size_t committed_words() { return _committed_words; }
1170   size_t committed_bytes() { return committed_words() * BytesPerWord; }
1171 
1172   void inc_reserved_words(size_t v);
1173   void dec_reserved_words(size_t v);
1174   void inc_committed_words(size_t v);
1175   void dec_committed_words(size_t v);
1176   void inc_virtual_space_count();
1177   void dec_virtual_space_count();
1178 
1179   bool contains(const void* ptr);
1180 
1181   // Unlink empty VirtualSpaceNodes and free them.
1182   void purge(ChunkManager* chunk_manager);
1183 
1184   void print_on(outputStream* st) const;

1185   void print_map(outputStream* st) const;
1186 
1187   class VirtualSpaceListIterator : public StackObj {
1188     VirtualSpaceNode* _virtual_spaces;
1189    public:
1190     VirtualSpaceListIterator(VirtualSpaceNode* virtual_spaces) :
1191       _virtual_spaces(virtual_spaces) {}
1192 
1193     bool repeat() {
1194       return _virtual_spaces != NULL;
1195     }
1196 
1197     VirtualSpaceNode* get_next() {
1198       VirtualSpaceNode* result = _virtual_spaces;
1199       if (_virtual_spaces != NULL) {
1200         _virtual_spaces = _virtual_spaces->next();
1201       }
1202       return result;
1203     }
1204   };
1205 };
1206 
1207 class Metadebug : AllStatic {
1208   // Debugging support for Metaspaces
1209   static int _allocation_fail_alot_count;
1210 
1211  public:
1212 
1213   static void init_allocation_fail_alot_count();
1214 #ifdef ASSERT
1215   static bool test_metadata_failure();
1216 #endif
1217 };
1218 
1219 int Metadebug::_allocation_fail_alot_count = 0;
1220 

1221 //  SpaceManager - used by Metaspace to handle allocations
1222 class SpaceManager : public CHeapObj<mtClass> {
1223   friend class ClassLoaderMetaspace;
1224   friend class Metadebug;
1225 
1226  private:
1227 
1228   // protects allocations
1229   Mutex* const _lock;
1230 
1231   // Type of metadata allocated.
1232   const Metaspace::MetadataType   _mdtype;
1233 
1234   // Type of metaspace
1235   const Metaspace::MetaspaceType  _space_type;
1236 
1237   // List of chunks in use by this SpaceManager.  Allocations
1238   // are done from the current chunk.  The list is used for deallocating
1239   // chunks when the SpaceManager is freed.
1240   Metachunk* _chunks_in_use[NumberOfInUseLists];
1241   Metachunk* _current_chunk;
1242 
1243   // Maximum number of small chunks to allocate to a SpaceManager
1244   static uint const _small_chunk_limit;
1245 
1246   // Maximum number of specialized chunks to allocate for anonymous and delegating
1247   // metadata space to a SpaceManager
1248   static uint const _anon_and_delegating_metadata_specialize_chunk_limit;
1249 
1250   // Sum of all space in allocated chunks
1251   size_t _allocated_blocks_words;
1252 
1253   // Sum of all allocated chunks
1254   size_t _allocated_chunks_words;
1255   size_t _allocated_chunks_count;
1256 
1257   // Free lists of blocks are per SpaceManager since they
1258   // are assumed to be in chunks in use by the SpaceManager
1259   // and all chunks in use by a SpaceManager are freed when
1260   // the class loader using the SpaceManager is collected.
1261   BlockFreelist* _block_freelists;
1262 
1263  private:
1264   // Accessors
1265   Metachunk* chunks_in_use(ChunkIndex index) const { return _chunks_in_use[index]; }
1266   void set_chunks_in_use(ChunkIndex index, Metachunk* v) {
1267     _chunks_in_use[index] = v;
1268   }
1269 
1270   BlockFreelist* block_freelists() const { return _block_freelists; }
1271 
1272   Metaspace::MetadataType mdtype() { return _mdtype; }
1273 
1274   VirtualSpaceList* vs_list()   const { return Metaspace::get_space_list(_mdtype); }
1275   ChunkManager* chunk_manager() const { return Metaspace::get_chunk_manager(_mdtype); }
1276 
1277   Metachunk* current_chunk() const { return _current_chunk; }
1278   void set_current_chunk(Metachunk* v) {
1279     _current_chunk = v;
1280   }
1281 
1282   Metachunk* find_current_chunk(size_t word_size);
1283 
1284   // Add chunk to the list of chunks in use
1285   void add_chunk(Metachunk* v, bool make_current);
1286   void retire_current_chunk();
1287 
1288   Mutex* lock() const { return _lock; }
1289 
1290  protected:
1291   void initialize();
1292 
1293  public:
1294   SpaceManager(Metaspace::MetadataType mdtype,
1295                Metaspace::MetaspaceType space_type,
1296                Mutex* lock);
1297   ~SpaceManager();
1298 
1299   enum ChunkMultiples {
1300     MediumChunkMultiple = 4
1301   };
1302 
1303   static size_t specialized_chunk_size(bool is_class) { return is_class ? ClassSpecializedChunk : SpecializedChunk; }
1304   static size_t small_chunk_size(bool is_class)       { return is_class ? ClassSmallChunk : SmallChunk; }
1305   static size_t medium_chunk_size(bool is_class)      { return is_class ? ClassMediumChunk : MediumChunk; }
1306 
1307   static size_t smallest_chunk_size(bool is_class)    { return specialized_chunk_size(is_class); }
1308 
1309   // Accessors
1310   bool is_class() const { return _mdtype == Metaspace::ClassType; }
1311 
1312   size_t specialized_chunk_size() const { return specialized_chunk_size(is_class()); }
1313   size_t small_chunk_size()       const { return small_chunk_size(is_class()); }
1314   size_t medium_chunk_size()      const { return medium_chunk_size(is_class()); }
1315 
1316   size_t smallest_chunk_size()    const { return smallest_chunk_size(is_class()); }
1317 
1318   size_t medium_chunk_bunch()     const { return medium_chunk_size() * MediumChunkMultiple; }
1319 
1320   size_t allocated_blocks_words() const { return _allocated_blocks_words; }
1321   size_t allocated_blocks_bytes() const { return _allocated_blocks_words * BytesPerWord; }
1322   size_t allocated_chunks_words() const { return _allocated_chunks_words; }
1323   size_t allocated_chunks_bytes() const { return _allocated_chunks_words * BytesPerWord; }
1324   size_t allocated_chunks_count() const { return _allocated_chunks_count; }
1325 
1326   bool is_humongous(size_t word_size) { return word_size > medium_chunk_size(); }
1327 
1328   // Increment the per Metaspace and global running sums for Metachunks
1329   // by the given size.  This is used when a Metachunk is added to
1330   // the in-use list.
1331   void inc_size_metrics(size_t words);
1332   // Increment the per Metaspace and global running sums for Metablocks by the given
1333   // size.  This is used when a Metablock is allocated.
1334   void inc_used_metrics(size_t words);
1335   // Delete the portion of the running sums for this SpaceManager. That is,
1336   // the global running sums for the Metachunks and Metablocks are
1337   // decremented for all the Metachunks in-use by this SpaceManager.
1338   void dec_total_from_size_metrics();
1339 
1340   // Adjust the initial chunk size to match one of the fixed chunk list sizes,
1341   // or return the unadjusted size if the requested size is humongous.
1342   static size_t adjust_initial_chunk_size(size_t requested, bool is_class_space);
1343   size_t adjust_initial_chunk_size(size_t requested) const;
1344 
1345   // Get the initial chunk size for this metaspace type.
1346   size_t get_initial_chunk_size(Metaspace::MetaspaceType type) const;
1347 
1348   size_t sum_capacity_in_chunks_in_use() const;
1349   size_t sum_used_in_chunks_in_use() const;
1350   size_t sum_free_in_chunks_in_use() const;
1351   size_t sum_waste_in_chunks_in_use() const;
1352   size_t sum_waste_in_chunks_in_use(ChunkIndex index ) const;
1353 
1354   size_t sum_count_in_chunks_in_use();
1355   size_t sum_count_in_chunks_in_use(ChunkIndex i);
1356 
1357   Metachunk* get_new_chunk(size_t chunk_word_size);
1358 
1359   // Block allocation and deallocation.
1360   // Allocates a block from the current chunk
1361   MetaWord* allocate(size_t word_size);
1362 
1363   // Helper for allocations
1364   MetaWord* allocate_work(size_t word_size);
1365 
1366   // Returns a block to the per manager freelist
1367   void deallocate(MetaWord* p, size_t word_size);
1368 
1369   // Based on the allocation size and a minimum chunk size, returns the
1370   // chunk size to use (for expanding space for chunk allocation).
1371   size_t calc_chunk_size(size_t allocation_word_size);
1372 
1373   // Called when an allocation from the current chunk fails.
1374   // Gets a new chunk (may require getting a new virtual space),
1375   // and allocates from that chunk.
1376   MetaWord* grow_and_allocate(size_t word_size);
1377 
1378   // Report memory usage to MemoryService.
1379   void track_metaspace_memory_usage();
1380 
1381   // debugging support.
1382 
1383   void dump(outputStream* const out) const;
1384   void print_on(outputStream* st) const;
1385   void locked_print_chunks_in_use_on(outputStream* st) const;
1386 
1387   void verify();
1388   void verify_chunk_size(Metachunk* chunk);
1389 #ifdef ASSERT
1390   void verify_allocated_blocks_words();
1391 #endif
1392 
1393   // This adjusts the size given to be greater than the minimum allocation size in
1394   // words for data in metaspace.  Essentially the minimum size is currently 3 words.
1395   size_t get_allocation_word_size(size_t word_size) {
1396     size_t byte_size = word_size * BytesPerWord;
1397 
1398     size_t raw_bytes_size = MAX2(byte_size, sizeof(Metablock));
1399     raw_bytes_size = align_up(raw_bytes_size, Metachunk::object_alignment());
1400 
1401     size_t raw_word_size = raw_bytes_size / BytesPerWord;
1402     assert(raw_word_size * BytesPerWord == raw_bytes_size, "Size problem");
1403 
1404     return raw_word_size;
1405   }
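  // Worked example of the rounding above, assuming 64-bit values for illustration
  // (BytesPerWord == 8, sizeof(Metablock) == 24, Metachunk::object_alignment() == 8):
  //   word_size = 1 -> byte_size =  8 -> MAX2(8, 24)  = 24 -> align_up = 24 -> 3 words
  //   word_size = 5 -> byte_size = 40 -> MAX2(40, 24) = 40 -> align_up = 40 -> 5 words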
1406 };
1407 
1408 uint const SpaceManager::_small_chunk_limit = 4;
1409 uint const SpaceManager::_anon_and_delegating_metadata_specialize_chunk_limit = 4;
1410 
1411 void VirtualSpaceNode::inc_container_count() {
1412   assert_lock_strong(MetaspaceExpand_lock);
1413   _container_count++;
1414 }
1415 
1416 void VirtualSpaceNode::dec_container_count() {
1417   assert_lock_strong(MetaspaceExpand_lock);
1418   _container_count--;
1419 }
1420 
1421 #ifdef ASSERT
1422 void VirtualSpaceNode::verify_container_count() {
1423   assert(_container_count == container_count_slow(),
1424          "Inconsistency in container_count _container_count " UINTX_FORMAT
1425          " container_count_slow() " UINTX_FORMAT, _container_count, container_count_slow());


1686 }
1687 
1688 
1689 // Expand the virtual space (commit more of the reserved space)
1690 bool VirtualSpaceNode::expand_by(size_t min_words, size_t preferred_words) {
1691   size_t min_bytes = min_words * BytesPerWord;
1692   size_t preferred_bytes = preferred_words * BytesPerWord;
1693 
1694   size_t uncommitted = virtual_space()->reserved_size() - virtual_space()->actual_committed_size();
1695 
1696   if (uncommitted < min_bytes) {
1697     return false;
1698   }
1699 
1700   size_t commit = MIN2(preferred_bytes, uncommitted);
1701   bool result = virtual_space()->expand_by(commit, false);
1702 
1703   if (result) {
1704     log_trace(gc, metaspace, freelist)("Expanded %s virtual space list node by " SIZE_FORMAT " words.",
1705               (is_class() ? "class" : "non-class"), commit);

1706   } else {
1707     log_trace(gc, metaspace, freelist)("Failed to expand %s virtual space list node by " SIZE_FORMAT " words.",
1708               (is_class() ? "class" : "non-class"), commit);
1709   }
1710 
1711   assert(result, "Failed to commit memory");
1712 
1713   return result;
1714 }
1715 
1716 Metachunk* VirtualSpaceNode::get_chunk_vs(size_t chunk_word_size) {
1717   assert_lock_strong(MetaspaceExpand_lock);
1718   Metachunk* result = take_from_committed(chunk_word_size);
1719   return result;
1720 }
1721 
1722 bool VirtualSpaceNode::initialize() {
1723 
1724   if (!_rs.is_reserved()) {
1725     return false;


1746     set_reserved(MemRegion((HeapWord*)_rs.base(),
1747                  (HeapWord*)(_rs.base() + _rs.size())));
1748 
1749     assert(reserved()->start() == (HeapWord*) _rs.base(),
1750            "Reserved start was not set properly " PTR_FORMAT
1751            " != " PTR_FORMAT, p2i(reserved()->start()), p2i(_rs.base()));
1752     assert(reserved()->word_size() == _rs.size() / BytesPerWord,
1753            "Reserved size was not set properly " SIZE_FORMAT
1754            " != " SIZE_FORMAT, reserved()->word_size(),
1755            _rs.size() / BytesPerWord);
1756   }
1757 
1758   // Initialize Occupancy Map.
1759   const size_t smallest_chunk_size = is_class() ? ClassSpecializedChunk : SpecializedChunk;
1760   _occupancy_map = new OccupancyMap(bottom(), reserved_words(), smallest_chunk_size);
1761 
1762   return result;
1763 }
1764 
1765 void VirtualSpaceNode::print_on(outputStream* st) const {
1766   size_t used = used_words_in_vs();
1767   size_t capacity = capacity_words_in_vs();
1768   VirtualSpace* vs = virtual_space();
1769   st->print_cr("   space @ " PTR_FORMAT " " SIZE_FORMAT "K, " SIZE_FORMAT_W(3) "%% used "
1770            "[" PTR_FORMAT ", " PTR_FORMAT ", "
1771            PTR_FORMAT ", " PTR_FORMAT ")",
1772            p2i(vs), capacity / K,
1773            capacity == 0 ? 0 : used * 100 / capacity,
1774            p2i(bottom()), p2i(top()), p2i(end()),
1775            p2i(vs->high_boundary()));
1776 }
1777 
1778 #ifdef ASSERT
1779 void VirtualSpaceNode::mangle() {
1780   size_t word_size = capacity_words_in_vs();
1781   Copy::fill_to_words((HeapWord*) low(), word_size, 0xf1f1f1f1);
1782 }
1783 #endif // ASSERT
1784 
1785 // VirtualSpaceList methods
1786 // Space allocated from the VirtualSpace
1787 
1788 VirtualSpaceList::~VirtualSpaceList() {
1789   VirtualSpaceListIterator iter(virtual_space_list());
1790   while (iter.repeat()) {
1791     VirtualSpaceNode* vsl = iter.get_next();
1792     delete vsl;
1793   }


1975 // Walk the list of VirtualSpaceNodes and delete
1976 // nodes with a 0 container_count.  Remove Metachunks in
1977 // the node from their respective freelists.
1978 void VirtualSpaceList::purge(ChunkManager* chunk_manager) {
1979   assert(SafepointSynchronize::is_at_safepoint(), "must be called at safepoint for contains to work");
1980   assert_lock_strong(MetaspaceExpand_lock);
1981   // Don't use a VirtualSpaceListIterator because this
1982   // list is being changed and a straightforward use of an iterator is not safe.
1983   VirtualSpaceNode* purged_vsl = NULL;
1984   VirtualSpaceNode* prev_vsl = virtual_space_list();
1985   VirtualSpaceNode* next_vsl = prev_vsl;
1986   while (next_vsl != NULL) {
1987     VirtualSpaceNode* vsl = next_vsl;
1988     DEBUG_ONLY(vsl->verify_container_count();)
1989     next_vsl = vsl->next();
1990     // Don't free the current virtual space since it will likely
1991     // be needed soon.
1992     if (vsl->container_count() == 0 && vsl != current_virtual_space()) {
1993       log_trace(gc, metaspace, freelist)("Purging VirtualSpaceNode " PTR_FORMAT " (capacity: " SIZE_FORMAT
1994                                          ", used: " SIZE_FORMAT ").", p2i(vsl), vsl->capacity_words_in_vs(), vsl->used_words_in_vs());

1995       // Unlink it from the list
1996       if (prev_vsl == vsl) {
1997         // This is the case of the current node being the first node.
1998         assert(vsl == virtual_space_list(), "Expected to be the first node");
1999         set_virtual_space_list(vsl->next());
2000       } else {
2001         prev_vsl->set_next(vsl->next());
2002       }
2003 
2004       vsl->purge(chunk_manager);
2005       dec_reserved_words(vsl->reserved_words());
2006       dec_committed_words(vsl->committed_words());
2007       dec_virtual_space_count();
2008       purged_vsl = vsl;
2009       delete vsl;
2010     } else {
2011       prev_vsl = vsl;
2012     }
2013   }
2014 #ifdef ASSERT


2122   if (vs_word_size == 0) {
2123     assert(false, "vs_word_size should always be at least _reserve_alignment large.");
2124     return false;
2125   }
2126 
2127   // Reserve the space
2128   size_t vs_byte_size = vs_word_size * BytesPerWord;
2129   assert_is_aligned(vs_byte_size, Metaspace::reserve_alignment());
2130 
2131   // Allocate the meta virtual space and initialize it.
2132   VirtualSpaceNode* new_entry = new VirtualSpaceNode(is_class(), vs_byte_size);
2133   if (!new_entry->initialize()) {
2134     delete new_entry;
2135     return false;
2136   } else {
2137     assert(new_entry->reserved_words() == vs_word_size,
2138         "Reserved memory size differs from requested memory size");
2139     // ensure lock-free iteration sees fully initialized node
2140     OrderAccess::storestore();
2141     link_vs(new_entry);

2142     return true;
2143   }
2144 }
2145 
2146 void VirtualSpaceList::link_vs(VirtualSpaceNode* new_entry) {
2147   if (virtual_space_list() == NULL) {
2148       set_virtual_space_list(new_entry);
2149   } else {
2150     current_virtual_space()->set_next(new_entry);
2151   }
2152   set_current_virtual_space(new_entry);
2153   inc_reserved_words(new_entry->reserved_words());
2154   inc_committed_words(new_entry->committed_words());
2155   inc_virtual_space_count();
2156 #ifdef ASSERT
2157   new_entry->mangle();
2158 #endif
2159   LogTarget(Trace, gc, metaspace) lt;
2160   if (lt.is_enabled()) {
2161     LogStream ls(lt);


2271   // additional required padding chunks.
2272   const size_t size_for_padding = largest_possible_padding_size_for_chunk(chunk_word_size, this->is_class());
2273 
2274   size_t min_word_size       = align_up(chunk_word_size + size_for_padding, Metaspace::commit_alignment_words());
2275   size_t preferred_word_size = align_up(suggested_commit_granularity, Metaspace::commit_alignment_words());
2276   if (min_word_size >= preferred_word_size) {
2277     // Can happen when humongous chunks are allocated.
2278     preferred_word_size = min_word_size;
2279   }
2280 
2281   bool expanded = expand_by(min_word_size, preferred_word_size);
2282   if (expanded) {
2283     next = current_virtual_space()->get_chunk_vs(chunk_word_size);
2284     assert(next != NULL, "The allocation was expected to succeed after the expansion");
2285   }
2286 
2287   return next;
2288 }
2289 
2290 void VirtualSpaceList::print_on(outputStream* st) const {
2291   VirtualSpaceListIterator iter(virtual_space_list());
2292   while (iter.repeat()) {

2293     VirtualSpaceNode* node = iter.get_next();
2294     node->print_on(st);
2295   }
2296 }
2297 
2298 void VirtualSpaceList::print_map(outputStream* st) const {
2299   VirtualSpaceNode* list = virtual_space_list();
2300   VirtualSpaceListIterator iter(list);
2301   unsigned i = 0;
2302   while (iter.repeat()) {
2303     st->print_cr("Node %u:", i);
2304     VirtualSpaceNode* node = iter.get_next();
2305     node->print_map(st, this->is_class());
2306     i ++;
2307   }
2308 }
2309 
2310 // MetaspaceGC methods
2311 
2312 // VM_CollectForMetadataAllocation is the vm operation used to GC.
2313 // Within the VM operation after the GC the attempt to allocate the metadata
2314 // should succeed.  If the GC did not free enough space for the metaspace


3055       size_chunks_returned += cur->word_size();
3056     }
3057     return_single_chunk(index, cur);
3058     cur = next;
3059   }
3060   if (log.is_enabled()) { // tracing
3061     log.print("returned %u %s chunks to freelist, total word size " SIZE_FORMAT ".",
3062         num_chunks_returned, chunk_size_name(index), size_chunks_returned);
3063     if (index != HumongousIndex) {
3064       log.print("updated freelist count: " SIZE_FORMAT ".", free_chunks(index)->size());
3065     } else {
3066       log.print("updated dictionary count " SIZE_FORMAT ".", _humongous_dictionary.total_count());
3067     }
3068   }
3069 }
3070 
3071 void ChunkManager::print_on(outputStream* out) const {
3072   _humongous_dictionary.report_statistics(out);
3073 }
3074 
3075 void ChunkManager::locked_get_statistics(ChunkManagerStatistics* stat) const {
3076   assert_lock_strong(MetaspaceExpand_lock);
3077   for (ChunkIndex i = ZeroIndex; i < NumberOfFreeLists; i = next_chunk_index(i)) {
3078     stat->num_by_type[i] = num_free_chunks(i);
3079     stat->single_size_by_type[i] = size_by_index(i);
3080     stat->total_size_by_type[i] = size_free_chunks_in_bytes(i);
3081   }
3082   stat->num_humongous_chunks = num_free_chunks(HumongousIndex);
3083   stat->total_size_humongous_chunks = size_free_chunks_in_bytes(HumongousIndex);
3084 }
3085 
3086 void ChunkManager::get_statistics(ChunkManagerStatistics* stat) const {
3087   MutexLockerEx cl(MetaspaceExpand_lock,
3088                    Mutex::_no_safepoint_check_flag);
3089   locked_get_statistics(stat);
3090 }
3091 
3092 void ChunkManager::print_statistics(const ChunkManagerStatistics* stat, outputStream* out, size_t scale) {
3093   size_t total = 0;
3094   assert(scale == 1 || scale == K || scale == M || scale == G, "Invalid scale");
3095 
3096   const char* unit = scale_unit(scale);
3097   for (ChunkIndex i = ZeroIndex; i < NumberOfFreeLists; i = next_chunk_index(i)) {
3098     out->print("  " SIZE_FORMAT " %s (" SIZE_FORMAT " bytes) chunks, total ",
3099                    stat->num_by_type[i], chunk_size_name(i),
3100                    stat->single_size_by_type[i]);
3101     if (scale == 1) {
3102       out->print_cr(SIZE_FORMAT " bytes", stat->total_size_by_type[i]);
3103     } else {
3104       out->print_cr("%.2f%s", (float)stat->total_size_by_type[i] / scale, unit);
3105     }
3106 
3107     total += stat->total_size_by_type[i];
3108   }
3109 
3110 
3111   total += stat->total_size_humongous_chunks;
3112 
3113   if (scale == 1) {
3114     out->print_cr("  " SIZE_FORMAT " humongous chunks, total " SIZE_FORMAT " bytes",
3115     stat->num_humongous_chunks, stat->total_size_humongous_chunks);
3116 
3117     out->print_cr("  total size: " SIZE_FORMAT " bytes.", total);
3118   } else {
3119     out->print_cr("  " SIZE_FORMAT " humongous chunks, total %.2f%s",
3120     stat->num_humongous_chunks,
3121     (float)stat->total_size_humongous_chunks / scale, unit);
3122 
3123     out->print_cr("  total size: %.2f%s.", (float)total / scale, unit);
3124   }
3125 
3126 }
3127 
3128 void ChunkManager::print_all_chunkmanagers(outputStream* out, size_t scale) {
3129   assert(scale == 1 || scale == K || scale == M || scale == G, "Invalid scale");
3130 
3131   // Note: keep lock protection only for retrieving the statistics; do the
3132   // printing outside of lock protection.
3133   ChunkManagerStatistics stat;
3134   out->print_cr("Chunkmanager (non-class):");
3135   const ChunkManager* const non_class_cm = Metaspace::chunk_manager_metadata();
3136   if (non_class_cm != NULL) {
3137     non_class_cm->get_statistics(&stat);
3138     ChunkManager::print_statistics(&stat, out, scale);
3139   } else {
3140     out->print_cr("unavailable.");
3141   }
3142   out->print_cr("Chunkmanager (class):");
3143   const ChunkManager* const class_cm = Metaspace::chunk_manager_class();
3144   if (class_cm != NULL) {
3145     class_cm->get_statistics(&stat);
3146     ChunkManager::print_statistics(&stat, out, scale);
3147   } else {
3148     out->print_cr("unavailable.");
3149   }
3150 }
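// Usage sketch, assuming the global 'tty' stream: dump both chunk managers scaled to KB.
//   ChunkManager::print_all_chunkmanagers(tty, K);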
3151 
3152 // SpaceManager methods
3153 
3154 size_t SpaceManager::adjust_initial_chunk_size(size_t requested, bool is_class_space) {
3155   size_t chunk_sizes[] = {
3156       specialized_chunk_size(is_class_space),
3157       small_chunk_size(is_class_space),
3158       medium_chunk_size(is_class_space)
3159   };
3160 
3161   // Adjust up to one of the fixed chunk sizes ...
3162   for (size_t i = 0; i < ARRAY_SIZE(chunk_sizes); i++) {
3163     if (requested <= chunk_sizes[i]) {
3164       return chunk_sizes[i];
3165     }
3166   }
3167 
3168   // ... or return the size as a humongous chunk.


3184     default:                                 requested = ClassSmallChunk; break;
3185     }
3186   } else {
3187     switch (type) {
3188     case Metaspace::BootMetaspaceType:       requested = Metaspace::first_chunk_word_size(); break;
3189     case Metaspace::AnonymousMetaspaceType:  requested = SpecializedChunk; break;
3190     case Metaspace::ReflectionMetaspaceType: requested = SpecializedChunk; break;
3191     default:                                 requested = SmallChunk; break;
3192     }
3193   }
3194 
3195   // Adjust to one of the fixed chunk sizes (unless humongous)
3196   const size_t adjusted = adjust_initial_chunk_size(requested);
3197 
3198   assert(adjusted != 0, "Incorrect initial chunk size. Requested: "
3199          SIZE_FORMAT " adjusted: " SIZE_FORMAT, requested, adjusted);
3200 
3201   return adjusted;
3202 }
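// Worked example of the adjustment above, assuming the default non-class chunk sizes
// from the ChunkSizes enum (SpecializedChunk 128, SmallChunk 512, MediumChunk 8 * K):
//   requested =  100 -> adjusted to SpecializedChunk (128 words)
//   requested =  300 -> adjusted to SmallChunk       (512 words)
//   requested = 10*K -> larger than MediumChunk, returned unchanged (humongous)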
3203 
3204 size_t SpaceManager::sum_free_in_chunks_in_use() const {
3205   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3206   size_t free = 0;
3207   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3208     Metachunk* chunk = chunks_in_use(i);
3209     while (chunk != NULL) {
3210       free += chunk->free_word_size();
3211       chunk = chunk->next();
3212     }
3213   }
3214   return free;
3215 }
3216 
3217 size_t SpaceManager::sum_waste_in_chunks_in_use() const {
3218   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3219   size_t result = 0;
3220   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3221    result += sum_waste_in_chunks_in_use(i);
3222   }
3223 
3224   return result;
3225 }
3226 
3227 size_t SpaceManager::sum_waste_in_chunks_in_use(ChunkIndex index) const {
3228   size_t result = 0;
3229   Metachunk* chunk = chunks_in_use(index);
3230   // Count the free space in all the chunks but not the
3231   // current chunk from which allocations are still being done.
3232   while (chunk != NULL) {
3233     if (chunk != current_chunk()) {
3234       result += chunk->free_word_size();
3235     }
3236     chunk = chunk->next();
3237   }
3238   return result;
3239 }
3240 
3241 size_t SpaceManager::sum_capacity_in_chunks_in_use() const {
3242   // For CMS use "allocated_chunks_words()" which does not need the
3243   // Metaspace lock.  For the other collectors sum over the
3244   // lists.  Use both methods as a check that "allocated_chunks_words()"
3245   // is correct.  That is, sum_capacity_in_chunks() is too expensive
3246   // to use in the product and allocated_chunks_words() should be used
3247   // but allow for checking that allocated_chunks_words() returns the same
3248   // value as sum_capacity_in_chunks_in_use() which is the definitive
3249   // answer.
3250   if (UseConcMarkSweepGC) {
3251     return allocated_chunks_words();
3252   } else {
3253     MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3254     size_t sum = 0;
3255     for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3256       Metachunk* chunk = chunks_in_use(i);
3257       while (chunk != NULL) {
3258         sum += chunk->word_size();
3259         chunk = chunk->next();
3260       }
3261     }
3262     return sum;
3263   }
3264 }
3265 
3266 size_t SpaceManager::sum_count_in_chunks_in_use() {
3267   size_t count = 0;
3268   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3269     count = count + sum_count_in_chunks_in_use(i);
3270   }
3271 
3272   return count;
3273 }
3274 
3275 size_t SpaceManager::sum_count_in_chunks_in_use(ChunkIndex i) {
3276   size_t count = 0;
3277   Metachunk* chunk = chunks_in_use(i);
3278   while (chunk != NULL) {
3279     count++;
3280     chunk = chunk->next();
3281   }
3282   return count;
3283 }
3284 
3285 
3286 size_t SpaceManager::sum_used_in_chunks_in_use() const {
3287   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3288   size_t used = 0;
3289   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3290     Metachunk* chunk = chunks_in_use(i);
3291     while (chunk != NULL) {
3292       used += chunk->used_word_size();
3293       chunk = chunk->next();
3294     }
3295   }
3296   return used;
3297 }
3298 
3299 void SpaceManager::locked_print_chunks_in_use_on(outputStream* st) const {
3300 
3301   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3302     Metachunk* chunk = chunks_in_use(i);
3303     st->print("SpaceManager: %s " PTR_FORMAT,
3304                  chunk_size_name(i), p2i(chunk));
3305     if (chunk != NULL) {
3306       st->print_cr(" free " SIZE_FORMAT,
3307                    chunk->free_word_size());
3308     } else {
3309       st->cr();
3310     }
3311   }
3312 
3313   chunk_manager()->locked_print_free_chunks(st);
3314   chunk_manager()->locked_print_sum_free_chunks(st);
3315 }
3316 
3317 size_t SpaceManager::calc_chunk_size(size_t word_size) {
3318 


3400   size_t chunk_word_size = calc_chunk_size(word_size);
3401   Metachunk* next = get_new_chunk(chunk_word_size);
3402 
3403   MetaWord* mem = NULL;
3404 
3405   // If a chunk was available, add it to the in-use chunk list
3406   // and do an allocation from it.
3407   if (next != NULL) {
3408     // Add to this manager's list of chunks in use.
3409     add_chunk(next, false);
3410     mem = next->allocate(word_size);
3411   }
3412 
3413   // Track metaspace memory usage statistic.
3414   track_metaspace_memory_usage();
3415 
3416   return mem;
3417 }
3418 
3419 void SpaceManager::print_on(outputStream* st) const {
3420 
3421   for (ChunkIndex i = ZeroIndex;
3422        i < NumberOfInUseLists ;
3423        i = next_chunk_index(i) ) {
3424     st->print_cr("  chunks_in_use " PTR_FORMAT " chunk size " SIZE_FORMAT,
3425                  p2i(chunks_in_use(i)),
3426                  chunks_in_use(i) == NULL ? 0 : chunks_in_use(i)->word_size());
3427   }
3428   st->print_cr("    waste:  Small " SIZE_FORMAT " Medium " SIZE_FORMAT
3429                " Humongous " SIZE_FORMAT,
3430                sum_waste_in_chunks_in_use(SmallIndex),
3431                sum_waste_in_chunks_in_use(MediumIndex),
3432                sum_waste_in_chunks_in_use(HumongousIndex));
3433   // block free lists
3434   if (block_freelists() != NULL) {
3435     st->print_cr("total in block free lists " SIZE_FORMAT,
3436       block_freelists()->total_size());
3437   }
3438 }
3439 
3440 SpaceManager::SpaceManager(Metaspace::MetadataType mdtype,
3441                            Metaspace::MetaspaceType space_type,
3442                            Mutex* lock) :
3443   _mdtype(mdtype),
3444   _space_type(space_type),
3445   _allocated_blocks_words(0),
3446   _allocated_chunks_words(0),
3447   _allocated_chunks_count(0),
3448   _block_freelists(NULL),
3449   _lock(lock)
3450 {
3451   initialize();
3452 }
3453 
3454 void SpaceManager::inc_size_metrics(size_t words) {
3455   assert_lock_strong(MetaspaceExpand_lock);
3456   // Total of allocated Metachunks and allocated Metachunks count
3457   // for each SpaceManager
3458   _allocated_chunks_words = _allocated_chunks_words + words;
3459   _allocated_chunks_count++;

3460   // Global total of capacity in allocated Metachunks
3461   MetaspaceUtils::inc_capacity(mdtype(), words);
3462   // Global total of allocated Metablocks.
3463   // used_words_slow() includes the overhead in each
3464   // Metachunk so include it in the used when the
3465   // Metachunk is first added (so only added once per
3466   // Metachunk).
3467   MetaspaceUtils::inc_used(mdtype(), Metachunk::overhead());
3468 }
3469 
3470 void SpaceManager::inc_used_metrics(size_t words) {
3471   // Add to the per SpaceManager total
3472   Atomic::add(words, &_allocated_blocks_words);
3473   // Add to the global total
3474   MetaspaceUtils::inc_used(mdtype(), words);
3475 }
3476 
3477 void SpaceManager::dec_total_from_size_metrics() {
3478   MetaspaceUtils::dec_capacity(mdtype(), allocated_chunks_words());
3479   MetaspaceUtils::dec_used(mdtype(), allocated_blocks_words());
3480   // Also deduct the overhead per Metachunk
3481   MetaspaceUtils::dec_used(mdtype(), allocated_chunks_count() * Metachunk::overhead());
3482 }
3483 
3484 void SpaceManager::initialize() {
3485   Metadebug::init_allocation_fail_alot_count();
3486   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3487     _chunks_in_use[i] = NULL;
3488   }
3489   _current_chunk = NULL;
3490   log_trace(gc, metaspace, freelist)("SpaceManager(): " PTR_FORMAT, p2i(this));
3491 }
3492 
3493 SpaceManager::~SpaceManager() {

3494   // This takes this->_lock, which can't be done while holding MetaspaceExpand_lock.
3495   assert(sum_capacity_in_chunks_in_use() == allocated_chunks_words(),
3496          "sum_capacity_in_chunks_in_use() " SIZE_FORMAT
3497          " allocated_chunks_words() " SIZE_FORMAT,
3498          sum_capacity_in_chunks_in_use(), allocated_chunks_words());
3499 
3500   MutexLockerEx fcl(MetaspaceExpand_lock,
3501                     Mutex::_no_safepoint_check_flag);
3502 
3503   assert(sum_count_in_chunks_in_use() == allocated_chunks_count(),
3504          "sum_count_in_chunks_in_use() " SIZE_FORMAT
3505          " allocated_chunks_count() " SIZE_FORMAT,
3506          sum_count_in_chunks_in_use(), allocated_chunks_count());
3507 
3508   chunk_manager()->slow_locked_verify();
3509 
3510   dec_total_from_size_metrics();
3511 
3512   Log(gc, metaspace, freelist) log;
3513   if (log.is_trace()) {
3514     log.trace("~SpaceManager(): " PTR_FORMAT, p2i(this));
3515     ResourceMark rm;
3516     LogStream ls(log.trace());
3517     locked_print_chunks_in_use_on(&ls);
3518     if (block_freelists() != NULL) {
3519       block_freelists()->print_on(&ls);
3520     }
3521   }
3522 
3523   // Add all the chunks in use by this space manager
3524   // to the global list of free chunks.
3525 
3526   // Follow each list of chunks-in-use and add them to the
3527   // free lists.  Each list is NULL terminated.
3528 
3529   for (ChunkIndex i = ZeroIndex; i <= HumongousIndex; i = next_chunk_index(i)) {
3530     Metachunk* chunks = chunks_in_use(i);
3531     chunk_manager()->return_chunk_list(i, chunks);
3532     set_chunks_in_use(i, NULL);
3533   }
3534 
3535   chunk_manager()->slow_locked_verify();
3536 
3537   if (_block_freelists != NULL) {
3538     delete _block_freelists;
3539   }
3540 }
3541 
3542 void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
3543   assert_lock_strong(_lock);
3544   // Allocations and deallocations are in raw_word_size
3545   size_t raw_word_size = get_allocation_word_size(word_size);
3546   // Lazily create a block_freelist
3547   if (block_freelists() == NULL) {
3548     _block_freelists = new BlockFreelist();
3549   }
3550   block_freelists()->return_block(p, raw_word_size);
3551 }
3552 
3553 // Adds a chunk to the list of chunks in use.
3554 void SpaceManager::add_chunk(Metachunk* new_chunk, bool make_current) {
3555 
3556   assert(new_chunk != NULL, "Should not be NULL");
3557   assert(new_chunk->next() == NULL, "Should not be on a list");
3558 
3559   new_chunk->reset_empty();
3560 
3561   // Find the correct list and set the current
3562   // chunk for that list.
3563   ChunkIndex index = chunk_manager()->list_index(new_chunk->word_size());


3574     if (make_current) {
3575       // Set as the current chunk but otherwise treat as a humongous chunk.
3576       set_current_chunk(new_chunk);
3577     }
3578     // Link at head.  The _current_chunk only points to a humongous chunk for
3579     // the null class loader metaspace (class and data virtual space managers)
3580     // that allocates humongous chunks, so it will not point to the tail
3581     // of the humongous chunks list.
3582     new_chunk->set_next(chunks_in_use(HumongousIndex));
3583     set_chunks_in_use(HumongousIndex, new_chunk);
3584 
3585     assert(new_chunk->word_size() > medium_chunk_size(), "List inconsistency");
3586   }
3587 
3588   // Add to the running sum of capacity
3589   inc_size_metrics(new_chunk->word_size());
3590 
3591   assert(new_chunk->is_empty(), "Not ready for reuse");
3592   Log(gc, metaspace, freelist) log;
3593   if (log.is_trace()) {
3594     log.trace("SpaceManager::add_chunk: " SIZE_FORMAT ") ", sum_count_in_chunks_in_use());
3595     ResourceMark rm;
3596     LogStream ls(log.trace());
3597     new_chunk->print_on(&ls);
3598     chunk_manager()->locked_print_free_chunks(&ls);
3599   }
3600 }
3601 
3602 void SpaceManager::retire_current_chunk() {
3603   if (current_chunk() != NULL) {
3604     size_t remaining_words = current_chunk()->free_word_size();
3605     if (remaining_words >= BlockFreelist::min_dictionary_size()) {
3606       MetaWord* ptr = current_chunk()->allocate(remaining_words);
3607       deallocate(ptr, remaining_words);
3608       inc_used_metrics(remaining_words);
3609     }
3610   }
3611 }
3612 
3613 Metachunk* SpaceManager::get_new_chunk(size_t chunk_word_size) {
3614   // Get a chunk from the chunk freelist


3623   if (log.is_debug() && next != NULL &&
3624       SpaceManager::is_humongous(next->word_size())) {
3625     log.debug("  new humongous chunk word size " PTR_FORMAT, next->word_size());
3626   }
3627 
3628   return next;
3629 }
3630 
3631 MetaWord* SpaceManager::allocate(size_t word_size) {
3632   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3633   size_t raw_word_size = get_allocation_word_size(word_size);
3634   BlockFreelist* fl =  block_freelists();
3635   MetaWord* p = NULL;
3636   // Allocation from the dictionary is expensive in the sense that
3637   // the dictionary has to be searched for a size.  Don't allocate
3638   // from the dictionary until it starts to get fat.  Is this
3639   // a reasonable policy?  Maybe a skinny dictionary is fast enough
3640   // for allocations.  Do some profiling.  JJJ
3641   if (fl != NULL && fl->total_size() > allocation_from_dictionary_limit) {
3642     p = fl->get_block(raw_word_size);
3643   }
3644   if (p == NULL) {
3645     p = allocate_work(raw_word_size);
3646   }
3647 
3648   return p;
3649 }
3650 
3651 // Returns the address of space allocated for "word_size".
3652 // This method does not know about blocks (Metablocks)
3653 MetaWord* SpaceManager::allocate_work(size_t word_size) {
3654   assert_lock_strong(_lock);
3655 #ifdef ASSERT
3656   if (Metadebug::test_metadata_failure()) {
3657     return NULL;
3658   }
3659 #endif
3660   // Is there space in the current chunk?
3661   MetaWord* result = NULL;
3662 
3663   if (current_chunk() != NULL) {
3664     result = current_chunk()->allocate(word_size);
3665   }
3666 
3667   if (result == NULL) {
3668     result = grow_and_allocate(word_size);
3669   }
3670 
3671   if (result != NULL) {
3672     inc_used_metrics(word_size);
3673     assert(result != (MetaWord*) chunks_in_use(MediumIndex),
3674            "Head of the list is being allocated");


3680 void SpaceManager::verify() {
3681   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3682     Metachunk* curr = chunks_in_use(i);
3683     while (curr != NULL) {
3684       DEBUG_ONLY(do_verify_chunk(curr);)
3685       assert(curr->is_tagged_free() == false, "Chunk should be tagged as in use.");
3686       curr = curr->next();
3687     }
3688   }
3689 }
3690 
3691 void SpaceManager::verify_chunk_size(Metachunk* chunk) {
3692   assert(is_humongous(chunk->word_size()) ||
3693          chunk->word_size() == medium_chunk_size() ||
3694          chunk->word_size() == small_chunk_size() ||
3695          chunk->word_size() == specialized_chunk_size(),
3696          "Chunk size is wrong");
3697   return;
3698 }
3699 
3700 #ifdef ASSERT
3701 void SpaceManager::verify_allocated_blocks_words() {
3702   // Verification is only guaranteed at a safepoint.
3703   assert(SafepointSynchronize::is_at_safepoint() || !Universe::is_fully_initialized(),
3704     "Verification can fail if the application is running");
3705   assert(allocated_blocks_words() == sum_used_in_chunks_in_use(),
3706          "allocation total is not consistent " SIZE_FORMAT
3707          " vs " SIZE_FORMAT,
3708          allocated_blocks_words(), sum_used_in_chunks_in_use());
3709 }
3710 
3711 #endif



3712 
3713 void SpaceManager::dump(outputStream* const out) const {
3714   size_t curr_total = 0;
3715   size_t waste = 0;
3716   uint i = 0;
3717   size_t used = 0;
3718   size_t capacity = 0;
3719 
3720   // Add up statistics for all chunks in this SpaceManager.
3721   for (ChunkIndex index = ZeroIndex;
3722        index < NumberOfInUseLists;
3723        index = next_chunk_index(index)) {
3724     for (Metachunk* curr = chunks_in_use(index);
3725          curr != NULL;
3726          curr = curr->next()) {
3727       out->print("%d) ", i++);
3728       curr->print_on(out);
3729       curr_total += curr->word_size();
3730       used += curr->used_word_size();
3731       capacity += curr->word_size();
3732       waste += curr->free_word_size() + curr->overhead();
3733     }
3734   }
3735 
3736   if (log_is_enabled(Trace, gc, metaspace, freelist)) {
3737     if (block_freelists() != NULL) block_freelists()->print_on(out);
3738   }

3739 
3740   size_t free = current_chunk() == NULL ? 0 : current_chunk()->free_word_size();
3741   // Free space isn't wasted.
3742   waste -= free;

3743 
3744   out->print_cr("total of all chunks "  SIZE_FORMAT " used " SIZE_FORMAT
3745                 " free " SIZE_FORMAT " capacity " SIZE_FORMAT
3746                 " waste " SIZE_FORMAT, curr_total, used, free, capacity, waste);
3747 }


3748 
3749 // MetaspaceUtils
3750 
3751 
3752 size_t MetaspaceUtils::_capacity_words[] = {0, 0};
3753 volatile size_t MetaspaceUtils::_used_words[] = {0, 0};
3754 
3755 size_t MetaspaceUtils::free_bytes(Metaspace::MetadataType mdtype) {
3756   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3757   return list == NULL ? 0 : list->free_bytes();
3758 }
3759 
3760 size_t MetaspaceUtils::free_bytes() {
3761   return free_bytes(Metaspace::ClassType) + free_bytes(Metaspace::NonClassType);
3762 }
3763 
3764 void MetaspaceUtils::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
3765   assert_lock_strong(MetaspaceExpand_lock);
3766   assert(words <= capacity_words(mdtype),
3767          "About to decrement below 0: words " SIZE_FORMAT
3768          " is greater than _capacity_words[%u] " SIZE_FORMAT,
3769          words, mdtype, capacity_words(mdtype));

3770   _capacity_words[mdtype] -= words;
3771 }
3772 
3773 void MetaspaceUtils::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
3774   assert_lock_strong(MetaspaceExpand_lock);
3775   // Needs to be atomic
3776   _capacity_words[mdtype] += words;
3777 }
3778 
3779 void MetaspaceUtils::dec_used(Metaspace::MetadataType mdtype, size_t words) {
3780   assert(words <= used_words(mdtype),
3781          "About to decrement below 0: words " SIZE_FORMAT
3782          " is greater than _used_words[%u] " SIZE_FORMAT,
3783          words, mdtype, used_words(mdtype));
3784   // For CMS deallocation of the Metaspaces occurs during the
3785   // sweep which is a concurrent phase.  Protection by the MetaspaceExpand_lock
3786   // is not enough since allocation is on a per Metaspace basis
3787   // and protected by the Metaspace lock.
3788   Atomic::sub(words, &_used_words[mdtype]);
3789 }
3790 
3791 void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
3792   // _used_words tracks allocations for
3793   // each piece of metadata.  Those allocations are
3794   // generally done concurrently by different application
3795   // threads so must be done atomically.
3796   Atomic::add(words, &_used_words[mdtype]);
3797 }
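// Illustrative note (not part of the original source): the used counters are
// updated in matching inc/dec pairs, e.g. (word counts made up):
//
//   MetaspaceUtils::inc_used(Metaspace::NonClassType, 512);  // 512 words allocated
//   ...
//   MetaspaceUtils::dec_used(Metaspace::NonClassType, 512);  // the same amount freed
//
// inc_used()/dec_used() use Atomic::add()/Atomic::sub() because they may run
// concurrently, while inc_capacity()/dec_capacity() rely on the
// MetaspaceExpand_lock being held by the caller.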
3798 
3799 size_t MetaspaceUtils::used_bytes_slow(Metaspace::MetadataType mdtype) {
3800   size_t used = 0;
3801   ClassLoaderDataGraphMetaspaceIterator iter;
3802   while (iter.repeat()) {
3803     ClassLoaderMetaspace* msp = iter.get_next();
3804     // Sum allocated_blocks_words for each metaspace
3805     if (msp != NULL) {
3806       used += msp->used_words_slow(mdtype);
3807     }
3808   }
3809   return used * BytesPerWord;
3810 }
3811 
3812 size_t MetaspaceUtils::free_bytes_slow(Metaspace::MetadataType mdtype) {
3813   size_t free = 0;
3814   ClassLoaderDataGraphMetaspaceIterator iter;
3815   while (iter.repeat()) {
3816     ClassLoaderMetaspace* msp = iter.get_next();
3817     if (msp != NULL) {
3818       free += msp->free_words_slow(mdtype);
3819     }
3820   }
3821   return free * BytesPerWord;
3822 }
3823 
3824 size_t MetaspaceUtils::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
3825   if ((mdtype == Metaspace::ClassType) && !Metaspace::using_class_space()) {
3826     return 0;
3827   }
3828   // Don't count the space in the freelists.  That space will be
3829   // added to the capacity calculation as needed.
3830   size_t capacity = 0;
3831   ClassLoaderDataGraphMetaspaceIterator iter;
3832   while (iter.repeat()) {
3833     ClassLoaderMetaspace* msp = iter.get_next();
3834     if (msp != NULL) {
3835       capacity += msp->capacity_words_slow(mdtype);
3836     }
3837   }
3838   return capacity * BytesPerWord;
3839 }
3840 
3841 size_t MetaspaceUtils::capacity_bytes_slow() {
3842 #ifdef PRODUCT
3843   // Use capacity_bytes() in PRODUCT instead of this function.
3844   guarantee(false, "Should not call capacity_bytes_slow() in product builds");
3845 #endif
3846   size_t class_capacity = capacity_bytes_slow(Metaspace::ClassType);
3847   size_t non_class_capacity = capacity_bytes_slow(Metaspace::NonClassType);
3848   assert(capacity_bytes() == class_capacity + non_class_capacity,
3849          "bad accounting: capacity_bytes() " SIZE_FORMAT
3850          " class_capacity + non_class_capacity " SIZE_FORMAT
3851          " class_capacity " SIZE_FORMAT " non_class_capacity " SIZE_FORMAT,
3852          capacity_bytes(), class_capacity + non_class_capacity,
3853          class_capacity, non_class_capacity);
3854 
3855   return class_capacity + non_class_capacity;
3856 }
3857 
3858 size_t MetaspaceUtils::reserved_bytes(Metaspace::MetadataType mdtype) {
3859   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3860   return list == NULL ? 0 : list->reserved_bytes();
3861 }
3862 
3863 size_t MetaspaceUtils::committed_bytes(Metaspace::MetadataType mdtype) {
3864   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3865   return list == NULL ? 0 : list->committed_bytes();
3866 }
3867 
3868 size_t MetaspaceUtils::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }
3869 
3870 size_t MetaspaceUtils::free_chunks_total_words(Metaspace::MetadataType mdtype) {
3871   ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
3872   if (chunk_manager == NULL) {
3873     return 0;
3874   }
3875   chunk_manager->slow_verify();
3876   return chunk_manager->free_chunks_total_words();
3877 }


3917                 "reserved "  SIZE_FORMAT "K",
3918                 used_bytes()/K,
3919                 capacity_bytes()/K,
3920                 committed_bytes()/K,
3921                 reserved_bytes()/K);
3922 
3923   if (Metaspace::using_class_space()) {
3924     Metaspace::MetadataType ct = Metaspace::ClassType;
3925     out->print_cr("  class space    "
3926                   "used "      SIZE_FORMAT "K, "
3927                   "capacity "  SIZE_FORMAT "K, "
3928                   "committed " SIZE_FORMAT "K, "
3929                   "reserved "  SIZE_FORMAT "K",
3930                   used_bytes(ct)/K,
3931                   capacity_bytes(ct)/K,
3932                   committed_bytes(ct)/K,
3933                   reserved_bytes(ct)/K);
3934   }
3935 }
3936 
3937 // Print information for class space and data space separately.
3938 // This is almost the same as above.
3939 void MetaspaceUtils::print_on(outputStream* out, Metaspace::MetadataType mdtype) {
3940   size_t free_chunks_capacity_bytes = free_chunks_total_bytes(mdtype);
3941   size_t capacity_bytes = capacity_bytes_slow(mdtype);
3942   size_t used_bytes = used_bytes_slow(mdtype);
3943   size_t free_bytes = free_bytes_slow(mdtype);
3944   size_t used_and_free = used_bytes + free_bytes +
3945                            free_chunks_capacity_bytes;
3946   out->print_cr("  Chunk accounting: (used in chunks " SIZE_FORMAT
3947              "K + unused in chunks " SIZE_FORMAT "K  + "
3948              " capacity in free chunks " SIZE_FORMAT "K) = " SIZE_FORMAT
3949              "K  capacity in allocated chunks " SIZE_FORMAT "K",
3950              used_bytes / K,
3951              free_bytes / K,
3952              free_chunks_capacity_bytes / K,
3953              used_and_free / K,
3954              capacity_bytes / K);
3955   // Accounting can only be correct if we got the values during a safepoint
3956   assert(!SafepointSynchronize::is_at_safepoint() || used_and_free == capacity_bytes, "Accounting is wrong");
3957 }
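// Example of the line printed above (numbers are made up, scale is K):
//
//   Chunk accounting: (used in chunks 3072K + unused in chunks 256K  +  capacity in free chunks 128K) = 3456K  capacity in allocated chunks 3456K
//
// The sum in parentheses (3072 + 256 + 128 = 3456) must match the capacity of
// allocated chunks when the values were taken at a safepoint, which is what
// the assert above checks.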
3958 
3959 // Print total fragmentation for class metaspaces
3960 void MetaspaceUtils::print_class_waste(outputStream* out) {
3961   assert(Metaspace::using_class_space(), "class metaspace not used");
3962   size_t cls_specialized_waste = 0, cls_small_waste = 0, cls_medium_waste = 0;
3963   size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
3964   ClassLoaderDataGraphMetaspaceIterator iter;
3965   while (iter.repeat()) {
3966     ClassLoaderMetaspace* msp = iter.get_next();
3967     if (msp != NULL) {
3968       cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
3969       cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
3970       cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
3971       cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
3972       cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
3973       cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
3974       cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
3975     }
3976   }
3977   out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
3978                 SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
3979                 SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
3980                 "large count " SIZE_FORMAT,
3981                 cls_specialized_count, cls_specialized_waste,
3982                 cls_small_count, cls_small_waste,
3983                 cls_medium_count, cls_medium_waste, cls_humongous_count);
3984 }
3985 
3986 // Print total fragmentation for data and class metaspaces separately
3987 void MetaspaceUtils::print_waste(outputStream* out) {
3988   size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
3989   size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;
3990 
3991   ClassLoaderDataGraphMetaspaceIterator iter;
3992   while (iter.repeat()) {
3993     ClassLoaderMetaspace* msp = iter.get_next();
3994     if (msp != NULL) {
3995       specialized_waste += msp->vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
3996       specialized_count += msp->vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
3997       small_waste += msp->vsm()->sum_waste_in_chunks_in_use(SmallIndex);
3998       small_count += msp->vsm()->sum_count_in_chunks_in_use(SmallIndex);
3999       medium_waste += msp->vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4000       medium_count += msp->vsm()->sum_count_in_chunks_in_use(MediumIndex);
4001       humongous_count += msp->vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4002     }
4003   }
4004   out->print_cr("Total fragmentation waste (words) doesn't count free space");
4005   out->print_cr("  data: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4006                         SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4007                         SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4008                         "large count " SIZE_FORMAT,
4009              specialized_count, specialized_waste, small_count,
4010              small_waste, medium_count, medium_waste, humongous_count);
4011   if (Metaspace::using_class_space()) {
4012     print_class_waste(out);
4013   }
4014 }
4015 
4016 class MetadataStats {
4017 private:
4018   size_t _capacity;
4019   size_t _used;
4020   size_t _free;
4021   size_t _waste;
4022 
4023 public:
4024   MetadataStats() : _capacity(0), _used(0), _free(0), _waste(0) { }
4025   MetadataStats(size_t capacity, size_t used, size_t free, size_t waste)
4026   : _capacity(capacity), _used(used), _free(free), _waste(waste) { }
4027 
4028   void add(const MetadataStats& stats) {
4029     _capacity += stats.capacity();
4030     _used += stats.used();
4031     _free += stats.free();
4032     _waste += stats.waste();
4033   }
4034 
4035   size_t capacity() const { return _capacity; }
4036   size_t used() const     { return _used; }
4037   size_t free() const     { return _free; }
4038   size_t waste() const    { return _waste; }
4039 
4040   void print_on(outputStream* out, size_t scale) const;
4041 };

4042 
4043 
4044 void MetadataStats::print_on(outputStream* out, size_t scale) const {
4045   const char* unit = scale_unit(scale);
4046   out->print_cr("capacity=%10.2f%s used=%10.2f%s free=%10.2f%s waste=%10.2f%s",
4047     (float)capacity() / scale, unit,
4048     (float)used() / scale, unit,
4049     (float)free() / scale, unit,
4050     (float)waste() / scale, unit);
4051 }
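// Illustrative use of MetadataStats (byte values are made up; out is some
// outputStream*):
//
//   MetadataStats a(64 * K, 40 * K, 20 * K, 4 * K);  // capacity, used, free, waste
//   MetadataStats b(32 * K, 30 * K,  1 * K, 1 * K);
//   a.add(b);            // a now holds capacity=96K, used=70K, free=21K, waste=5K
//   a.print_on(out, K);  // "capacity=     96.00KB used=     70.00KB free=     21.00KB waste=      5.00KB"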
4052 
4053 class PrintCLDMetaspaceInfoClosure : public CLDClosure {
4054 private:
4055   outputStream*  _out;
4056   size_t         _scale;
4057 
4058   size_t         _total_count;
4059   MetadataStats  _total_metadata;
4060   MetadataStats  _total_class;
4061 
4062   size_t         _total_anon_count;
4063   MetadataStats  _total_anon_metadata;
4064   MetadataStats  _total_anon_class;
4065 
4066 public:
4067   PrintCLDMetaspaceInfoClosure(outputStream* out, size_t scale = K)
4068   : _out(out), _scale(scale), _total_count(0), _total_anon_count(0) { }
4069 
4070   ~PrintCLDMetaspaceInfoClosure() {
4071     print_summary();

4072   }
4073 
4074   void do_cld(ClassLoaderData* cld) {

4075     assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
4076 
4077     if (cld->is_unloading()) return;
4078     ClassLoaderMetaspace* msp = cld->metaspace_or_null();
4079     if (msp == NULL) {

4080       return;
4081     }
4082 
4083     bool anonymous = false;
4084     if (cld->is_anonymous()) {
4085       _out->print_cr("ClassLoader: for anonymous class");
4086       anonymous = true;
4087     } else {
4088       ResourceMark rm;
4089       _out->print_cr("ClassLoader: %s", cld->loader_name());
4090     }
4091 
4092     print_metaspace(msp, anonymous);
4093     _out->cr();
4094   }
4095 
4096 private:
4097   void print_metaspace(ClassLoaderMetaspace* msp, bool anonymous);
4098   void print_summary() const;
4099 };
4100 
4101 void PrintCLDMetaspaceInfoClosure::print_metaspace(ClassLoaderMetaspace* msp, bool anonymous){
4102   assert(msp != NULL, "Sanity");
4103   SpaceManager* vsm = msp->vsm();
4104   const char* unit = scale_unit(_scale);
4105 
4106   size_t capacity = vsm->sum_capacity_in_chunks_in_use() * BytesPerWord;
4107   size_t used = vsm->sum_used_in_chunks_in_use() * BytesPerWord;
4108   size_t free = vsm->sum_free_in_chunks_in_use() * BytesPerWord;
4109   size_t waste = vsm->sum_waste_in_chunks_in_use() * BytesPerWord;
4110 
4111   _total_count ++;
4112   MetadataStats metadata_stats(capacity, used, free, waste);
4113   _total_metadata.add(metadata_stats);
4114 
4115   if (anonymous) {
4116     _total_anon_count ++;
4117     _total_anon_metadata.add(metadata_stats);
4118   }
4119 
4120   _out->print("  Metadata   ");
4121   metadata_stats.print_on(_out, _scale);

4122 
4123   if (Metaspace::using_class_space()) {
4124     vsm = msp->class_vsm();

4125 
4126     capacity = vsm->sum_capacity_in_chunks_in_use() * BytesPerWord;
4127     used = vsm->sum_used_in_chunks_in_use() * BytesPerWord;
4128     free = vsm->sum_free_in_chunks_in_use() * BytesPerWord;
4129     waste = vsm->sum_waste_in_chunks_in_use() * BytesPerWord;
4130 
4131     MetadataStats class_stats(capacity, used, free, waste);
4132     _total_class.add(class_stats);
4133 
4134     if (anonymous) {
4135       _total_anon_class.add(class_stats);
4136     }
4137 
4138     _out->print("  Class data ");
4139     class_stats.print_on(_out, _scale);
4140   }
4141 }
4142 
4143 void PrintCLDMetaspaceInfoClosure::print_summary() const {
4144   const char* unit = scale_unit(_scale);
4145   _out->cr();
4146   _out->print_cr("Summary:");
4147 
4148   MetadataStats total;
4149   total.add(_total_metadata);
4150   total.add(_total_class);
4151 
4152   _out->print("  Total class loaders=" SIZE_FORMAT_W(6) " ", _total_count);
4153   total.print_on(_out, _scale);
4154 
4155   _out->print("                    Metadata ");
4156   _total_metadata.print_on(_out, _scale);
4157 
4158   if (Metaspace::using_class_space()) {
4159     _out->print("                  Class data ");
4160     _total_class.print_on(_out, _scale);
4161   }
4162   _out->cr();

4163 
4164   MetadataStats total_anon;
4165   total_anon.add(_total_anon_metadata);
4166   total_anon.add(_total_anon_class);
4167 
4168   _out->print("For anonymous classes=" SIZE_FORMAT_W(6) " ", _total_anon_count);
4169   total_anon.print_on(_out, _scale);
4170 
4171   _out->print("                    Metadata ");
4172   _total_anon_metadata.print_on(_out, _scale);



4173 
4174   if (Metaspace::using_class_space()) {
4175     _out->print("                  Class data ");
4176     _total_anon_class.print_on(_out, _scale);
4177   }
4178 }
4179 
4180 void MetaspaceUtils::print_metadata_for_nmt(outputStream* out, size_t scale) {
4181   const char* unit = scale_unit(scale);
4182   out->print_cr("Metaspaces:");
4183   out->print_cr("  Metadata space: reserved=" SIZE_FORMAT_W(10) "%s committed=" SIZE_FORMAT_W(10) "%s",
4184     reserved_bytes(Metaspace::NonClassType) / scale, unit,
4185     committed_bytes(Metaspace::NonClassType) / scale, unit);
4186   if (Metaspace::using_class_space()) {
4187     out->print_cr("  Class    space: reserved=" SIZE_FORMAT_W(10) "%s committed=" SIZE_FORMAT_W(10) "%s",
4188     reserved_bytes(Metaspace::ClassType) / scale, unit,
4189     committed_bytes(Metaspace::ClassType) / scale, unit);
4190   }
4191 
4192   out->cr();
4193   ChunkManager::print_all_chunkmanagers(out, scale);
4194 
4195   out->cr();
4196   out->print_cr("Per-classloader metadata:");
4197   out->cr();

4198 
4199   PrintCLDMetaspaceInfoClosure cl(out, scale);
4200   ClassLoaderDataGraph::cld_do(&cl);
4201 }
4202 
4203 
4204 // Dump global metaspace things from the end of ClassLoaderDataGraph
4205 void MetaspaceUtils::dump(outputStream* out) {
4206   out->print_cr("All Metaspace:");
4207   out->print("data space: "); print_on(out, Metaspace::NonClassType);
4208   out->print("class space: "); print_on(out, Metaspace::ClassType);
4209   print_waste(out);
4210 }
4211 
4212 // Prints an ASCII representation of the given space.
4213 void MetaspaceUtils::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
4214   MutexLockerEx cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
4215   const bool for_class = (mdtype == Metaspace::ClassType);
4216   VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
4217   if (vsl != NULL) {
4218     if (for_class) {
4219       if (!Metaspace::using_class_space()) {
4220         out->print_cr("No Class Space.");
4221         return;
4222       }
4223       out->print_raw("---- Metaspace Map (Class Space) ----");
4224     } else {
4225       out->print_raw("---- Metaspace Map (Non-Class Space) ----");
4226     }
4227     // Print legend:
4228     out->cr();
4229     out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
4230     out->cr();
4232     vsl->print_map(out);
4233     out->cr();
4234   }
4235 }
4236 
4237 void MetaspaceUtils::verify_free_chunks() {
4238   Metaspace::chunk_manager_metadata()->verify();
4239   if (Metaspace::using_class_space()) {
4240     Metaspace::chunk_manager_class()->verify();
4241   }
4242 }
4243 
4244 void MetaspaceUtils::verify_capacity() {
4245 #ifdef ASSERT
4246   size_t running_sum_capacity_bytes = capacity_bytes();
4247   // For purposes of the running sum of capacity, verify against capacity
4248   size_t capacity_in_use_bytes = capacity_bytes_slow();
4249   assert(running_sum_capacity_bytes == capacity_in_use_bytes,
4250          "capacity_words() * BytesPerWord " SIZE_FORMAT
4251          " capacity_bytes_slow()" SIZE_FORMAT,
4252          running_sum_capacity_bytes, capacity_in_use_bytes);
4253   for (Metaspace::MetadataType i = Metaspace::ClassType;
4254        i < Metaspace:: MetadataTypeCount;
4255        i = (Metaspace::MetadataType)(i + 1)) {
4256     size_t capacity_in_use_bytes = capacity_bytes_slow(i);
4257     assert(capacity_bytes(i) == capacity_in_use_bytes,
4258            "capacity_bytes(%u) " SIZE_FORMAT
4259            " capacity_bytes_slow(%u)" SIZE_FORMAT,
4260            i, capacity_bytes(i), i, capacity_in_use_bytes);
4261   }
4262 #endif
4263 }
4264 
4265 void MetaspaceUtils::verify_used() {
4266 #ifdef ASSERT
4267   size_t running_sum_used_bytes = used_bytes();
4268   // For purposes of the running sum of used, verify against used
4269   size_t used_in_use_bytes = used_bytes_slow();
4270   assert(used_bytes() == used_in_use_bytes,
4271          "used_bytes() " SIZE_FORMAT
4272          " used_bytes_slow()" SIZE_FORMAT,
4273          used_bytes(), used_in_use_bytes);
4274   for (Metaspace::MetadataType i = Metaspace::ClassType;
4275        i < Metaspace:: MetadataTypeCount;
4276        i = (Metaspace::MetadataType)(i + 1)) {
4277     size_t used_in_use_bytes = used_bytes_slow(i);
4278     assert(used_bytes(i) == used_in_use_bytes,
4279            "used_bytes(%u) " SIZE_FORMAT
4280            " used_bytes_slow(%u)" SIZE_FORMAT,
4281            i, used_bytes(i), i, used_in_use_bytes);
4282   }

4283 #endif
4284 }
4285 
4286 void MetaspaceUtils::verify_metrics() {
4287   verify_capacity();
4288   verify_used();
4289 }
4290 
4291 
4292 // Metaspace methods
4293 
4294 size_t Metaspace::_first_chunk_word_size = 0;
4295 size_t Metaspace::_first_class_chunk_word_size = 0;
4296 
4297 size_t Metaspace::_commit_alignment = 0;
4298 size_t Metaspace::_reserve_alignment = 0;
4299 
4300 VirtualSpaceList* Metaspace::_space_list = NULL;
4301 VirtualSpaceList* Metaspace::_class_space_list = NULL;
4302 
4303 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
4304 ChunkManager* Metaspace::_chunk_manager_class = NULL;
4305 
4306 #define VIRTUALSPACEMULTIPLIER 2
4307 
4308 #ifdef _LP64
4309 static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
4310 


4693 
4694   return result;
4695 }
4696 
4697 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
4698   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
4699 
4700   // If result is still null, we are out of memory.
4701   Log(gc, metaspace, freelist) log;
4702   if (log.is_info()) {
4703     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
4704              is_class_space_allocation(mdtype) ? "class" : "data", word_size);
4705     ResourceMark rm;
4706     if (log.is_debug()) {
4707       if (loader_data->metaspace_or_null() != NULL) {
4708         LogStream ls(log.debug());
4709         loader_data->print_value_on(&ls);
4710       }
4711     }
4712     LogStream ls(log.info());
4713     MetaspaceUtils::dump(&ls);
4714     MetaspaceUtils::print_metaspace_map(&ls, mdtype);
4715     ChunkManager::print_all_chunkmanagers(&ls);
4716   }
4717 
4718   bool out_of_compressed_class_space = false;
4719   if (is_class_space_allocation(mdtype)) {
4720     ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
4721     out_of_compressed_class_space =
4722       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
4723       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
4724       CompressedClassSpaceSize;
4725   }
4726 
4727   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
4728   const char* space_string = out_of_compressed_class_space ?
4729     "Compressed class space" : "Metaspace";
4730 
4731   report_java_out_of_memory(space_string);
4732 
4733   if (JvmtiExport::should_post_resource_exhausted()) {
4734     JvmtiExport::post_resource_exhausted(
4735         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,


4770   }
4771 }
4772 
4773 bool Metaspace::contains(const void* ptr) {
4774   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
4775     return true;
4776   }
4777   return contains_non_shared(ptr);
4778 }
4779 
4780 bool Metaspace::contains_non_shared(const void* ptr) {
4781   if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
4782      return true;
4783   }
4784 
4785   return get_space_list(NonClassType)->contains(ptr);
4786 }
4787 
4788 // ClassLoaderMetaspace
4789 
4790 ClassLoaderMetaspace::ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type) {





4791   initialize(lock, type);
4792 }
4793 
4794 ClassLoaderMetaspace::~ClassLoaderMetaspace() {

4795   delete _vsm;
4796   if (Metaspace::using_class_space()) {
4797     delete _class_vsm;
4798   }
4799 }

4800 void ClassLoaderMetaspace::initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
4801   Metachunk* chunk = get_initialization_chunk(type, mdtype);
4802   if (chunk != NULL) {
4803     // Add to this manager's list of chunks in use and current_chunk().
4804     get_space_manager(mdtype)->add_chunk(chunk, true);
4805   }
4806 }
4807 
4808 Metachunk* ClassLoaderMetaspace::get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
4809   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
4810 
4811   // Get a chunk from the chunk freelist
4812   Metachunk* chunk = Metaspace::get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
4813 
4814   if (chunk == NULL) {
4815     chunk = Metaspace::get_space_list(mdtype)->get_new_chunk(chunk_word_size,
4816                                                   get_space_manager(mdtype)->medium_chunk_bunch());
4817   }
4818 
4819   return chunk;
4820 }
4821 
4822 void ClassLoaderMetaspace::initialize(Mutex* lock, Metaspace::MetaspaceType type) {
4823   Metaspace::verify_global_initialization();
4824 


4825   // Allocate SpaceManager for metadata objects.
4826   _vsm = new SpaceManager(Metaspace::NonClassType, type, lock);
4827 
4828   if (Metaspace::using_class_space()) {
4829     // Allocate SpaceManager for classes.
4830     _class_vsm = new SpaceManager(Metaspace::ClassType, type, lock);
4831   }
4832 
4833   MutexLockerEx cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
4834 
4835   // Allocate chunk for metadata objects
4836   initialize_first_chunk(type, Metaspace::NonClassType);
4837 
4838   // Allocate chunk for class metadata objects
4839   if (Metaspace::using_class_space()) {
4840     initialize_first_chunk(type, Metaspace::ClassType);
4841   }
4842 }
4843 
4844 MetaWord* ClassLoaderMetaspace::allocate(size_t word_size, Metaspace::MetadataType mdtype) {
4845   Metaspace::assert_not_frozen();



4846   // Don't use class_vsm() unless UseCompressedClassPointers is true.
4847   if (Metaspace::is_class_space_allocation(mdtype)) {
4848     return  class_vsm()->allocate(word_size);
4849   } else {
4850     return  vsm()->allocate(word_size);
4851   }
4852 }
4853 
4854 MetaWord* ClassLoaderMetaspace::expand_and_allocate(size_t word_size, Metaspace::MetadataType mdtype) {
4855   Metaspace::assert_not_frozen();
4856   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
4857   assert(delta_bytes > 0, "Must be");
4858 
4859   size_t before = 0;
4860   size_t after = 0;
4861   MetaWord* res;
4862   bool incremented;
4863 
4864   // Each thread increments the HWM at most once. Even if the thread fails to increment
4865   // the HWM, an allocation is still attempted. This is because another thread must then
4866   // have incremented the HWM and therefore the allocation might still succeed.
4867   do {
4868     incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before);
4869     res = allocate(word_size, mdtype);
4870   } while (!incremented && res == NULL);
4871 
4872   if (incremented) {
4873     Metaspace::tracer()->report_gc_threshold(before, after,
4874                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
4875     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
4876   }
4877 
4878   return res;
4879 }
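// Illustrative walk-through of the loop above (two threads T1 and T2 both
// failed their first allocation attempt; sizes are immaterial):
//
//   T1: inc_capacity_until_GC() succeeds -> incremented == true, one more allocate()
//   T2: inc_capacity_until_GC() loses the race -> incremented == false, but T1
//       already raised the HWM, so T2's allocate() may now succeed; if it still
//       returns NULL, T2 loops and tries to raise the HWM itself.
//
// The loop ends as soon as the allocation succeeds or this thread has managed
// to increment the HWM once.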
4880 
4881 size_t ClassLoaderMetaspace::used_words_slow(Metaspace::MetadataType mdtype) const {
4882   if (mdtype == Metaspace::ClassType) {
4883     return Metaspace::using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
4884   } else {
4885     return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
4886   }
4887 }
4888 
4889 size_t ClassLoaderMetaspace::free_words_slow(Metaspace::MetadataType mdtype) const {
4890   Metaspace::assert_not_frozen();
4891   if (mdtype == Metaspace::ClassType) {
4892     return Metaspace::using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
4893   } else {
4894     return vsm()->sum_free_in_chunks_in_use();
4895   }
4896 }
4897 
4898 // Space capacity in the Metaspace.  It includes
4899 // space in the list of chunks from which allocations
4900 // have been made. It does not include space in the global freelist; space
4901 // available in the block dictionary is not added either, since it is
4902 // already counted in some chunk.
4903 size_t ClassLoaderMetaspace::capacity_words_slow(Metaspace::MetadataType mdtype) const {
4904   if (mdtype == Metaspace::ClassType) {
4905     return Metaspace::using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
4906   } else {
4907     return vsm()->sum_capacity_in_chunks_in_use();
4908   }
4909 }
4910 
4911 size_t ClassLoaderMetaspace::used_bytes_slow(Metaspace::MetadataType mdtype) const {
4912   return used_words_slow(mdtype) * BytesPerWord;
4913 }
4914 
4915 size_t ClassLoaderMetaspace::capacity_bytes_slow(Metaspace::MetadataType mdtype) const {
4916   return capacity_words_slow(mdtype) * BytesPerWord;
4917 }
4918 
4919 size_t ClassLoaderMetaspace::allocated_blocks_bytes() const {
4920   return vsm()->allocated_blocks_bytes() +
4921       (Metaspace::using_class_space() ? class_vsm()->allocated_blocks_bytes() : 0);
4922 }
4923 
4924 size_t ClassLoaderMetaspace::allocated_chunks_bytes() const {
4925   return vsm()->allocated_chunks_bytes() +
4926       (Metaspace::using_class_space() ? class_vsm()->allocated_chunks_bytes() : 0);
4927 }
4928 
4929 void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
4930   Metaspace::assert_not_frozen();
4931   assert(!SafepointSynchronize::is_at_safepoint()
4932          || Thread::current()->is_VM_thread(), "should be the VM thread");
4933 


4934   MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
4935 
4936   if (is_class && Metaspace::using_class_space()) {
4937     class_vsm()->deallocate(ptr, word_size);
4938   } else {
4939     vsm()->deallocate(ptr, word_size);
4940   }
4941 }
4942 
4943 size_t ClassLoaderMetaspace::class_chunk_size(size_t word_size) {
4944   assert(Metaspace::using_class_space(), "Has to use class space");
4945   return class_vsm()->calc_chunk_size(word_size);
4946 }
4947 
4948 void ClassLoaderMetaspace::print_on(outputStream* out) const {
4949   // Print both class virtual space counts and metaspace.
4950   if (Verbose) {
4951     vsm()->print_on(out);
4952     if (Metaspace::using_class_space()) {
4953       class_vsm()->print_on(out);
4954     }
4955   }
4956 }
4957 
4958 void ClassLoaderMetaspace::verify() {
4959   vsm()->verify();
4960   if (Metaspace::using_class_space()) {
4961     class_vsm()->verify();
4962   }
4963 }
4964 
4965 void ClassLoaderMetaspace::dump(outputStream* const out) const {
4966   out->print_cr("\nVirtual space manager: " INTPTR_FORMAT, p2i(vsm()));
4967   vsm()->dump(out);
4968   if (Metaspace::using_class_space()) {
4969     out->print_cr("\nClass space manager: " INTPTR_FORMAT, p2i(class_vsm()));
4970     class_vsm()->dump(out);
4971   }
4972 }
4973 
4974 



4975 
4976 #ifdef ASSERT
4977 static void do_verify_chunk(Metachunk* chunk) {
4978   guarantee(chunk != NULL, "Sanity");
4979   // Verify chunk itself; then verify that it is consistent with the
4980   // occupancy map of its containing node.
4981   chunk->verify();
4982   VirtualSpaceNode* const vsn = chunk->container();
4983   OccupancyMap* const ocmap = vsn->occupancy_map();
4984   ocmap->verify_for_chunk(chunk);
4985 }
4986 #endif
4987 
4988 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse) {
4989   chunk->set_is_tagged_free(!inuse);
4990   OccupancyMap* const ocmap = chunk->container()->occupancy_map();
4991   ocmap->set_region_in_use((MetaWord*)chunk, chunk->word_size(), inuse);
4992 }
4993 
4994 /////////////// Unit tests ///////////////


5300     test_adjust_initial_chunk_size(false);
5301     test_adjust_initial_chunk_size(true);
5302   }
5303 };
5304 
5305 void SpaceManager_test_adjust_initial_chunk_size() {
5306   SpaceManagerTest::test_adjust_initial_chunk_size();
5307 }
5308 
5309 #endif // ASSERT
5310 
5311 struct chunkmanager_statistics_t {
5312   int num_specialized_chunks;
5313   int num_small_chunks;
5314   int num_medium_chunks;
5315   int num_humongous_chunks;
5316 };
5317 
5318 extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out) {
5319   ChunkManager* const chunk_manager = Metaspace::get_chunk_manager(mdType);
5320   ChunkManager::ChunkManagerStatistics stat;
5321   chunk_manager->get_statistics(&stat);
5322   out->num_specialized_chunks = (int)stat.num_by_type[SpecializedIndex];
5323   out->num_small_chunks = (int)stat.num_by_type[SmallIndex];
5324   out->num_medium_chunks = (int)stat.num_by_type[MediumIndex];
5325   out->num_humongous_chunks = (int)stat.num_humongous_chunks;
5326 }
5327 
5328 struct chunk_geometry_t {
5329   size_t specialized_chunk_word_size;
5330   size_t small_chunk_word_size;
5331   size_t medium_chunk_word_size;
5332 };
5333 
5334 extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out) {
5335   if (mdType == Metaspace::NonClassType) {
5336     out->specialized_chunk_word_size = SpecializedChunk;
5337     out->small_chunk_word_size = SmallChunk;
5338     out->medium_chunk_word_size = MediumChunk;
5339   } else {
5340     out->specialized_chunk_word_size = ClassSpecializedChunk;
5341     out->small_chunk_word_size = ClassSmallChunk;
5342     out->medium_chunk_word_size = ClassMediumChunk;
5343   }
5344 }
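// Illustrative use of the whitebox hooks above from a test caller (sketch,
// not part of the original source):
//
//   chunk_geometry_t geom;
//   test_metaspace_retrieve_chunk_geometry(Metaspace::NonClassType, &geom);
//   // geom.specialized_chunk_word_size == 128, geom.small_chunk_word_size == 512,
//   // geom.medium_chunk_word_size == 8 * K for the non-class space.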


  26 #include "gc/shared/collectedHeap.hpp"
  27 #include "gc/shared/collectorPolicy.hpp"
  28 #include "logging/log.hpp"
  29 #include "logging/logStream.hpp"
  30 #include "memory/allocation.hpp"
  31 #include "memory/binaryTreeDictionary.inline.hpp"
  32 #include "memory/filemap.hpp"
  33 #include "memory/freeList.inline.hpp"
  34 #include "memory/metachunk.hpp"
  35 #include "memory/metaspace.hpp"
  36 #include "memory/metaspaceGCThresholdUpdater.hpp"
  37 #include "memory/metaspaceShared.hpp"
  38 #include "memory/metaspaceTracer.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "runtime/atomic.hpp"
  42 #include "runtime/globals.hpp"
  43 #include "runtime/init.hpp"
  44 #include "runtime/java.hpp"
  45 #include "runtime/mutex.hpp"
  46 #include "runtime/mutexLocker.hpp"
  47 #include "runtime/orderAccess.inline.hpp"
  48 #include "services/memTracker.hpp"
  49 #include "services/memoryService.hpp"
  50 #include "utilities/align.hpp"
  51 #include "utilities/copy.hpp"
  52 #include "utilities/debug.hpp"
  53 #include "utilities/macros.hpp"
  54 
  55 typedef BinaryTreeDictionary<Metablock, FreeList<Metablock> > BlockTreeDictionary;
  56 typedef BinaryTreeDictionary<Metachunk, FreeList<Metachunk> > ChunkTreeDictionary;
  57 
  58 // Helper function that does a bunch of checks for a chunk.
  59 DEBUG_ONLY(static void do_verify_chunk(Metachunk* chunk);)
  60 
  61 // Given a Metachunk, update its in-use information (both in the
  62 // chunk and the occupancy map).
  63 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse);
  64 
  65 size_t const allocation_from_dictionary_limit = 4 * K;
  66 
  67 MetaWord* last_allocated = 0;
  68 
  69 size_t Metaspace::_compressed_class_space_size;
  70 const MetaspaceTracer* Metaspace::_tracer = NULL;
  71 
  72 DEBUG_ONLY(bool Metaspace::_frozen = false;)
  73 
  74 // Internal statistics.
  75 #ifdef ASSERT
  76 struct {
  77   // Number of allocations (from outside)
  78   uintx num_allocs;
  79   // Number of times a ClassLoaderMetaspace was born.
  80   uintx num_metaspace_births;
  81   // Number of times a ClassLoaderMetaspace died.
  82   uintx num_metaspace_deaths;
  83   // Number of times VirtualSpaceListNodes were created...
  84   uintx num_vsnodes_created;
  85   // ... and purged.
  86   uintx num_vsnodes_purged;
  87   // Number of times we expanded the committed section of the space.
  88   uintx num_committed_space_expanded;
  89   // Number of deallocations (e.g. retransformClasses etc)
  90   uintx num_deallocs;
  91   // Number of times an alloc was satisfied from deallocated blocks.
  92   uintx num_allocs_from_deallocated_blocks;
  93 } g_internal_statistics;
  94 #endif
  95 
  96 enum ChunkSizes {    // in words.
  97   ClassSpecializedChunk = 128,
  98   SpecializedChunk = 128,
  99   ClassSmallChunk = 256,
 100   SmallChunk = 512,
 101   ClassMediumChunk = 4 * K,
 102   MediumChunk = 8 * K
 103 };
 104 
 105 // Returns size of this chunk type.
 106 size_t get_size_for_nonhumongous_chunktype(ChunkIndex chunktype, bool is_class) {
 107   assert(is_valid_nonhumongous_chunktype(chunktype), "invalid chunk type.");
 108   size_t size = 0;
 109   if (is_class) {
 110     switch(chunktype) {
 111       case SpecializedIndex: size = ClassSpecializedChunk; break;
 112       case SmallIndex: size = ClassSmallChunk; break;
 113       case MediumIndex: size = ClassMediumChunk; break;
 114       default:
 115         ShouldNotReachHere();


 150       // A valid humongous chunk size is a multiple of the smallest chunk size.
 151       assert(is_aligned(size, SpecializedChunk), "Invalid chunk size");
 152       return HumongousIndex;
 153     }
 154   }
 155   ShouldNotReachHere();
 156   return (ChunkIndex)-1;
 157 }
 158 
 159 
 160 static ChunkIndex next_chunk_index(ChunkIndex i) {
 161   assert(i < NumberOfInUseLists, "Out of bound");
 162   return (ChunkIndex) (i+1);
 163 }
 164 
 165 static ChunkIndex prev_chunk_index(ChunkIndex i) {
 166   assert(i > ZeroIndex, "Out of bound");
 167   return (ChunkIndex) (i-1);
 168 }
 169 
 170 static const char* space_type_name(Metaspace::MetaspaceType t) {
 171   const char* s = NULL;
 172   switch (t) {
 173   case Metaspace::StandardMetaspaceType: s = "Standard"; break;
 174   case Metaspace::BootMetaspaceType: s = "Boot"; break;
 175   case Metaspace::AnonymousMetaspaceType: s = "Anonymous"; break;
 176   case Metaspace::ReflectionMetaspaceType: s = "Reflection"; break;
 177   }
 178   assert(s != NULL, "Invalid space type");
 179   return s;
 180 }
 181 
 182 static const char* scale_unit(size_t scale) {
 183   switch(scale) {
 184     case 1: return "bytes";
 185     case sizeof(MetaWord): return "words";
 186     case K: return "KB";
 187     case M: return "MB";
 188     case G: return "GB";
 189     default:
 190       ShouldNotReachHere();
 191       return NULL;
 192   }
 193 }
 194 
 195 // Print a size, in bytes, scaled.
 196 static void print_scaled_bytes(outputStream* st, size_t byte_size, size_t scale = 0, int width = -1) {
 197   if (scale == 0) {
 198     // Dynamic mode. Choose scale for this value.
 199     if (byte_size == 0) {
 200       // Zero values are printed as bytes.
 201       scale = 1;
 202     } else {
 203       if (byte_size >= G) {
 204         scale = G;
 205       } else if (byte_size >= M) {
 206         scale = M;
 207       } else if (byte_size >= K) {
 208         scale = K;
 209       } else {
 210         scale = 1;
 211       }
 212     }
 213     return print_scaled_bytes(st, byte_size, scale, width);
 214   }
 215 
 216 #ifdef ASSERT
 217   assert(scale == 1 || scale == sizeof(MetaWord) || scale == K || scale == M || scale == G, "Invalid scale");
 218   // Special case: printing wordsize should only be done with word-sized values
 219   if (scale == sizeof(MetaWord)) {
 220     assert(byte_size % sizeof(MetaWord) == 0, "not word sized");
 221   }
 222 #endif
 223 
 224   if (scale == 1) {
 225     st->print("%*" PRIuPTR " bytes", width, byte_size);
 226   } else if (scale == sizeof(MetaWord)) {
 227     st->print("%*" PRIuPTR " words", width, byte_size / sizeof(MetaWord));
 228   } else {
 229     const char* const unit = scale_unit(scale);
 230     float display_value = (float) byte_size / scale;
 231     // Since the number is printed with two decimal places, widen the field a bit.
 232     width += 3;
 233     // Prevent very small but non-null values showing up as 0.00.
 234     // Prevent very small but non-zero values from showing up as 0.00.
 235       st->print("%*s %s", width, "<0.01", unit);
 236     } else {
 237       st->print("%*.2f %s", width, display_value, unit);
 238     }
 239   }
 240 }
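// Examples of what the helper above prints (st is some outputStream*):
//   print_scaled_bytes(st, 1536, K)   -> "1.50 KB"
//   print_scaled_bytes(st, 5, M)      -> "<0.01 MB"   (small but non-zero)
//   print_scaled_bytes(st, 0)         -> "0 bytes"    (dynamic mode, zero prints as bytes)
//   print_scaled_bytes(st, 3 * M)     -> "3.00 MB"    (dynamic mode picks the MB scale)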
 241 
 242 // Print a size, in words, scaled.
 243 static void print_scaled_words(outputStream* st, size_t word_size, size_t scale = 0, int width = -1) {
 244   print_scaled_bytes(st, word_size * sizeof(MetaWord), scale, width);
 245 }
 246 
 247 static void print_percentage(outputStream* st, size_t total, size_t part) {
 248   if (total == 0) {
 249     st->print("  ?%%");
 250   } else if (part == 0) {
 251     st->print("  0%%");
 252   } else {
 253     float p = ((float)part / total) * 100.0f;
 254     if (p < 1.0f) {
 255       st->print(" <1%%");
 256     } else {
 257       st->print("%3.0f%%", p);
 258     }
 259   }
 260 }
 261 
 262 // Convenience helper: prints a size value and a percentage.
 263 static void print_scaled_words_and_percentage(outputStream* st, size_t word_size, size_t compare_word_size, size_t scale = 0, int width = -1) {
 264   print_scaled_words(st, word_size, scale, width);
 265   st->print(" (");
 266   print_percentage(st, compare_word_size, word_size);
 267   st->print(")");
 268 }
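// Example (made-up values): 256 words out of a total of 1024, printed with
// the word scale:
//
//   print_scaled_words_and_percentage(st, 256, 1024, sizeof(MetaWord));
//   // -> "256 words ( 25%)"
//
// A zero total prints as "(  ?%)" instead of dividing by zero.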
 269 
 270 volatile intptr_t MetaspaceGC::_capacity_until_GC = 0;
 271 uint MetaspaceGC::_shrink_factor = 0;
 272 bool MetaspaceGC::_should_concurrent_collect = false;
 273 
 274 /// statistics ///////
 275 
 276 // Contains statistics for a number of free chunks.
 277 class FreeChunksStatistics {
 278   uintx _num;         // Number of chunks
 279   size_t _cap;        // Total capacity, in words
 280 
 281 public:
 282   FreeChunksStatistics() : _num(0), _cap(0) {}
 283 
 284   void reset() {
 285     _num = 0; _cap = 0;
 286   }
 287 
 288   uintx num() const { return _num; }
 289   size_t cap() const { return _cap; }
 290 
 291   void add(uintx n, size_t s) { _num += n; _cap += s; }
 292   void add(const FreeChunksStatistics& other) {
 293     _num += other._num;
 294     _cap += other._cap;
 295   }
 296 
 297   void print_on(outputStream* st, size_t scale) const {
 298     st->print(UINTX_FORMAT, _num);
 299     st->print(" chunks, total capacity ");
 300     print_scaled_words(st, _cap, scale);
 301   }
 302 
 303 }; // end: FreeChunksStatistics
 304 
 305 // Contains statistics for a ChunkManager.
 306 class ChunkManagerStatistics {
 307 
 308   FreeChunksStatistics _chunk_stats[NumberOfInUseLists];
 309 
 310 public:
 311 
 312   // Free chunk statistics, by chunk index.
 313   const FreeChunksStatistics& chunk_stats(ChunkIndex index) const   { return _chunk_stats[index]; }
 314   FreeChunksStatistics& chunk_stats(ChunkIndex index)               { return _chunk_stats[index]; }
 315 
 316   void reset() {
 317     for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
 318       _chunk_stats[i].reset();
 319     }
 320   }
 321 
 322   size_t total_capacity() const {
 323     return _chunk_stats[SpecializedIndex].cap() +
 324         _chunk_stats[SmallIndex].cap() +
 325         _chunk_stats[MediumIndex].cap() +
 326         _chunk_stats[HumongousIndex].cap();
 327   }
 328 
 329   void print_on(outputStream* st, size_t scale) const {
 330     FreeChunksStatistics totals;
 331     for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
 332       st->cr();
 333       st->print("%12s chunks: ", chunk_size_name(i));
 334       if (_chunk_stats[i].num() > 0) {
 335         st->print(UINTX_FORMAT_W(4) ", capacity ", _chunk_stats[i].num());
 336         print_scaled_words(st, _chunk_stats[i].cap(), scale);
 337       } else {
 338         st->print("(none)");
 339       }
 340       totals.add(_chunk_stats[i]);
 341     }
 342     st->cr();
 343     st->print("%19s: " UINTX_FORMAT_W(4) ", capacity=", "Total", totals.num());
 344     print_scaled_words(st, totals.cap(), scale);
 345     st->cr();
 346   }
 347 
 348 }; // ChunkManagerStatistics
 349 
 350 // Contains statistics for a number of chunks in use.
 351 // Each chunk has a used and free portion; however, there are current chunks (serving
 352 // potential future metaspace allocations) and non-current chunks. Unused portion of the
 353 // former is counted as free, unused portion of the latter counts as waste.
 354 class UsedChunksStatistics {
 355   uintx _num;     // Number of chunks
 356   size_t _cap;    // Total capacity, in words
 357   size_t _used;   // Total used area, in words
 358   size_t _free;   // Total free area (unused portions of current chunks), in words
 359   size_t _waste;  // Total waste area (unused portions of non-current chunks), in words
 360 
 361 public:
 362 
 363   UsedChunksStatistics()
 364     : _num(0), _cap(0), _used(0), _free(0), _waste(0)
 365   {}
 366 
 367   void reset() {
 368     _num = 0;
 369     _cap = _used = _free = _waste = 0;
 370   }
 371 
 372   uintx num() const { return _num; }
 373 
 374   // Total capacity, in words
 375   size_t cap() const { return _cap; }
 376 
 377   // Total used area, in words
 378   size_t used() const { return _used; }
 379 
 380   // Total free area (unused portions of current chunks), in words
 381   size_t free() const { return _free; }
 382 
 383   // Total waste area (unused portions of non-current chunks), in words
 384   size_t waste() const { return _waste; }
 385 
 386   void add_num(uintx n) { _num += n; }
 387   void add_cap(size_t s) { _cap += s; }
 388   void add_used(size_t s) { _used += s; }
 389   void add_free(size_t s) { _free += s; }
 390   void add_waste(size_t s) { _waste += s; }
 391 
 392   void add(const UsedChunksStatistics& other) {
 393     _num += other._num;
 394     _cap += other._cap;
 395     _used += other._used;
 396     _free += other._free;
 397     _waste += other._waste;
 398   }
 399 
 400   void print_on(outputStream* st, size_t scale) const {
 401     int col = st->position();
 402     st->print(UINTX_FORMAT_W(3) " chunk%s, ", _num, _num != 1 ? "s" : "");
 403     if (_num > 0) {
 404       col += 12; st->fill_to(col);
 405 
 406       print_scaled_words(st, _cap, scale, 5);
 407       st->print(" capacity, ");
 408 
 409       col += 18; st->fill_to(col);
 410       print_scaled_words_and_percentage(st, _used, _cap, scale, 5);
 411       st->print(" used, ");
 412 
 413       col += 20; st->fill_to(col);
 414       print_scaled_words_and_percentage(st, _free, _cap, scale, 5);
 415       st->print(" free, ");
 416 
 417       col += 20; st->fill_to(col);
 418       print_scaled_words_and_percentage(st, _waste, _cap, scale, 5);
 419       st->print(" waste");
 420     }
 421   }
 422 
 423 }; // UsedChunksStatistics
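// Illustrative example (made-up numbers): a SpaceManager holding two small
// chunks of 512 words each, where the current chunk has 100 unused words and
// a retired chunk has 40 unusable words left, folds into a
// UsedChunksStatistics like this:
//
//   UsedChunksStatistics s;
//   s.add_num(2);
//   s.add_cap(2 * 512);   // 1024 words capacity
//   s.add_used(884);      // 1024 - 100 - 40
//   s.add_free(100);      // unused tail of the current chunk
//   s.add_waste(40);      // unused remainder of the non-current chunk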
 424 
 425 // Class containing statistics for one or more space managers.
 426 class SpaceManagerStatistics {
 427 
 428   UsedChunksStatistics _chunk_stats[NumberOfInUseLists];
 429   uintx _free_blocks_num;
 430   size_t _free_blocks_cap_words;
 431 
 432 public:
 433 
 434   SpaceManagerStatistics() { reset(); }
 435 
 436   void reset() {
 437     for (int i = 0; i < NumberOfInUseLists; i++) {
 438       _chunk_stats[i].reset();
 439     }
 440     _free_blocks_num = 0; _free_blocks_cap_words = 0;
 441   }
 442 
 443   void add_free_blocks(uintx num, size_t cap) {
 444     _free_blocks_num += num;
 445     _free_blocks_cap_words += cap;
 446   }
 447 
 448   // Chunk statistics by chunk index
 449   const UsedChunksStatistics& chunk_stats(ChunkIndex index) const   { return _chunk_stats[index]; }
 450   UsedChunksStatistics& chunk_stats(ChunkIndex index)               { return _chunk_stats[index]; }
 451 
 452   uintx free_blocks_num () const { return _free_blocks_num; }
 453   size_t free_blocks_cap_words () const { return _free_blocks_cap_words; }
 454 
 455   // Returns total chunk statistics over all chunk types.
 456   UsedChunksStatistics totals() const {
 457     UsedChunksStatistics stat;
 458     for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
 459       stat.add(_chunk_stats[i]);
 460     }
 461     return stat;
 462   }
 463 
 464   void add(const SpaceManagerStatistics& other) {
 465     for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
 466       _chunk_stats[i].add(other._chunk_stats[i]);
 467     }
 468     _free_blocks_num += other._free_blocks_num;
 469     _free_blocks_cap_words += other._free_blocks_cap_words;
 470   }
 471 
 472   void print_on(outputStream* st, size_t scale,  bool detailed) const {
 473     UsedChunksStatistics totals;
 474     for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
 475       totals.add(_chunk_stats[i]);
 476     }
 477     streamIndentor sti(st);
 478     if (detailed) {
 479       st->cr_indent();
 480     }
 481     totals.print_on(st, scale);
 482     if (_free_blocks_num > 0) {
 483       if (detailed) {
 484         st->cr_indent();
 485       } else {
 486         st->print(", ");
 487       }
 488       st->print("deallocated: " UINTX_FORMAT " blocks with ", _free_blocks_num);
 489       print_scaled_words(st, _free_blocks_cap_words, scale);
 490     }
 491     if (detailed) {
 492       st->cr_indent();
 493       st->print("By chunk type:");
 494       {
 495         streamIndentor sti2(st);
 496         for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
 497           st->cr_indent();
 498           st->print("%15s: ", chunk_size_name(i));
 499           if (_chunk_stats[i].num() == 0) {
 500             st->print(" (none)");
 501           } else {
 502             _chunk_stats[i].print_on(st, scale);
 503           }
 504         }
 505       }
 506     }
 507   }
 508 
 509 }; // SpaceManagerStatistics
 510 
 511 class ClassLoaderMetaspaceStatistics {
 512 
 513   SpaceManagerStatistics _sm_stats[Metaspace::MetadataTypeCount];
 514 
 515 public:
 516 
 517   ClassLoaderMetaspaceStatistics() { reset(); }
 518 
 519   void reset() {
 520     nonclass_sm_stats().reset();
 521     if (Metaspace::using_class_space()) {
 522       class_sm_stats().reset();
 523     }
 524   }
 525 
 526   const SpaceManagerStatistics& sm_stats(Metaspace::MetadataType mdType) const { return _sm_stats[mdType]; }
 527   SpaceManagerStatistics& sm_stats(Metaspace::MetadataType mdType)             { return _sm_stats[mdType]; }
 528 
 529   const SpaceManagerStatistics& nonclass_sm_stats() const { return sm_stats(Metaspace::NonClassType); }
 530   SpaceManagerStatistics& nonclass_sm_stats()             { return sm_stats(Metaspace::NonClassType); }
 531   const SpaceManagerStatistics& class_sm_stats() const    { return sm_stats(Metaspace::ClassType); }
 532   SpaceManagerStatistics& class_sm_stats()                { return sm_stats(Metaspace::ClassType); }
 533 
 534   // Returns total space manager statistics for both class and non-class metaspace
 535   SpaceManagerStatistics totals() const {
 536     SpaceManagerStatistics stats;
 537     stats.add(nonclass_sm_stats());
 538     if (Metaspace::using_class_space()) {
 539       stats.add(class_sm_stats());
 540     }
 541     return stats;
 542   }
 543 
 544   void add(const ClassLoaderMetaspaceStatistics& other) {
 545     nonclass_sm_stats().add(other.nonclass_sm_stats());
 546     if (Metaspace::using_class_space()) {
 547       class_sm_stats().add(other.class_sm_stats());
 548     }
 549   }
 550 
 551   void print_on(outputStream* st, size_t scale, bool detailed) const {
 552     streamIndentor sti(st);
 553     st->cr_indent();
 554     if (Metaspace::using_class_space()) {
 555       st->print("Non-Class: ");
 556     }
 557     nonclass_sm_stats().print_on(st, scale, detailed);
 558     if (Metaspace::using_class_space()) {
 559       st->cr_indent();
 560       st->print("Class:     ");
 561       class_sm_stats().print_on(st, scale, detailed);
 562     }
 563     st->cr();
 564   }
 565 
 566 }; // ClassLoaderMetaspaceStatistics
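// Illustrative use (sketch only; in the real code the SpaceManagers populate
// these statistics, and st is some outputStream*):
//
//   ClassLoaderMetaspaceStatistics clms;
//   clms.nonclass_sm_stats().chunk_stats(SmallIndex).add_num(1);
//   clms.nonclass_sm_stats().chunk_stats(SmallIndex).add_cap(SmallChunk);
//   clms.totals().print_on(st, K, false);   // combined class + non-class view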
 567 
 568 
 569 typedef class FreeList<Metachunk> ChunkList;
 570 
 571 // Manages the global free lists of chunks.
 572 class ChunkManager : public CHeapObj<mtInternal> {
 573   friend class TestVirtualSpaceNodeTest;
 574 
 575   // Free list of chunks of different sizes.
 576   //   SpecializedChunk
 577   //   SmallChunk
 578   //   MediumChunk
 579   ChunkList _free_chunks[NumberOfFreeLists];
 580 
 581   // Whether or not this is the class chunkmanager.
 582   const bool _is_class;
 583 
 584   // Return non-humongous chunk list by its index.
 585   ChunkList* free_chunks(ChunkIndex index);
 586 
 587   // Returns non-humongous chunk list for the given chunk word size.
 588   ChunkList* find_free_chunks_list(size_t word_size);


 629 
 630   // Helper for chunk merging:
 631   //  Given an address range with 1-n chunks which are all supposed to be
 632   //  free and hence currently managed by this ChunkManager, remove them
 633   //  from this ChunkManager and mark them as invalid.
 634   // - This does not correct the occupancy map.
 635   // - This does not adjust the counters in ChunkManager.
 636   // - Does not adjust container count counter in containing VirtualSpaceNode.
 637   // Returns number of chunks removed.
 638   int remove_chunks_in_area(MetaWord* p, size_t word_size);
 639 
 640   // Helper for chunk splitting: given a target chunk size and a larger free chunk,
 641   // split up the larger chunk into n smaller chunks, at least one of which should be
 642   // the target chunk of target chunk size. The smaller chunks, including the target
 643   // chunk, are returned to the freelist. The pointer to the target chunk is returned.
 644   // Note that this chunk is supposed to be removed from the freelist right away.
 645   Metachunk* split_chunk(size_t target_chunk_word_size, Metachunk* chunk);
 646 
 647  public:
 648 
 649   ChunkManager(bool is_class)
 650       : _is_class(is_class), _free_chunks_total(0), _free_chunks_count(0) {
 651     _free_chunks[SpecializedIndex].set_size(get_size_for_nonhumongous_chunktype(SpecializedIndex, is_class));
 652     _free_chunks[SmallIndex].set_size(get_size_for_nonhumongous_chunktype(SmallIndex, is_class));
 653     _free_chunks[MediumIndex].set_size(get_size_for_nonhumongous_chunktype(MediumIndex, is_class));
 654   }
 655 
 656   // Add or delete (return) a chunk to the global freelist.
 657   Metachunk* chunk_freelist_allocate(size_t word_size);
 658 
 659   // Map a size to a list index assuming that there are lists
 660   // for special, small, medium, and humongous chunks.
 661   ChunkIndex list_index(size_t size);
 662 
 663   // Map a given index to the chunk size.
 664   size_t size_by_index(ChunkIndex index) const;
 665 
 666   bool is_class() const { return _is_class; }
 667 
 668   // Convenience accessors.


 734   // Debug support
 735   void verify();
 736   void slow_verify() {
 737     if (VerifyMetaspace) {
 738       verify();
 739     }
 740   }
 741   void locked_verify();
 742   void slow_locked_verify() {
 743     if (VerifyMetaspace) {
 744       locked_verify();
 745     }
 746   }
 747   void verify_free_chunks_total();
 748 
 749   void locked_print_free_chunks(outputStream* st);
 750   void locked_print_sum_free_chunks(outputStream* st);
 751 
 752   void print_on(outputStream* st) const;
 753 
 754   void get_statistics(ChunkManagerStatistics* out) const;
 755 

 756 };
 757 
 758 class SmallBlocks : public CHeapObj<mtClass> {
 759   const static uint _small_block_max_size = sizeof(TreeChunk<Metablock,  FreeList<Metablock> >)/HeapWordSize;
 760   const static uint _small_block_min_size = sizeof(Metablock)/HeapWordSize;
 761 
 762  private:
 763   FreeList<Metablock> _small_lists[_small_block_max_size - _small_block_min_size];
 764 
 765   FreeList<Metablock>& list_at(size_t word_size) {
 766     assert(word_size >= _small_block_min_size, "There are no metaspace objects less than %u words", _small_block_min_size);
 767     return _small_lists[word_size - _small_block_min_size];
 768   }
 769 
 770  public:
 771   SmallBlocks() {
 772     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 773       uint k = i - _small_block_min_size;
 774       _small_lists[k].set_size(i);
 775     }
 776   }
 777 
 778   size_t total_size() const {
 779     size_t result = 0;
 780     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 781       uint k = i - _small_block_min_size;
 782       result = result + _small_lists[k].count() * _small_lists[k].size();
 783     }
 784     return result;
 785   }
 786 
 787   uintx total_num_blocks() const {
 788     uintx result = 0;
 789     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 790       uint k = i - _small_block_min_size;
 791       result = result + _small_lists[k].count();
 792     }
 793     return result;
 794   }
 795 
 796   static uint small_block_max_size() { return _small_block_max_size; }
 797   static uint small_block_min_size() { return _small_block_min_size; }
 798 
 799   MetaWord* get_block(size_t word_size) {
 800     if (list_at(word_size).count() > 0) {
 801       MetaWord* new_block = (MetaWord*) list_at(word_size).get_chunk_at_head();
 802       return new_block;
 803     } else {
 804       return NULL;
 805     }
 806   }
 807   void return_block(Metablock* free_chunk, size_t word_size) {
 808     list_at(word_size).return_chunk_at_head(free_chunk, false);
 809     assert(list_at(word_size).count() > 0, "Should have a chunk");
 810   }
 811 
 812   void print_on(outputStream* st) const {
 813     st->print_cr("SmallBlocks:");
 814     for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
 815       uint k = i - _small_block_min_size;


 836     }
 837     return _small_blocks;
 838   }
 839 
 840  public:
 841   BlockFreelist();
 842   ~BlockFreelist();
 843 
  844   // Get a block from, or return a block to, the free list
 845   MetaWord* get_block(size_t word_size);
 846   void return_block(MetaWord* p, size_t word_size);
 847 
 848   size_t total_size() const  {
 849     size_t result = dictionary()->total_size();
 850     if (_small_blocks != NULL) {
 851       result = result + _small_blocks->total_size();
 852     }
 853     return result;
 854   }
 855 
 856   uintx num_blocks() const {
 857     uintx result = dictionary()->total_free_blocks();
 858     if (_small_blocks != NULL) {
 859       result = result + _small_blocks->total_num_blocks();
 860     }
 861     return result;
 862   }
 863 
 864   static size_t min_dictionary_size()   { return TreeChunk<Metablock, FreeList<Metablock> >::min_size(); }
 865   void print_on(outputStream* st) const;
 866 };
 867 
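// Illustrative sketch only (not HotSpot code): BlockFreelist above keeps deallocated
// blocks in two tiers - exact-size lists for small blocks and a size-ordered dictionary
// for larger ones. A minimal standalone analogue, with assumed size limits and a
// std::multimap standing in for the binary tree dictionary:
#include <cstddef>
#include <map>
#include <vector>

class ExampleBlockFreelist {
  static const size_t kSmallMin = 3;   // assumed smallest block size, in words
  static const size_t kSmallMax = 12;  // assumed small-block limit, in words
  std::vector<void*> _small[kSmallMax - kSmallMin];   // exact-size buckets
  std::multimap<size_t, void*> _dictionary;           // larger blocks, keyed by size
 public:
  void return_block(void* p, size_t word_size) {
    if (word_size < kSmallMax) {
      _small[word_size - kSmallMin].push_back(p);
    } else {
      _dictionary.insert(std::make_pair(word_size, p));
    }
  }
  void* get_block(size_t word_size) {
    if (word_size < kSmallMax && !_small[word_size - kSmallMin].empty()) {
      void* p = _small[word_size - kSmallMin].back();
      _small[word_size - kSmallMin].pop_back();
      return p;
    }
    // Best fit: the smallest dictionary block that is large enough.
    std::multimap<size_t, void*>::iterator it = _dictionary.lower_bound(word_size);
    if (it == _dictionary.end()) return NULL;
    void* p = it->second;
    _dictionary.erase(it);
    return p;
  }
};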
  868 // Helper for Occupancy Bitmap. A type trait to give an all-bits-set unsigned constant.
 869 template <typename T> struct all_ones  { static const T value; };
 870 template <> struct all_ones <uint64_t> { static const uint64_t value = 0xFFFFFFFFFFFFFFFFULL; };
 871 template <> struct all_ones <uint32_t> { static const uint32_t value = 0xFFFFFFFF; };
 872 
 873 // The OccupancyMap is a bitmap which, for a given VirtualSpaceNode,
 874 // keeps information about
 875 // - where a chunk starts
 876 // - whether a chunk is in-use or free
 877 // A bit in this bitmap represents one range of memory in the smallest
 878 // chunk size (SpecializedChunk or ClassSpecializedChunk).
 879 class OccupancyMap : public CHeapObj<mtInternal> {
 880 
 881   // The address range this map covers.
 882   const MetaWord* const _reference_address;
 883   const size_t _word_size;


1250   // Allocate a chunk from the virtual space and return it.
1251   Metachunk* get_chunk_vs(size_t chunk_word_size);
1252 
 1253   // Expands/shrinks the committed space in a virtual space.  Delegates
 1254   // to VirtualSpace.
1255   bool expand_by(size_t min_words, size_t preferred_words);
1256 
1257   // In preparation for deleting this node, remove all the chunks
1258   // in the node from any freelist.
1259   void purge(ChunkManager* chunk_manager);
1260 
 1261   // If an allocation doesn't fit in the current node a new node is created.
 1262   // Before that happens, allocate chunks out of the remaining committed
 1263   // space in this node to avoid wasting that memory.
 1264   // This always works out because all chunk sizes are multiples of
 1265   // the smallest chunk size.
1266   void retire(ChunkManager* chunk_manager);
1267 
1268 
1269   void print_on(outputStream* st) const;
1270   void print_on(outputStream* st, size_t scale) const;
1271   void print_map(outputStream* st, bool is_class) const;
1272 
1273   // Debug support
1274   DEBUG_ONLY(void mangle();)
1275   // Verify counters, all chunks in this list node and the occupancy map.
1276   DEBUG_ONLY(void verify();)
1277   // Verify that all free chunks in this node are ideally merged
 1278   // (there should not be multiple small chunks where a large chunk could exist).
1279   DEBUG_ONLY(void verify_free_chunks_are_ideally_merged();)
1280 
1281 };
1282 
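// Illustrative sketch only (not HotSpot code): the OccupancyMap described earlier keeps
// one bit per smallest-chunk-sized range of a node. Mapping a position to its bit is
// simple arithmetic; positions here are word offsets from the node base, and all sizes
// are assumptions for the example.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct ExampleOccupancyMap {
  const size_t smallest_chunk_words;   // one bit covers this many words
  std::vector<uint8_t> bits;

  ExampleOccupancyMap(size_t range_words, size_t chunk_words)
    : smallest_chunk_words(chunk_words),
      bits((range_words / chunk_words + 7) / 8, 0) {}

  size_t bit_for(size_t word_offset) const { return word_offset / smallest_chunk_words; }

  void set(size_t word_offset, bool v) {
    size_t n = bit_for(word_offset);
    assert(n / 8 < bits.size());
    if (v) { bits[n / 8] |= (uint8_t)(1u << (n % 8)); }
    else   { bits[n / 8] &= (uint8_t)~(1u << (n % 8)); }
  }
  bool get(size_t word_offset) const {
    size_t n = bit_for(word_offset);
    return (bits[n / 8] >> (n % 8)) & 1;
  }
};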
1283 #define assert_is_aligned(value, alignment)                  \
1284   assert(is_aligned((value), (alignment)),                   \
1285          SIZE_FORMAT_HEX " is not aligned to "               \
1286          SIZE_FORMAT, (size_t)(uintptr_t)value, (alignment))
1287 
1288 #define assert_counter(expected_value, real_value, msg) \
1289   assert( (expected_value) == (real_value),             \
1290          "Counter mismatch (%s): expected " SIZE_FORMAT \
1291          ", but got: " SIZE_FORMAT ".", msg, expected_value, \
1292          real_value);
1293 
1294 // Decide if large pages should be committed when the memory is reserved.
1295 static bool should_commit_large_pages_when_reserving(size_t bytes) {
1296   if (UseLargePages && UseLargePagesInMetaspace && !os::can_commit_large_page_memory()) {
1297     size_t words = bytes / BytesPerWord;
1298     bool is_class = false; // We never reserve large pages for the class space.
1299     if (MetaspaceGC::can_expand(words, is_class) &&
1300         MetaspaceGC::allowed_expansion() >= words) {
1301       return true;
1302     }
1303   }
1304 
1305   return false;
1306 }
1307 
 1308 // bytes is the size of the associated virtual space.
1309 VirtualSpaceNode::VirtualSpaceNode(bool is_class, size_t bytes) :
1310   _is_class(is_class), _top(NULL), _next(NULL), _rs(), _container_count(0), _occupancy_map(NULL) {
1311   assert_is_aligned(bytes, Metaspace::reserve_alignment());
1312   bool large_pages = should_commit_large_pages_when_reserving(bytes);
1313   _rs = ReservedSpace(bytes, Metaspace::reserve_alignment(), large_pages);


1581   bool initialization_succeeded() { return _virtual_space_list != NULL; }
1582 
1583   size_t reserved_words()  { return _reserved_words; }
1584   size_t reserved_bytes()  { return reserved_words() * BytesPerWord; }
1585   size_t committed_words() { return _committed_words; }
1586   size_t committed_bytes() { return committed_words() * BytesPerWord; }
1587 
1588   void inc_reserved_words(size_t v);
1589   void dec_reserved_words(size_t v);
1590   void inc_committed_words(size_t v);
1591   void dec_committed_words(size_t v);
1592   void inc_virtual_space_count();
1593   void dec_virtual_space_count();
1594 
1595   bool contains(const void* ptr);
1596 
 1597   // Unlink empty VirtualSpaceNodes and free them.
1598   void purge(ChunkManager* chunk_manager);
1599 
1600   void print_on(outputStream* st) const;
1601   void print_on(outputStream* st, size_t scale) const;
1602   void print_map(outputStream* st) const;
1603 
1604   class VirtualSpaceListIterator : public StackObj {
1605     VirtualSpaceNode* _virtual_spaces;
1606    public:
1607     VirtualSpaceListIterator(VirtualSpaceNode* virtual_spaces) :
1608       _virtual_spaces(virtual_spaces) {}
1609 
1610     bool repeat() {
1611       return _virtual_spaces != NULL;
1612     }
1613 
1614     VirtualSpaceNode* get_next() {
1615       VirtualSpaceNode* result = _virtual_spaces;
1616       if (_virtual_spaces != NULL) {
1617         _virtual_spaces = _virtual_spaces->next();
1618       }
1619       return result;
1620     }
1621   };
1622 };
1623 
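// Illustrative sketch only (hypothetical types): VirtualSpaceListIterator above is the
// usual repeat()/get_next() cursor over a singly linked list; the caller keeps asking
// for the next node until the cursor runs off the end.
#include <cstddef>

struct ExampleNode {
  int value;
  ExampleNode* _next;
  ExampleNode* next() const { return _next; }
};

class ExampleListCursor {
  ExampleNode* _cur;
 public:
  explicit ExampleListCursor(ExampleNode* head) : _cur(head) {}
  bool repeat() const { return _cur != NULL; }
  ExampleNode* get_next() {            // return the current node, then advance
    ExampleNode* result = _cur;
    if (_cur != NULL) { _cur = _cur->next(); }
    return result;
  }
};

// Typical walk, in the same shape as print_on() and contains() use above:
static int example_sum(ExampleNode* head) {
  int sum = 0;
  ExampleListCursor it(head);
  while (it.repeat()) {
    sum += it.get_next()->value;
  }
  return sum;
}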
1624 class Metadebug : AllStatic {
1625   // Debugging support for Metaspaces
1626   static int _allocation_fail_alot_count;
1627 
1628  public:
1629 
1630   static void init_allocation_fail_alot_count();
1631 #ifdef ASSERT
1632   static bool test_metadata_failure();
1633 #endif
1634 };
1635 
1636 int Metadebug::_allocation_fail_alot_count = 0;
1637 
1638 
1639 //  SpaceManager - used by Metaspace to handle allocations
1640 class SpaceManager : public CHeapObj<mtClass> {
1641   friend class ClassLoaderMetaspace;
1642   friend class Metadebug;
1643 
1644  private:
1645 
1646   // protects allocations
1647   Mutex* const _lock;
1648 
1649   // Type of metadata allocated.
1650   const Metaspace::MetadataType   _mdtype;
1651 
1652   // Type of metaspace
1653   const Metaspace::MetaspaceType  _space_type;
1654 
1655   // List of chunks in use by this SpaceManager.  Allocations
1656   // are done from the current chunk.  The list is used for deallocating
1657   // chunks when the SpaceManager is freed.
1658   Metachunk* _chunks_in_use[NumberOfInUseLists];
1659   Metachunk* _current_chunk;
1660 
1661   // Maximum number of small chunks to allocate to a SpaceManager
1662   static uint const _small_chunk_limit;
1663 
 1664   // Maximum number of specialized chunks to allocate for anonymous and delegating
1665   // metadata space to a SpaceManager
1666   static uint const _anon_and_delegating_metadata_specialize_chunk_limit;
1667 
1668   // Sum of used space in chunks, including overhead incurred by chunk headers.
1669   size_t _allocated_block_words;
1670 
1671   // Sum of all allocated chunks
1672   size_t _allocated_chunks_words;
1673   size_t _allocated_chunks_count;
1674 
1675   // Free lists of blocks are per SpaceManager since they
1676   // are assumed to be in chunks in use by the SpaceManager
1677   // and all chunks in use by a SpaceManager are freed when
1678   // the class loader using the SpaceManager is collected.
1679   BlockFreelist* _block_freelists;
1680 
1681  private:
1682   // Accessors
1683   Metachunk* chunks_in_use(ChunkIndex index) const { return _chunks_in_use[index]; }
1684   void set_chunks_in_use(ChunkIndex index, Metachunk* v) {
1685     _chunks_in_use[index] = v;
1686   }
1687 
1688   BlockFreelist* block_freelists() const { return _block_freelists; }
1689 
1690   Metaspace::MetadataType mdtype() { return _mdtype; }
1691 
1692   VirtualSpaceList* vs_list()   const { return Metaspace::get_space_list(_mdtype); }
1693   ChunkManager* chunk_manager() const { return Metaspace::get_chunk_manager(_mdtype); }
1694 
1695   Metachunk* current_chunk() const { return _current_chunk; }
1696   void set_current_chunk(Metachunk* v) {
1697     _current_chunk = v;
1698   }
1699 
1700   Metachunk* find_current_chunk(size_t word_size);
1701 
1702   // Add chunk to the list of chunks in use
1703   void add_chunk(Metachunk* v, bool make_current);
1704   void retire_current_chunk();
1705 
1706   Mutex* lock() const { return _lock; }
1707 
1708   // Adds to the given statistic object. Must be locked with CLD metaspace lock.
1709   void add_to_statistics_locked(SpaceManagerStatistics* out) const;
1710 
1711   // Verify internal counters against the current state. Must be locked with CLD metaspace lock.
1712   DEBUG_ONLY(void verify_metrics_locked() const;)
1713 
1714  protected:
1715   void initialize();
1716 
1717  public:
1718   SpaceManager(Metaspace::MetadataType mdtype,
1719                Metaspace::MetaspaceType space_type,
1720                Mutex* lock);
1721   ~SpaceManager();
1722 
1723   enum ChunkMultiples {
1724     MediumChunkMultiple = 4
1725   };
1726 
1727   static size_t specialized_chunk_size(bool is_class) { return is_class ? ClassSpecializedChunk : SpecializedChunk; }
1728   static size_t small_chunk_size(bool is_class)       { return is_class ? ClassSmallChunk : SmallChunk; }
1729   static size_t medium_chunk_size(bool is_class)      { return is_class ? ClassMediumChunk : MediumChunk; }
1730 
1731   static size_t smallest_chunk_size(bool is_class)    { return specialized_chunk_size(is_class); }
1732 
1733   // Accessors
1734   bool is_class() const { return _mdtype == Metaspace::ClassType; }
1735 
1736   size_t specialized_chunk_size() const { return specialized_chunk_size(is_class()); }
1737   size_t small_chunk_size()       const { return small_chunk_size(is_class()); }
1738   size_t medium_chunk_size()      const { return medium_chunk_size(is_class()); }
1739 
1740   size_t smallest_chunk_size()    const { return smallest_chunk_size(is_class()); }
1741 
1742   size_t medium_chunk_bunch()     const { return medium_chunk_size() * MediumChunkMultiple; }
1743 
1744   size_t allocated_blocks_words() const { return _allocated_block_words; }
1745   size_t allocated_blocks_bytes() const { return _allocated_block_words * BytesPerWord; }
1746   size_t allocated_chunks_words() const { return _allocated_chunks_words; }
1747   size_t allocated_chunks_bytes() const { return _allocated_chunks_words * BytesPerWord; }
1748   size_t allocated_chunks_count() const { return _allocated_chunks_count; }
1749 
1750   bool is_humongous(size_t word_size) { return word_size > medium_chunk_size(); }
1751 
1752   // Increment the per Metaspace and global running sums for Metachunks
 1753   // by the given size.  This is used when a Metachunk is added to
1754   // the in-use list.
1755   void inc_size_metrics(size_t words);
 1756   // Increment the per Metaspace and global running sums for Metablocks by the given
1757   // size.  This is used when a Metablock is allocated.
1758   void inc_used_metrics(size_t words);
1759   // Delete the portion of the running sums for this SpaceManager. That is,
 1760   // the global running sums for the Metachunks and Metablocks are
1761   // decremented for all the Metachunks in-use by this SpaceManager.
1762   void dec_total_from_size_metrics();
1763 
1764   // Adjust the initial chunk size to match one of the fixed chunk list sizes,
1765   // or return the unadjusted size if the requested size is humongous.
1766   static size_t adjust_initial_chunk_size(size_t requested, bool is_class_space);
1767   size_t adjust_initial_chunk_size(size_t requested) const;
1768 
 1769   // Get the initial chunk size for this metaspace type.
1770   size_t get_initial_chunk_size(Metaspace::MetaspaceType type) const;
1771 
1772   // Todo: remove this if we have counters by chunk type.






1773   size_t sum_count_in_chunks_in_use(ChunkIndex i);
1774 
1775   Metachunk* get_new_chunk(size_t chunk_word_size);
1776 
1777   // Block allocation and deallocation.
1778   // Allocates a block from the current chunk
1779   MetaWord* allocate(size_t word_size);
1780 
1781   // Helper for allocations
1782   MetaWord* allocate_work(size_t word_size);
1783 
1784   // Returns a block to the per manager freelist
1785   void deallocate(MetaWord* p, size_t word_size);
1786 
1787   // Based on the allocation size and a minimum chunk size,
 1788   // return the chunk size to use (for expanding space for chunk allocation).
1789   size_t calc_chunk_size(size_t allocation_word_size);
1790 
1791   // Called when an allocation from the current chunk fails.
1792   // Gets a new chunk (may require getting a new virtual space),
1793   // and allocates from that chunk.
1794   MetaWord* grow_and_allocate(size_t word_size);
1795 
1796   // Notify memory usage to MemoryService.
1797   void track_metaspace_memory_usage();
1798 
1799   // debugging support.
1800 

1801   void print_on(outputStream* st) const;
1802   void locked_print_chunks_in_use_on(outputStream* st) const;
1803 
1804   void verify();
1805   void verify_chunk_size(Metachunk* chunk);



1806 
 1807   // This adjusts the given size up to at least the minimum allocation size in
 1808   // words for data in metaspace.  Essentially the minimum size is currently 3 words.
1809   size_t get_allocation_word_size(size_t word_size) {
1810     size_t byte_size = word_size * BytesPerWord;
1811 
1812     size_t raw_bytes_size = MAX2(byte_size, sizeof(Metablock));
1813     raw_bytes_size = align_up(raw_bytes_size, Metachunk::object_alignment());
1814 
1815     size_t raw_word_size = raw_bytes_size / BytesPerWord;
1816     assert(raw_word_size * BytesPerWord == raw_bytes_size, "Size problem");
1817 
1818     return raw_word_size;
1819   }
1820 
1821   // Adds to the given statistic object. Will lock with CLD metaspace lock.
1822   void add_to_statistics(SpaceManagerStatistics* out) const;
1823 
1824   // Verify internal counters against the current state. Will lock with CLD metaspace lock.
1825   DEBUG_ONLY(void verify_metrics() const;)
1826 
1827 };
1828 
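// Illustrative sketch only (standalone, assumed constants): the rounding done by
// SpaceManager::get_allocation_word_size() above - raise the request to the minimum
// block size, align it, and convert back to words. Here 8-byte words, a 3-word minimum
// block and 8-byte object alignment are assumptions for the example.
#include <cstddef>

static size_t example_align_up(size_t value, size_t alignment) {   // power-of-two alignment
  return (value + alignment - 1) & ~(alignment - 1);
}

static size_t example_allocation_word_size(size_t word_size) {
  const size_t bytes_per_word   = 8;    // assumed 64-bit MetaWord
  const size_t min_block_bytes  = 24;   // assumed sizeof(Metablock) == 3 words
  const size_t object_alignment = 8;    // assumed Metachunk::object_alignment()

  size_t raw_bytes = word_size * bytes_per_word;
  if (raw_bytes < min_block_bytes) { raw_bytes = min_block_bytes; }
  raw_bytes = example_align_up(raw_bytes, object_alignment);
  return raw_bytes / bytes_per_word;    // e.g. 1 or 2 words -> 3, 5 words -> 5
}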
1829 uint const SpaceManager::_small_chunk_limit = 4;
1830 uint const SpaceManager::_anon_and_delegating_metadata_specialize_chunk_limit = 4;
1831 
1832 void VirtualSpaceNode::inc_container_count() {
1833   assert_lock_strong(MetaspaceExpand_lock);
1834   _container_count++;
1835 }
1836 
1837 void VirtualSpaceNode::dec_container_count() {
1838   assert_lock_strong(MetaspaceExpand_lock);
1839   _container_count--;
1840 }
1841 
1842 #ifdef ASSERT
1843 void VirtualSpaceNode::verify_container_count() {
1844   assert(_container_count == container_count_slow(),
1845          "Inconsistency in container_count _container_count " UINTX_FORMAT
1846          " container_count_slow() " UINTX_FORMAT, _container_count, container_count_slow());


2107 }
2108 
2109 
2110 // Expand the virtual space (commit more of the reserved space)
2111 bool VirtualSpaceNode::expand_by(size_t min_words, size_t preferred_words) {
2112   size_t min_bytes = min_words * BytesPerWord;
2113   size_t preferred_bytes = preferred_words * BytesPerWord;
2114 
2115   size_t uncommitted = virtual_space()->reserved_size() - virtual_space()->actual_committed_size();
2116 
2117   if (uncommitted < min_bytes) {
2118     return false;
2119   }
2120 
2121   size_t commit = MIN2(preferred_bytes, uncommitted);
2122   bool result = virtual_space()->expand_by(commit, false);
2123 
2124   if (result) {
 2125     log_trace(gc, metaspace, freelist)("Expanded %s virtual space list node by " SIZE_FORMAT " bytes.",
2126               (is_class() ? "class" : "non-class"), commit);
2127     DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_committed_space_expanded));
2128   } else {
 2129     log_trace(gc, metaspace, freelist)("Failed to expand %s virtual space list node by " SIZE_FORMAT " bytes.",
2130               (is_class() ? "class" : "non-class"), commit);
2131   }
2132 
2133   assert(result, "Failed to commit memory");
2134 
2135   return result;
2136 }
2137 
2138 Metachunk* VirtualSpaceNode::get_chunk_vs(size_t chunk_word_size) {
2139   assert_lock_strong(MetaspaceExpand_lock);
2140   Metachunk* result = take_from_committed(chunk_word_size);
2141   return result;
2142 }
2143 
2144 bool VirtualSpaceNode::initialize() {
2145 
2146   if (!_rs.is_reserved()) {
2147     return false;


2168     set_reserved(MemRegion((HeapWord*)_rs.base(),
2169                  (HeapWord*)(_rs.base() + _rs.size())));
2170 
2171     assert(reserved()->start() == (HeapWord*) _rs.base(),
2172            "Reserved start was not set properly " PTR_FORMAT
2173            " != " PTR_FORMAT, p2i(reserved()->start()), p2i(_rs.base()));
2174     assert(reserved()->word_size() == _rs.size() / BytesPerWord,
2175            "Reserved size was not set properly " SIZE_FORMAT
2176            " != " SIZE_FORMAT, reserved()->word_size(),
2177            _rs.size() / BytesPerWord);
2178   }
2179 
2180   // Initialize Occupancy Map.
2181   const size_t smallest_chunk_size = is_class() ? ClassSpecializedChunk : SpecializedChunk;
2182   _occupancy_map = new OccupancyMap(bottom(), reserved_words(), smallest_chunk_size);
2183 
2184   return result;
2185 }
2186 
2187 void VirtualSpaceNode::print_on(outputStream* st) const {
2188   print_on(st, K);
2189 }
2190 
2191 void VirtualSpaceNode::print_on(outputStream* st, size_t scale) const {
2192   size_t used_words = used_words_in_vs();
2193   size_t commit_words = committed_words();
2194   size_t res_words = reserved_words();
2195   VirtualSpace* vs = virtual_space();
2196 
2197   st->print("node @" PTR_FORMAT ": ", p2i(this));
2198   st->print("reserved=");
2199   print_scaled_words(st, res_words, scale);
2200   st->print(", committed=");
2201   print_scaled_words_and_percentage(st, commit_words, res_words, scale);
2202   st->print(", used=");
2203   print_scaled_words_and_percentage(st, used_words, res_words, scale);
2204   st->cr();
2205   st->print("   [" PTR_FORMAT ", " PTR_FORMAT ", "
2206            PTR_FORMAT ", " PTR_FORMAT ")",


2207            p2i(bottom()), p2i(top()), p2i(end()),
2208            p2i(vs->high_boundary()));
2209 }
2210 
2211 #ifdef ASSERT
2212 void VirtualSpaceNode::mangle() {
2213   size_t word_size = capacity_words_in_vs();
2214   Copy::fill_to_words((HeapWord*) low(), word_size, 0xf1f1f1f1);
2215 }
2216 #endif // ASSERT
2217 
2218 // VirtualSpaceList methods
2219 // Space allocated from the VirtualSpace
2220 
2221 VirtualSpaceList::~VirtualSpaceList() {
2222   VirtualSpaceListIterator iter(virtual_space_list());
2223   while (iter.repeat()) {
2224     VirtualSpaceNode* vsl = iter.get_next();
2225     delete vsl;
2226   }


2408 // Walk the list of VirtualSpaceNodes and delete
2409 // nodes with a 0 container_count.  Remove Metachunks in
2410 // the node from their respective freelists.
2411 void VirtualSpaceList::purge(ChunkManager* chunk_manager) {
2412   assert(SafepointSynchronize::is_at_safepoint(), "must be called at safepoint for contains to work");
2413   assert_lock_strong(MetaspaceExpand_lock);
2414   // Don't use a VirtualSpaceListIterator because this
2415   // list is being changed and a straightforward use of an iterator is not safe.
2416   VirtualSpaceNode* purged_vsl = NULL;
2417   VirtualSpaceNode* prev_vsl = virtual_space_list();
2418   VirtualSpaceNode* next_vsl = prev_vsl;
2419   while (next_vsl != NULL) {
2420     VirtualSpaceNode* vsl = next_vsl;
2421     DEBUG_ONLY(vsl->verify_container_count();)
2422     next_vsl = vsl->next();
2423     // Don't free the current virtual space since it will likely
2424     // be needed soon.
2425     if (vsl->container_count() == 0 && vsl != current_virtual_space()) {
2426       log_trace(gc, metaspace, freelist)("Purging VirtualSpaceNode " PTR_FORMAT " (capacity: " SIZE_FORMAT
2427                                          ", used: " SIZE_FORMAT ").", p2i(vsl), vsl->capacity_words_in_vs(), vsl->used_words_in_vs());
2428       DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_vsnodes_purged));
2429       // Unlink it from the list
2430       if (prev_vsl == vsl) {
2431         // This is the case of the current node being the first node.
2432         assert(vsl == virtual_space_list(), "Expected to be the first node");
2433         set_virtual_space_list(vsl->next());
2434       } else {
2435         prev_vsl->set_next(vsl->next());
2436       }
2437 
2438       vsl->purge(chunk_manager);
2439       dec_reserved_words(vsl->reserved_words());
2440       dec_committed_words(vsl->committed_words());
2441       dec_virtual_space_count();
2442       purged_vsl = vsl;
2443       delete vsl;
2444     } else {
2445       prev_vsl = vsl;
2446     }
2447   }
2448 #ifdef ASSERT


2556   if (vs_word_size == 0) {
2557     assert(false, "vs_word_size should always be at least _reserve_alignment large.");
2558     return false;
2559   }
2560 
2561   // Reserve the space
2562   size_t vs_byte_size = vs_word_size * BytesPerWord;
2563   assert_is_aligned(vs_byte_size, Metaspace::reserve_alignment());
2564 
2565   // Allocate the meta virtual space and initialize it.
2566   VirtualSpaceNode* new_entry = new VirtualSpaceNode(is_class(), vs_byte_size);
2567   if (!new_entry->initialize()) {
2568     delete new_entry;
2569     return false;
2570   } else {
2571     assert(new_entry->reserved_words() == vs_word_size,
2572         "Reserved memory size differs from requested memory size");
2573     // ensure lock-free iteration sees fully initialized node
2574     OrderAccess::storestore();
2575     link_vs(new_entry);
2576     DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_vsnodes_created));
2577     return true;
2578   }
2579 }
2580 
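// Illustrative sketch only (standalone): the OrderAccess::storestore() above makes sure
// a fully initialized node is published before it becomes reachable, because readers
// such as contains() walk the list without taking the lock. The same idea expressed
// with std::atomic release/acquire ordering:
#include <atomic>
#include <cstddef>

struct ExamplePublishedNode { int payload; ExamplePublishedNode* next; };

static std::atomic<ExamplePublishedNode*> g_example_head(NULL);

// Writer: finish initializing the node, then publish it with release ordering so a
// reader that observes the new head also observes the initialized fields.
static void example_publish(ExamplePublishedNode* n, int value) {
  n->payload = value;
  n->next = g_example_head.load(std::memory_order_relaxed);
  g_example_head.store(n, std::memory_order_release);
}

// Reader: acquire pairs with the writer's release.
static int example_read_first_or(int fallback) {
  ExamplePublishedNode* n = g_example_head.load(std::memory_order_acquire);
  return n != NULL ? n->payload : fallback;
}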
2581 void VirtualSpaceList::link_vs(VirtualSpaceNode* new_entry) {
2582   if (virtual_space_list() == NULL) {
2583       set_virtual_space_list(new_entry);
2584   } else {
2585     current_virtual_space()->set_next(new_entry);
2586   }
2587   set_current_virtual_space(new_entry);
2588   inc_reserved_words(new_entry->reserved_words());
2589   inc_committed_words(new_entry->committed_words());
2590   inc_virtual_space_count();
2591 #ifdef ASSERT
2592   new_entry->mangle();
2593 #endif
2594   LogTarget(Trace, gc, metaspace) lt;
2595   if (lt.is_enabled()) {
2596     LogStream ls(lt);


 2706   // additional required padding chunks.
2707   const size_t size_for_padding = largest_possible_padding_size_for_chunk(chunk_word_size, this->is_class());
2708 
2709   size_t min_word_size       = align_up(chunk_word_size + size_for_padding, Metaspace::commit_alignment_words());
2710   size_t preferred_word_size = align_up(suggested_commit_granularity, Metaspace::commit_alignment_words());
2711   if (min_word_size >= preferred_word_size) {
2712     // Can happen when humongous chunks are allocated.
2713     preferred_word_size = min_word_size;
2714   }
2715 
2716   bool expanded = expand_by(min_word_size, preferred_word_size);
2717   if (expanded) {
2718     next = current_virtual_space()->get_chunk_vs(chunk_word_size);
2719     assert(next != NULL, "The allocation was expected to succeed after the expansion");
2720   }
2721 
2722    return next;
2723 }
2724 
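// Illustrative sketch only (assumed values): the expansion path above commits memory in
// multiples of the commit granularity - at minimum enough for the requested chunk plus
// any padding chunks, preferably a whole suggested granule, and never less than the
// minimum even for humongous chunks.
#include <cstddef>

static size_t example_align_up_words(size_t words, size_t alignment_words) {
  return (words + alignment_words - 1) / alignment_words * alignment_words;
}

static void example_commit_sizes(size_t chunk_words, size_t padding_words,
                                 size_t suggested_granule_words,
                                 size_t commit_alignment_words,
                                 size_t* min_out, size_t* preferred_out) {
  size_t min_words       = example_align_up_words(chunk_words + padding_words, commit_alignment_words);
  size_t preferred_words = example_align_up_words(suggested_granule_words, commit_alignment_words);
  if (min_words >= preferred_words) {
    preferred_words = min_words;   // e.g. a humongous chunk exceeds the preferred granule
  }
  *min_out = min_words;
  *preferred_out = preferred_words;
}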
2725 void VirtualSpaceList::print_on(outputStream* st) const {
2726   print_on(st, K);
2727 }
2728 
2729 void VirtualSpaceList::print_on(outputStream* st, size_t scale) const {
2730   st->print_cr(SIZE_FORMAT " nodes, current node: " PTR_FORMAT,
2731       _virtual_space_count, p2i(_current_virtual_space));
2732   VirtualSpaceListIterator iter(virtual_space_list());
2733   while (iter.repeat()) {
2734     st->cr();
2735     VirtualSpaceNode* node = iter.get_next();
2736     node->print_on(st, scale);
2737   }
2738 }
2739 
2740 void VirtualSpaceList::print_map(outputStream* st) const {
2741   VirtualSpaceNode* list = virtual_space_list();
2742   VirtualSpaceListIterator iter(list);
2743   unsigned i = 0;
2744   while (iter.repeat()) {
2745     st->print_cr("Node %u:", i);
2746     VirtualSpaceNode* node = iter.get_next();
2747     node->print_map(st, this->is_class());
2748     i ++;
2749   }
2750 }
2751 
2752 // MetaspaceGC methods
2753 
2754 // VM_CollectForMetadataAllocation is the vm operation used to GC.
2755 // Within the VM operation after the GC the attempt to allocate the metadata
2756 // should succeed.  If the GC did not free enough space for the metaspace


3497       size_chunks_returned += cur->word_size();
3498     }
3499     return_single_chunk(index, cur);
3500     cur = next;
3501   }
3502   if (log.is_enabled()) { // tracing
3503     log.print("returned %u %s chunks to freelist, total word size " SIZE_FORMAT ".",
3504         num_chunks_returned, chunk_size_name(index), size_chunks_returned);
3505     if (index != HumongousIndex) {
3506       log.print("updated freelist count: " SIZE_FORMAT ".", free_chunks(index)->size());
3507     } else {
3508       log.print("updated dictionary count " SIZE_FORMAT ".", _humongous_dictionary.total_count());
3509     }
3510   }
3511 }
3512 
3513 void ChunkManager::print_on(outputStream* out) const {
3514   _humongous_dictionary.report_statistics(out);
3515 }
3516 
3517 void ChunkManager::get_statistics(ChunkManagerStatistics* out) const {
3518   MutexLockerEx cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
3519   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3520     out->chunk_stats(i).add(num_free_chunks(i), size_free_chunks_in_bytes(i) / sizeof(MetaWord));






































































3521   }
3522 }
3523 
3524 // SpaceManager methods
3525 
3526 size_t SpaceManager::adjust_initial_chunk_size(size_t requested, bool is_class_space) {
3527   size_t chunk_sizes[] = {
3528       specialized_chunk_size(is_class_space),
3529       small_chunk_size(is_class_space),
3530       medium_chunk_size(is_class_space)
3531   };
3532 
3533   // Adjust up to one of the fixed chunk sizes ...
3534   for (size_t i = 0; i < ARRAY_SIZE(chunk_sizes); i++) {
3535     if (requested <= chunk_sizes[i]) {
3536       return chunk_sizes[i];
3537     }
3538   }
3539 
3540   // ... or return the size as a humongous chunk.


3556     default:                                 requested = ClassSmallChunk; break;
3557     }
3558   } else {
3559     switch (type) {
3560     case Metaspace::BootMetaspaceType:       requested = Metaspace::first_chunk_word_size(); break;
3561     case Metaspace::AnonymousMetaspaceType:  requested = SpecializedChunk; break;
3562     case Metaspace::ReflectionMetaspaceType: requested = SpecializedChunk; break;
3563     default:                                 requested = SmallChunk; break;
3564     }
3565   }
3566 
3567   // Adjust to one of the fixed chunk sizes (unless humongous)
3568   const size_t adjusted = adjust_initial_chunk_size(requested);
3569 
3570   assert(adjusted != 0, "Incorrect initial chunk size. Requested: "
3571          SIZE_FORMAT " adjusted: " SIZE_FORMAT, requested, adjusted);
3572 
3573   return adjusted;
3574 }
3575 
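// Illustrative sketch only (example sizes): adjust_initial_chunk_size() above rounds a
// requested size up to the nearest fixed chunk size; anything larger than the largest
// fixed size is treated as a humongous request and returned unchanged.
#include <cstddef>

static const size_t kExampleFixedChunkWords[] = { 128, 512, 8192 };  // assumed sizes

static size_t example_adjust_initial_chunk_size(size_t requested_words) {
  const size_t n = sizeof(kExampleFixedChunkWords) / sizeof(kExampleFixedChunkWords[0]);
  for (size_t i = 0; i < n; i++) {
    if (requested_words <= kExampleFixedChunkWords[i]) {
      return kExampleFixedChunkWords[i];     // round up to the next fixed size
    }
  }
  return requested_words;                    // humongous: keep the requested size
}
// e.g. 100 -> 128, 600 -> 8192, 10000 -> 10000 (humongous)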







































































3576 size_t SpaceManager::sum_count_in_chunks_in_use(ChunkIndex i) {
3577   size_t count = 0;
3578   Metachunk* chunk = chunks_in_use(i);
3579   while (chunk != NULL) {
3580     count++;
3581     chunk = chunk->next();
3582   }
3583   return count;
3584 }
3585 














3586 void SpaceManager::locked_print_chunks_in_use_on(outputStream* st) const {
3587 
3588   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3589     Metachunk* chunk = chunks_in_use(i);
3590     st->print("SpaceManager: %s " PTR_FORMAT,
3591                  chunk_size_name(i), p2i(chunk));
3592     if (chunk != NULL) {
3593       st->print_cr(" free " SIZE_FORMAT,
3594                    chunk->free_word_size());
3595     } else {
3596       st->cr();
3597     }
3598   }
3599 
3600   chunk_manager()->locked_print_free_chunks(st);
3601   chunk_manager()->locked_print_sum_free_chunks(st);
3602 }
3603 
3604 size_t SpaceManager::calc_chunk_size(size_t word_size) {
3605 


3687   size_t chunk_word_size = calc_chunk_size(word_size);
3688   Metachunk* next = get_new_chunk(chunk_word_size);
3689 
3690   MetaWord* mem = NULL;
3691 
3692   // If a chunk was available, add it to the in-use chunk list
3693   // and do an allocation from it.
3694   if (next != NULL) {
3695     // Add to this manager's list of chunks in use.
3696     add_chunk(next, false);
3697     mem = next->allocate(word_size);
3698   }
3699 
3700   // Track metaspace memory usage statistic.
3701   track_metaspace_memory_usage();
3702 
3703   return mem;
3704 }
3705 
3706 void SpaceManager::print_on(outputStream* st) const {
3707   SpaceManagerStatistics stat;
3708   add_to_statistics(&stat); // will lock _lock.
3709   stat.print_on(st, 1*K, false);















3710 }
3711 
3712 SpaceManager::SpaceManager(Metaspace::MetadataType mdtype,
3713                            Metaspace::MetaspaceType space_type,
3714                            Mutex* lock) :
3715   _mdtype(mdtype),
3716   _space_type(space_type),
3717   _allocated_block_words(0),
3718   _allocated_chunks_words(0),
3719   _allocated_chunks_count(0),
3720   _block_freelists(NULL),
3721   _lock(lock)
3722 {
3723   initialize();
3724 }
3725 
3726 void SpaceManager::inc_size_metrics(size_t words) {
3727   assert_lock_strong(MetaspaceExpand_lock);
3728   // Total of allocated Metachunks and allocated Metachunks count
3729   // for each SpaceManager
3730   _allocated_chunks_words = _allocated_chunks_words + words;
3731   _allocated_chunks_count++;
3732 
3733   // Global total of capacity in allocated Metachunks
3734   MetaspaceUtils::inc_capacity(mdtype(), words);
3735   // Global total of allocated Metablocks.
3736   // used_words_slow() includes the overhead in each
3737   // Metachunk so include it in the used when the
3738   // Metachunk is first added (so only added once per
3739   // Metachunk).
3740   MetaspaceUtils::inc_used(mdtype(), Metachunk::overhead());
3741 }
3742 
3743 void SpaceManager::inc_used_metrics(size_t words) {
3744   // Add to the per SpaceManager total
3745   Atomic::add(words, &_allocated_block_words);
3746   // Add to the global total
3747   MetaspaceUtils::inc_used(mdtype(), words);
3748 }
3749 
3750 void SpaceManager::dec_total_from_size_metrics() {
3751   MetaspaceUtils::dec_capacity(mdtype(), allocated_chunks_words());
3752   MetaspaceUtils::dec_used(mdtype(), allocated_blocks_words());


3753 }
3754 
3755 void SpaceManager::initialize() {
3756   Metadebug::init_allocation_fail_alot_count();
3757   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3758     _chunks_in_use[i] = NULL;
3759   }
3760   _current_chunk = NULL;
3761   log_trace(gc, metaspace, freelist)("SpaceManager(): " PTR_FORMAT, p2i(this));
3762 }
3763 
3764 SpaceManager::~SpaceManager() {
3765 
 3766   // This call takes this->_lock, which can't be done while holding MetaspaceExpand_lock
3767   DEBUG_ONLY(verify_metrics());



3768 
3769   MutexLockerEx fcl(MetaspaceExpand_lock,
3770                     Mutex::_no_safepoint_check_flag);
3771 





3772   chunk_manager()->slow_locked_verify();
3773 
3774   dec_total_from_size_metrics();
3775 
3776   Log(gc, metaspace, freelist) log;
3777   if (log.is_trace()) {
3778     log.trace("~SpaceManager(): " PTR_FORMAT, p2i(this));
3779     ResourceMark rm;
3780     LogStream ls(log.trace());
3781     locked_print_chunks_in_use_on(&ls);
3782     if (block_freelists() != NULL) {
3783       block_freelists()->print_on(&ls);
3784     }
3785   }
3786 
3787   // Add all the chunks in use by this space manager
3788   // to the global list of free chunks.
3789 
3790   // Follow each list of chunks-in-use and add them to the
3791   // free lists.  Each list is NULL terminated.
3792 
3793   for (ChunkIndex i = ZeroIndex; i <= HumongousIndex; i = next_chunk_index(i)) {
3794     Metachunk* chunks = chunks_in_use(i);
3795     chunk_manager()->return_chunk_list(i, chunks);
3796     set_chunks_in_use(i, NULL);
3797   }
3798 
3799   chunk_manager()->slow_locked_verify();
3800 
3801   if (_block_freelists != NULL) {
3802     delete _block_freelists;
3803   }
3804 }
3805 
3806 void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
3807   assert_lock_strong(lock());
3808   // Allocations and deallocations are in raw_word_size
3809   size_t raw_word_size = get_allocation_word_size(word_size);
3810   // Lazily create a block_freelist
3811   if (block_freelists() == NULL) {
3812     _block_freelists = new BlockFreelist();
3813   }
3814   block_freelists()->return_block(p, raw_word_size);
3815 }
3816 
3817 // Adds a chunk to the list of chunks in use.
3818 void SpaceManager::add_chunk(Metachunk* new_chunk, bool make_current) {
3819 
3820   assert(new_chunk != NULL, "Should not be NULL");
3821   assert(new_chunk->next() == NULL, "Should not be on a list");
3822 
3823   new_chunk->reset_empty();
3824 
 3825   // Find the correct list and set the current
3826   // chunk for that list.
3827   ChunkIndex index = chunk_manager()->list_index(new_chunk->word_size());


3838     if (make_current) {
3839       // Set as the current chunk but otherwise treat as a humongous chunk.
3840       set_current_chunk(new_chunk);
3841     }
 3842     // Link at head.  The _current_chunk only points to a humongous chunk for
 3843     // the null class loader metaspace (the class and data virtual space managers),
 3844     // so it will never point to the tail
 3845     // of the humongous chunks list.
3846     new_chunk->set_next(chunks_in_use(HumongousIndex));
3847     set_chunks_in_use(HumongousIndex, new_chunk);
3848 
3849     assert(new_chunk->word_size() > medium_chunk_size(), "List inconsistency");
3850   }
3851 
3852   // Add to the running sum of capacity
3853   inc_size_metrics(new_chunk->word_size());
3854 
3855   assert(new_chunk->is_empty(), "Not ready for reuse");
3856   Log(gc, metaspace, freelist) log;
3857   if (log.is_trace()) {
3858     log.trace("SpaceManager::add_chunk: " SIZE_FORMAT ") ", _allocated_chunks_count);
3859     ResourceMark rm;
3860     LogStream ls(log.trace());
3861     new_chunk->print_on(&ls);
3862     chunk_manager()->locked_print_free_chunks(&ls);
3863   }
3864 }
3865 
3866 void SpaceManager::retire_current_chunk() {
3867   if (current_chunk() != NULL) {
3868     size_t remaining_words = current_chunk()->free_word_size();
3869     if (remaining_words >= BlockFreelist::min_dictionary_size()) {
3870       MetaWord* ptr = current_chunk()->allocate(remaining_words);
3871       deallocate(ptr, remaining_words);
3872       inc_used_metrics(remaining_words);
3873     }
3874   }
3875 }
3876 
3877 Metachunk* SpaceManager::get_new_chunk(size_t chunk_word_size) {
3878   // Get a chunk from the chunk freelist


3887   if (log.is_debug() && next != NULL &&
3888       SpaceManager::is_humongous(next->word_size())) {
 3889     log.debug("  new humongous chunk word size " SIZE_FORMAT, next->word_size());
3890   }
3891 
3892   return next;
3893 }
3894 
3895 MetaWord* SpaceManager::allocate(size_t word_size) {
3896   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3897   size_t raw_word_size = get_allocation_word_size(word_size);
3898   BlockFreelist* fl =  block_freelists();
3899   MetaWord* p = NULL;
3900   // Allocation from the dictionary is expensive in the sense that
3901   // the dictionary has to be searched for a size.  Don't allocate
3902   // from the dictionary until it starts to get fat.  Is this
3903   // a reasonable policy?  Maybe an skinny dictionary is fast enough
3904   // for allocations.  Do some profiling.  JJJ
3905   if (fl != NULL && fl->total_size() > allocation_from_dictionary_limit) {
3906     p = fl->get_block(raw_word_size);
3907     if (p != NULL) {
3908       DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_allocs_from_deallocated_blocks));
3909     }
3910   }
3911   if (p == NULL) {
3912     p = allocate_work(raw_word_size);
3913   }
3914 
3915   return p;
3916 }
3917 
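// Illustrative sketch only (hypothetical types): allocate() above consults the
// deallocation freelist only once it holds more than allocation_from_dictionary_limit
// words, because a best-fit lookup is comparatively expensive; otherwise it goes
// straight to bump allocation from the current chunk.
#include <cstddef>

struct ExampleDeallocFreelist {
  size_t total_words;                                  // running total of cached words
  void*  get_block(size_t /*words*/) { return NULL; }  // stub for the example
};

static void* example_allocate(ExampleDeallocFreelist* fl, size_t raw_words,
                              void* (*allocate_from_chunk)(size_t),
                              size_t dictionary_limit_words) {
  void* p = NULL;
  if (fl != NULL && fl->total_words > dictionary_limit_words) {
    p = fl->get_block(raw_words);          // try reusing a deallocated block
  }
  if (p == NULL) {
    p = allocate_from_chunk(raw_words);    // fall back to the current chunk
  }
  return p;
}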
 3918 // Returns the address of space allocated for "word_size".
 3919 // This method does not know about blocks (Metablocks).
3920 MetaWord* SpaceManager::allocate_work(size_t word_size) {
3921   assert_lock_strong(lock());
3922 #ifdef ASSERT
3923   if (Metadebug::test_metadata_failure()) {
3924     return NULL;
3925   }
3926 #endif
3927   // Is there space in the current chunk?
3928   MetaWord* result = NULL;
3929 
3930   if (current_chunk() != NULL) {
3931     result = current_chunk()->allocate(word_size);
3932   }
3933 
3934   if (result == NULL) {
3935     result = grow_and_allocate(word_size);
3936   }
3937 
3938   if (result != NULL) {
3939     inc_used_metrics(word_size);
3940     assert(result != (MetaWord*) chunks_in_use(MediumIndex),
3941            "Head of the list is being allocated");


3947 void SpaceManager::verify() {
3948   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3949     Metachunk* curr = chunks_in_use(i);
3950     while (curr != NULL) {
3951       DEBUG_ONLY(do_verify_chunk(curr);)
3952       assert(curr->is_tagged_free() == false, "Chunk should be tagged as in use.");
3953       curr = curr->next();
3954     }
3955   }
3956 }
3957 
3958 void SpaceManager::verify_chunk_size(Metachunk* chunk) {
3959   assert(is_humongous(chunk->word_size()) ||
3960          chunk->word_size() == medium_chunk_size() ||
3961          chunk->word_size() == small_chunk_size() ||
3962          chunk->word_size() == specialized_chunk_size(),
3963          "Chunk size is wrong");
3964   return;
3965 }
3966 
3967 void SpaceManager::add_to_statistics_locked(SpaceManagerStatistics* out) const {
3968   assert_lock_strong(lock());
3969   for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
3970     UsedChunksStatistics& chunk_stat = out->chunk_stats(i);
3971     Metachunk* chunk = chunks_in_use(i);
3972     while (chunk != NULL) {
3973       chunk_stat.add_num(1);
3974       chunk_stat.add_cap(chunk->word_size());
3975       chunk_stat.add_used(chunk->used_word_size());
3976       if (chunk != current_chunk()) {
3977         chunk_stat.add_waste(chunk->free_word_size());
3978       } else {
3979         chunk_stat.add_free(chunk->free_word_size());
3980       }
3981       chunk = chunk->next();
3982     }
3983   }
3984   if (block_freelists() != NULL) {
3985     out->add_free_blocks(block_freelists()->num_blocks(), block_freelists()->total_size());
3986   }
3987 }
3988 
3989 void SpaceManager::add_to_statistics(SpaceManagerStatistics* out) const {
3990   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
3991   add_to_statistics_locked(out);
3992 }
3993 
3994 #ifdef ASSERT
3995 void SpaceManager::verify_metrics_locked() const {
3996   assert_lock_strong(lock());



















3997 
3998   SpaceManagerStatistics stat;
3999   add_to_statistics_locked(&stat);
4000 
4001   UsedChunksStatistics chunk_stats = stat.totals();
4002 
 4003   assert_counter(_allocated_block_words, chunk_stats.used(), "SpaceManager::_allocated_block_words");
4004   assert_counter(_allocated_chunks_words, chunk_stats.cap(), "SpaceManager::_allocated_chunks_words");
4005   assert_counter(_allocated_chunks_count, chunk_stats.num(), "SpaceManager::_allocated_chunks_count");
4006 }
4007 
4008 void SpaceManager::verify_metrics() const {
4009   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
4010   verify_metrics_locked();
4011 }
4012 #endif // ASSERT
4013 
4014 
4015 // MetaspaceUtils
4016 
4017 
4018 size_t MetaspaceUtils::_capacity_words[] = {0, 0};
4019 volatile size_t MetaspaceUtils::_used_words[] = {0, 0};
4020 
4021 
4022 // Collect used metaspace statistics. This involves walking the CLDG. The resulting
4023 // output will be the accumulated values for all live metaspaces.
4024 // Note: method does not do any locking.
4025 void MetaspaceUtils::collect_statistics(ClassLoaderMetaspaceStatistics* out) {
4026   out->reset();
4027   ClassLoaderDataGraphMetaspaceIterator iter;
 4028   while (iter.repeat()) {
 4029     ClassLoaderMetaspace* msp = iter.get_next();
 4030     if (msp != NULL) {
 4031       msp->add_to_statistics(out);
 4032     }
 4033   }
4034 }
4035 
4036 size_t MetaspaceUtils::free_bytes(Metaspace::MetadataType mdtype) {
4037   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
4038   return list == NULL ? 0 : list->free_bytes();
4039 }
4040 
4041 size_t MetaspaceUtils::free_bytes() {
4042   return free_bytes(Metaspace::ClassType) + free_bytes(Metaspace::NonClassType);
4043 }
4044 
4045 void MetaspaceUtils::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
4046   assert_lock_strong(MetaspaceExpand_lock);
4047   assert(words <= capacity_words(mdtype),
4048          "About to decrement below 0: words " SIZE_FORMAT
4049          " is greater than _capacity_words[%u] " SIZE_FORMAT,
4050          words, mdtype, capacity_words(mdtype));
4051 
4052   _capacity_words[mdtype] -= words;
4053 }
4054 
4055 void MetaspaceUtils::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
4056   assert_lock_strong(MetaspaceExpand_lock);
4057   // Needs to be atomic
4058   _capacity_words[mdtype] += words;
4059 }
4060 
4061 void MetaspaceUtils::dec_used(Metaspace::MetadataType mdtype, size_t words) {
4062   assert(words <= used_words(mdtype),
4063          "About to decrement below 0: words " SIZE_FORMAT
4064          " is greater than _used_words[%u] " SIZE_FORMAT,
4065          words, mdtype, used_words(mdtype));
 4066   // For CMS, deallocation of the Metaspaces occurs during the
 4067   // sweep, which is a concurrent phase.  Protection by the MetaspaceExpand_lock
 4068   // is not enough since allocation is on a per-Metaspace basis
 4069   // and protected by the Metaspace lock.
4070   Atomic::sub(words, &_used_words[mdtype]);
4071 }
4072 
4073 void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
4074   // _used_words tracks allocations for
4075   // each piece of metadata.  Those allocations are
4076   // generally done concurrently by different application
4077   // threads so must be done atomically.
4078   Atomic::add(words, &_used_words[mdtype]);
4079 }
4080 
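// Illustrative sketch only (standalone): inc_used()/dec_used() above update the global
// used-words counters without the expand lock, since deallocation can run concurrently
// (e.g. during a CMS sweep). The same pattern expressed with std::atomic:
#include <atomic>
#include <cstddef>

static std::atomic<size_t> g_example_used_words[2];   // non-class / class, as in MetadataType

static void example_inc_used(int mdtype, size_t words) {
  g_example_used_words[mdtype].fetch_add(words, std::memory_order_relaxed);
}

static void example_dec_used(int mdtype, size_t words) {
  // Callers must guarantee they never subtract more than was added.
  g_example_used_words[mdtype].fetch_sub(words, std::memory_order_relaxed);
}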



























































4081 size_t MetaspaceUtils::reserved_bytes(Metaspace::MetadataType mdtype) {
4082   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
4083   return list == NULL ? 0 : list->reserved_bytes();
4084 }
4085 
4086 size_t MetaspaceUtils::committed_bytes(Metaspace::MetadataType mdtype) {
4087   VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
4088   return list == NULL ? 0 : list->committed_bytes();
4089 }
4090 
4091 size_t MetaspaceUtils::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }
4092 
4093 size_t MetaspaceUtils::free_chunks_total_words(Metaspace::MetadataType mdtype) {
4094   ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
4095   if (chunk_manager == NULL) {
4096     return 0;
4097   }
4098   chunk_manager->slow_verify();
4099   return chunk_manager->free_chunks_total_words();
4100 }


4140                 "reserved "  SIZE_FORMAT "K",
4141                 used_bytes()/K,
4142                 capacity_bytes()/K,
4143                 committed_bytes()/K,
4144                 reserved_bytes()/K);
4145 
4146   if (Metaspace::using_class_space()) {
4147     Metaspace::MetadataType ct = Metaspace::ClassType;
4148     out->print_cr("  class space    "
4149                   "used "      SIZE_FORMAT "K, "
4150                   "capacity "  SIZE_FORMAT "K, "
4151                   "committed " SIZE_FORMAT "K, "
4152                   "reserved "  SIZE_FORMAT "K",
4153                   used_bytes(ct)/K,
4154                   capacity_bytes(ct)/K,
4155                   committed_bytes(ct)/K,
4156                   reserved_bytes(ct)/K);
4157   }
4158 }
4159 
4160 class PrintCLDMetaspaceInfoClosure : public CLDClosure {















































































4161 private:
4162   outputStream* const _out;
4163   const size_t        _scale;
4164   const bool          _do_print;
4165   const bool          _break_down_by_chunktype;
4166 
4167 public:















4168 
4169   uintx                           _num_loaders_with_metaspace;
4170   uintx                           _num_loaders_without_metaspace;
4171   ClassLoaderMetaspaceStatistics  _stats_total;
4172 
4173   uintx                           _num_loaders_by_spacetype [Metaspace::MetaspaceTypeCount];
4174   ClassLoaderMetaspaceStatistics  _stats_by_spacetype [Metaspace::MetaspaceTypeCount];




















4175 
4176 public:
4177   PrintCLDMetaspaceInfoClosure(outputStream* out, size_t scale, bool do_print, bool break_down_by_chunktype)
4178     : _out(out), _scale(scale), _do_print(do_print), _break_down_by_chunktype(break_down_by_chunktype)
4179     , _num_loaders_with_metaspace(0)
4180     , _num_loaders_without_metaspace(0)
4181   {
4182     memset(_num_loaders_by_spacetype, 0, sizeof(_num_loaders_by_spacetype));
4183   }
4184 
4185   void do_cld(ClassLoaderData* cld) {
4186 
4187     assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
4188 

4189     ClassLoaderMetaspace* msp = cld->metaspace_or_null();
4190     if (msp == NULL) {
4191       _num_loaders_without_metaspace ++;
4192       return;
4193     }
4194 
4195     // Collect statistics for this class loader metaspace
4196     ClassLoaderMetaspaceStatistics this_cld_stat;
4197     msp->add_to_statistics(&this_cld_stat);
4198 
4199     // And add it to the running totals
4200     _stats_total.add(this_cld_stat);
4201     _num_loaders_with_metaspace ++;
4202     _stats_by_spacetype[msp->space_type()].add(this_cld_stat);
4203     _num_loaders_by_spacetype[msp->space_type()] ++;
4204 
4205     // Optionally, print.
4206     if (_do_print) {
4207 
4208       _out->print(UINTX_FORMAT_W(4) ": ", _num_loaders_with_metaspace);
4209 
4210       if (cld->is_anonymous()) {
4211         _out->print("ClassLoaderData " PTR_FORMAT " for anonymous class", p2i(cld));

4212       } else {
4213         ResourceMark rm;
4214         _out->print("ClassLoaderData " PTR_FORMAT " for %s", p2i(cld), cld->loader_name());
4215       }
4216 
4217       if (msp->space_type() != Metaspace::StandardMetaspaceType) {
4218         _out->print(", %s loader", space_type_name(msp->space_type()));
4219       }
4220 
4221       if (cld->is_unloading()) {
4222         _out->print(", unloading");




















4223       }
4224 
4225       _out->cr();
4226       this_cld_stat.print_on(_out, _scale, _break_down_by_chunktype);
4227       _out->cr();
4228 
4229     }
4230 
4231   } // do_cld
4232 
4233 };



4234 
4235 void MetaspaceUtils::print_report(outputStream* out, size_t scale, int flags) {

4236 
4237   const bool print_loaders = (flags & rf_show_loaders) > 0;
4238   const bool print_by_chunktype = (flags & rf_break_down_by_chunktype) > 0;
4239   const bool print_by_spacetype = (flags & rf_break_down_by_spacetype) > 0;
4240   bool have_detailed_cl_data = false;
4241 
4242   // Some report options require walking the class loader data graph.
4243   PrintCLDMetaspaceInfoClosure cl(out, scale, print_loaders, print_by_chunktype);
4244   if (print_loaders) {
4245     out->cr();
4246     out->print_cr("Usage per loader:");
4247     out->cr();
4248   }
4249   if (print_loaders || print_by_chunktype || print_by_spacetype) {
4250     ClassLoaderDataGraph::cld_do(&cl); // collect data and optionally print
4251     have_detailed_cl_data = true;
4252   }
4253 
4254   // Print totals, broken up by space type.
4255   if (print_by_spacetype) {
4256     out->cr();
4257     out->print_cr("Usage per space type:");
4258     out->cr();
4259     for (int space_type = (int)Metaspace::ZeroMetaspaceType;
4260          space_type < (int)Metaspace::MetaspaceTypeCount; space_type ++)
4261     {
4262       uintx num = cl._num_loaders_by_spacetype[space_type];
4263       out->print("%s: " UINTX_FORMAT " spaces%c",
4264         space_type_name((Metaspace::MetaspaceType)space_type),
4265         num, num > 0 ? ':' : '.');
4266       if (num > 0) {
4267         out->cr();
4268         cl._stats_by_spacetype[space_type].print_on(out, scale, print_by_chunktype);
4269       }
4270       out->cr();
4271     }
4272   }

4273 
4274   // Print totals for in-use data:
4275   out->cr();
4276   out->print_cr("Total Usage:");
4277   out->cr();
4278 
4279   if (have_detailed_cl_data) {
4280     out->print_cr(UINTX_FORMAT " loaders (" UINTX_FORMAT " without metaspace)",
4281         cl._num_loaders_with_metaspace + cl._num_loaders_without_metaspace, cl._num_loaders_without_metaspace);
4282     out->cr();
4283     cl._stats_total.print_on(out, scale, print_by_chunktype);
4284   } else {
4285     // In its most basic form, we do not require walking the CLDG. Instead, just print the running totals from
4286     // MetaspaceUtils.
4287     const size_t cap_nonclass = MetaspaceUtils::capacity_words(Metaspace::NonClassType);
4288     const size_t used_nonclass = MetaspaceUtils::used_words(Metaspace::NonClassType);
4289     const size_t free_and_waste_nonclass = cap_nonclass - used_nonclass;
4290     if (Metaspace::using_class_space()) {
4291       out->print_cr("  Non-class space:");
4292     }
4293     print_scaled_words(out, cap_nonclass, scale, 6);
4294     out->print(" capacity, ");
4295     print_scaled_words_and_percentage(out, used_nonclass, cap_nonclass, scale, 6);
4296     out->print(" used, ");
4297     print_scaled_words_and_percentage(out, free_and_waste_nonclass, cap_nonclass, scale, 6);
4298     out->print(" free+waste. ");
4299 
4300     if (Metaspace::using_class_space()) {
4301       out->print_cr("      Class space:");
4302       const size_t cap_class = MetaspaceUtils::capacity_words(Metaspace::ClassType);
4303       const size_t used_class = MetaspaceUtils::used_words(Metaspace::ClassType);
4304       const size_t free_and_waste_class = cap_class - used_class;
4305       print_scaled_words(out, cap_class, scale, 6);
4306       out->print(" capacity, ");
4307       print_scaled_words_and_percentage(out, used_class, cap_class, scale, 6);
4308       out->print(" used, ");
4309       print_scaled_words_and_percentage(out, free_and_waste_class, cap_class, scale, 6);
4310       out->print(" free+waste. ");
4311 
4312       out->print_cr("            Total:");
4313       const size_t cap = cap_nonclass + cap_class;
4314       const size_t used = used_nonclass + used_class;
4315       const size_t free_and_waste = free_and_waste_nonclass + free_and_waste_class;
4316       print_scaled_words(out, cap, scale, 6);
4317       out->print(" capacity, ");
4318       print_scaled_words_and_percentage(out, used, cap, scale, 6);
4319       out->print(" used, ");
4320       print_scaled_words_and_percentage(out, free_and_waste, cap, scale, 6);
4321       out->print(" free+waste. ");
4322     }
4323     out->cr();
4324   }
4325 
4326   // -- Print Virtual space.
4327   out->cr();
4328   out->print_cr("Virtual Space:");
4329   out->cr();
4330   const size_t reserved_nonclass_words = reserved_bytes(Metaspace::NonClassType) / sizeof(MetaWord);
4331   const size_t committed_nonclass_words = committed_bytes(Metaspace::NonClassType) / sizeof(MetaWord);
4332   const size_t reserved_class_words = reserved_bytes(Metaspace::ClassType) / sizeof(MetaWord);
4333   const size_t committed_class_words = committed_bytes(Metaspace::ClassType) / sizeof(MetaWord);
4334   const size_t reserved_words = reserved_nonclass_words + reserved_class_words;
4335   const size_t committed_words = committed_nonclass_words + committed_class_words;
4336   {
4337     if (Metaspace::using_class_space()) {
4338       out->print_cr("  Non-class space:");
4339     }
4340     print_scaled_words(out, reserved_nonclass_words, scale, 7);
4341     out->print(" reserved, ");
4342     print_scaled_words_and_percentage(out, committed_nonclass_words, reserved_nonclass_words, scale, 7);
4343     out->print(" committed ");
4344 
4345     if (Metaspace::using_class_space()) {
4346       out->print_cr("      Class space:");
 4347       print_scaled_words(out, reserved_class_words, scale, 7);
4348       out->print(" reserved, ");
4349       print_scaled_words_and_percentage(out, committed_class_words, reserved_class_words, scale, 7);
4350       out->print(" committed ");
4351 
4352       out->print_cr("            Total:");
4353       print_scaled_words(out, reserved_words, scale, 7);
4354       out->print(" reserved, ");
4355       print_scaled_words_and_percentage(out, committed_words, reserved_words, scale, 7);
4356       out->print(" committed ");
4357     }
4358   }
4359   out->cr();
4360 
4361   // -- Print VirtualSpaceList details.
4362   if ((flags & rf_show_vslist) > 0) {
4363     out->cr();
4364     out->print_cr("Virtual Space List%s:", Metaspace::using_class_space() ? "s" : "");
4365     out->cr();
4366     if (Metaspace::using_class_space()) {
4367       out->print_cr("   Non-Class:");
4368     }
4369     Metaspace::space_list()->print_on(out, scale);
4370     if (Metaspace::using_class_space()) {
4371       out->print_cr("       Class:");
4372       Metaspace::class_space_list()->print_on(out, scale);
4373     }
4374   }
4375   out->cr();
4376 
4377   // -- Print VirtualSpaceList map.
4378   if ((flags & rf_show_vsmap) > 0) {
4379     out->cr();
4380     out->print_cr("Virtual Space Map:");
4381     out->cr();
4382     if (Metaspace::using_class_space()) {
4383       out->print_cr("   Non-Class:");
4384     }
4385     Metaspace::space_list()->print_map(out);
4386     if (Metaspace::using_class_space()) {
4387       out->print_cr("       Class:");
4388       Metaspace::class_space_list()->print_map(out);
4389     }
4390   }
4391   out->cr();
4392 
4393   // -- Print Freelists (ChunkManager) details
4394   out->cr();
4395   out->print("Free Chunk List%s:", Metaspace::using_class_space() ? "s" : "");
4396   out->cr();
4397 
4398   if ((flags & rf_show_chunk_freelist) > 0) {
4399     ChunkManagerStatistics non_class_cm_stat;
4400     Metaspace::chunk_manager_metadata()->get_statistics(&non_class_cm_stat);
4401     ChunkManagerStatistics class_cm_stat;
4402     Metaspace::chunk_manager_class()->get_statistics(&class_cm_stat);
4403 
4404     if (Metaspace::using_class_space()) {
4405       out->print_cr("   Non-Class:");

4406     }
4407     non_class_cm_stat.print_on(out, scale);
4408 






4409     if (Metaspace::using_class_space()) {
4410       out->print_cr("       Class:");
4411       class_cm_stat.print_on(out, scale);
4412     }
4413   } else {
4414     // In its basic form, report only capacity in free chunks, but take those numbers from the
4415     // running totals in the chunk managers to avoid locking.
4416     if (Metaspace::using_class_space()) {
4417       out->print_cr("   Non-Class:");
4418     }
4419     print_scaled_bytes(out, Metaspace::chunk_manager_metadata()->free_chunks_total_words(), scale);
4420     out->cr();
4421     if (Metaspace::using_class_space()) {
4422       out->print_cr("       Class:");
4423       print_scaled_bytes(out, Metaspace::chunk_manager_class()->free_chunks_total_words(), scale);
4424       out->cr();
4425     }
4426     out->cr();
4427   }
4428 
4429   // As a convenience, print a summary of common waste.
4430   out->cr();
4431   out->print_cr("Waste:");
 4432   // For all waste numbers, print percentages of a total. As the total, use the total size of memory committed for metaspace.
4433   out->print("  (Percentages are of total committed metaspace size (");
4434   print_scaled_words(out, committed_words, scale);
4435   out->print_cr(")");
4436 
4437   // Print waste for in-use chunks.
4438   if (have_detailed_cl_data) {
4439     UsedChunksStatistics ucs_nonclass = cl._stats_total.nonclass_sm_stats().totals();
4440     UsedChunksStatistics ucs_class = cl._stats_total.class_sm_stats().totals();
4441     UsedChunksStatistics ucs_all;
4442     ucs_all.add(ucs_nonclass);
4443     ucs_all.add(ucs_class);
4444     out->print("Waste in chunks in use:         ");
4445     print_scaled_words_and_percentage(out, ucs_all.waste(), committed_words, scale, 6);
4446     out->cr();
4447     out->print("Free in chunks in use:          ");
4448     print_scaled_words_and_percentage(out, ucs_all.free(), committed_words, scale, 6);
4449     out->cr();
4450   } else {
4451     // if we did not walk the CLDG, use the running numbers.
4452     size_t free_and_waste_words = MetaspaceUtils::capacity_words() - MetaspaceUtils::used_words();
4453     out->print("Free+Waste in chunks in use:    ");
4454     print_scaled_words_and_percentage(out, free_and_waste_words, committed_words, scale, 6);
4455     out->cr();
4456   }
4457 
4458   // Print waste in deallocated blocks.
4459   if (have_detailed_cl_data) {
4460     const uintx free_blocks_num =
4461         cl._stats_total.nonclass_sm_stats().free_blocks_num() +
4462         cl._stats_total.class_sm_stats().free_blocks_num();
4463     const size_t free_blocks_cap_words =
4464         cl._stats_total.nonclass_sm_stats().free_blocks_cap_words() +
4465         cl._stats_total.class_sm_stats().free_blocks_cap_words();
4466     out->print("Deallocated from chunks in use: " UINTX_FORMAT " blocks, total size ", free_blocks_num);
4467     print_scaled_words_and_percentage(out, free_blocks_cap_words, committed_words, scale, 6);
4468     out->cr();
4469   }
4470 
4471   // Print waste in free chunks.
4472   {
4473     const size_t total_capacity_in_free_chunks =
4474         Metaspace::chunk_manager_metadata()->free_chunks_total_words() +
4475         Metaspace::chunk_manager_class()->free_chunks_total_words();
4476     out->print("In free chunks:                 ");
4477     print_scaled_words_and_percentage(out, total_capacity_in_free_chunks, committed_words, scale, 6);
4478     out->cr();
4479   }
4480 
4481   // Print internal statistics
4482 #ifdef ASSERT
4483   out->cr();
4484   out->cr();
4485   out->print_cr("Internal statistics:");
4486   out->cr();
4487   out->print_cr("Number of allocations: " UINTX_FORMAT ".", g_internal_statistics.num_allocs);
4488   out->print_cr("Number of space births: " UINTX_FORMAT ".", g_internal_statistics.num_metaspace_births);
4489   out->print_cr("Number of space deaths: " UINTX_FORMAT ".", g_internal_statistics.num_metaspace_deaths);
4490   out->print_cr("Number of virtual space node births: " UINTX_FORMAT ".", g_internal_statistics.num_vsnodes_created);
4491   out->print_cr("Number of virtual space node deaths: " UINTX_FORMAT ".", g_internal_statistics.num_vsnodes_purged);
4492   out->print_cr("Number of times virtual space nodes were expanded: " UINTX_FORMAT ".", g_internal_statistics.num_committed_space_expanded);
4493   out->print_cr("Number of de-allocations: " UINTX_FORMAT ".", g_internal_statistics.num_deallocs);
4494   out->print_cr("Allocs satisfied from deallocated blocks: " UINTX_FORMAT ".", g_internal_statistics.num_allocs_from_deallocated_blocks);
4495   out->cr();
4496 #endif
4497 
4498   // Print some interesting settings
4499   out->cr();
4500   out->cr();
4501   out->print("MaxMetaspaceSize:           ");
4502   print_scaled_bytes(out, MaxMetaspaceSize, scale);
4503   out->cr();
4504   out->print("UseCompressedClassPointers: %s", UseCompressedClassPointers ? "true" : "false");
4505   out->cr();
4506   out->print("CompressedClassSpaceSize:   ");
4507   print_scaled_bytes(out, CompressedClassSpaceSize, scale);
4508 
4509   out->cr();
4510   out->cr();
4511 
4512 } // MetaspaceUtils::print_report()
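
// Note: print_report() is the central routine behind the more detailed metaspace
// reporting introduced with this change (the jcmd-based reporting; it is also used
// for the metadata OOM logging below). Callers select the level of detail via the
// rf_* flags and the output scale.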



4513 
4514 // Prints an ASCII representation of the given space.
4515 void MetaspaceUtils::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
4516   MutexLockerEx cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
4517   const bool for_class = (mdtype == Metaspace::ClassType);
4518   VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
4519   if (vsl != NULL) {
4520     if (for_class) {
4521       if (!Metaspace::using_class_space()) {
4522         out->print_cr("No Class Space.");
4523         return;
4524       }
4525       out->print_raw("---- Metaspace Map (Class Space) ----");
4526     } else {
4527       out->print_raw("---- Metaspace Map (Non-Class Space) ----");
4528     }
4529     // Print legend:
4530     out->cr();
4531     out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
4532     out->cr();
4534     vsl->print_map(out);
4535     out->cr();
4536   }
4537 }
4538 
4539 void MetaspaceUtils::verify_free_chunks() {
4540   Metaspace::chunk_manager_metadata()->verify();
4541   if (Metaspace::using_class_space()) {
4542     Metaspace::chunk_manager_class()->verify();
4543   }
4544 }
4545 
4546 void MetaspaceUtils::verify_metrics() {
4547 #ifdef ASSERT
4548   // Please note: there are time windows where the internal counters are out of sync with
4549   // reality. For example, when a newly created ClassLoaderMetaspace creates its first chunk -
4550   // the ClassLoaderMetaspace is not yet attached to its ClassLoaderData object and hence will
4551   // not be counted when iterating the CLDG. So be careful when you call this method.
4552   ClassLoaderMetaspaceStatistics total_stat;
4553   collect_statistics(&total_stat);
4554   UsedChunksStatistics nonclass_chunk_stat = total_stat.nonclass_sm_stats().totals();
4555   UsedChunksStatistics class_chunk_stat = total_stat.class_sm_stats().totals();
4556   bool mismatch =
4557       _capacity_words[Metaspace::NonClassType] != nonclass_chunk_stat.cap() ||
4558       _used_words[Metaspace::NonClassType] != nonclass_chunk_stat.used() ||
4559       _capacity_words[Metaspace::ClassType] != class_chunk_stat.cap() ||
4560       _used_words[Metaspace::ClassType] != class_chunk_stat.used();
4561   if (mismatch) {
4562     tty->print_cr("MetaspaceUtils::verify_metrics: counter mismatch.");
4563     tty->print_cr("Expected: non-class cap: " SIZE_FORMAT ", non-class used: " SIZE_FORMAT
4564                   ", class cap: " SIZE_FORMAT ", class used: " SIZE_FORMAT ".",
4565                   _capacity_words[Metaspace::NonClassType], _used_words[Metaspace::NonClassType],
4566                   _capacity_words[Metaspace::ClassType], _used_words[Metaspace::ClassType]);
4567     tty->print_cr("Got: non-class: ");
4568     nonclass_chunk_stat.print_on(tty, sizeof(MetaWord));
4569     tty->cr();
4570     tty->print_cr("         class: ");
4571     class_chunk_stat.print_on(tty, sizeof(MetaWord));
4572     tty->cr();
4573     tty->flush();
4574   }
4575   assert(!mismatch, "MetaspaceUtils::verify_metrics: counter mismatch.");
4576 #endif
4577 }
4578 
4579 
4580 // Metaspace methods
4581 
4582 size_t Metaspace::_first_chunk_word_size = 0;
4583 size_t Metaspace::_first_class_chunk_word_size = 0;
4584 
4585 size_t Metaspace::_commit_alignment = 0;
4586 size_t Metaspace::_reserve_alignment = 0;
4587 
4588 VirtualSpaceList* Metaspace::_space_list = NULL;
4589 VirtualSpaceList* Metaspace::_class_space_list = NULL;
4590 
4591 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
4592 ChunkManager* Metaspace::_chunk_manager_class = NULL;
4593 
4594 #define VIRTUALSPACEMULTIPLIER 2
4595 
4596 #ifdef _LP64
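// With 32-bit narrow Klass pointers and a zero encoding shift ("unscaled" mode),
// the compressed class space must end below 4 GB - hence max_juint + 1.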
4597 static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
4598 


4981 
4982   return result;
4983 }
4984 
4985 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
4986   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
4987 
4988   // The allocation failed; we are out of memory. Log details about the metaspace state.
4989   Log(gc, metaspace, freelist) log;
4990   if (log.is_info()) {
4991     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
4992              is_class_space_allocation(mdtype) ? "class" : "data", word_size);
4993     ResourceMark rm;
4994     if (log.is_debug()) {
4995       if (loader_data->metaspace_or_null() != NULL) {
4996         LogStream ls(log.debug());
4997         loader_data->print_value_on(&ls);
4998       }
4999     }
5000     LogStream ls(log.info());
5001     // In case of an OOM, log a short but still useful report.
5002     MetaspaceUtils::print_report(&ls);

5003   }
5004 
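  // Decide whether to report the failure as exhaustion of the compressed class space
  // rather than of metaspace as a whole: this is the case if the allocation came from
  // class space and satisfying it would push the committed class space size beyond
  // CompressedClassSpaceSize.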
5005   bool out_of_compressed_class_space = false;
5006   if (is_class_space_allocation(mdtype)) {
5007     ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
5008     out_of_compressed_class_space =
5009       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
5010       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
5011       CompressedClassSpaceSize;
5012   }
5013 
5014   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
5015   const char* space_string = out_of_compressed_class_space ?
5016     "Compressed class space" : "Metaspace";
5017 
5018   report_java_out_of_memory(space_string);
5019 
5020   if (JvmtiExport::should_post_resource_exhausted()) {
5021     JvmtiExport::post_resource_exhausted(
5022         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,


5057   }
5058 }
5059 
5060 bool Metaspace::contains(const void* ptr) {
5061   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
5062     return true;
5063   }
5064   return contains_non_shared(ptr);
5065 }
5066 
5067 bool Metaspace::contains_non_shared(const void* ptr) {
5068   if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
5069      return true;
5070   }
5071 
5072   return get_space_list(NonClassType)->contains(ptr);
5073 }
5074 
5075 // ClassLoaderMetaspace
5076 
5077 ClassLoaderMetaspace::ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type)
5078   : _lock(lock)
5079   , _space_type(type)
5080   , _vsm(NULL)
5081   , _class_vsm(NULL)
5082 {
5083   initialize(lock, type);
5084 }
5085 
5086 ClassLoaderMetaspace::~ClassLoaderMetaspace() {
5087   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_metaspace_deaths));
5088   delete _vsm;
5089   if (Metaspace::using_class_space()) {
5090     delete _class_vsm;
5091   }
5092 }
5093 
5094 void ClassLoaderMetaspace::initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
5095   Metachunk* chunk = get_initialization_chunk(type, mdtype);
5096   if (chunk != NULL) {
5097     // Add to this manager's list of chunks in use and current_chunk().
5098     get_space_manager(mdtype)->add_chunk(chunk, true);
5099   }
5100 }
5101 
5102 Metachunk* ClassLoaderMetaspace::get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
5103   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
5104 
5105   // Get a chunk from the chunk freelist
5106   Metachunk* chunk = Metaspace::get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
5107 
5108   if (chunk == NULL) {
5109     chunk = Metaspace::get_space_list(mdtype)->get_new_chunk(chunk_word_size,
5110                                                   get_space_manager(mdtype)->medium_chunk_bunch());
5111   }
5112 
5113   return chunk;
5114 }
5115 
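// Sets up this ClassLoaderMetaspace: creates a SpaceManager for non-class metadata
// (and one for class metadata if compressed class pointers are in use), then
// pre-allocates the first chunk(s) while holding the MetaspaceExpand_lock.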
5116 void ClassLoaderMetaspace::initialize(Mutex* lock, Metaspace::MetaspaceType type) {
5117   Metaspace::verify_global_initialization();
5118 
5119   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_metaspace_births));
5120 
5121   // Allocate SpaceManager for metadata objects.
5122   _vsm = new SpaceManager(Metaspace::NonClassType, type, lock);
5123 
5124   if (Metaspace::using_class_space()) {
5125     // Allocate SpaceManager for classes.
5126     _class_vsm = new SpaceManager(Metaspace::ClassType, type, lock);
5127   }
5128 
5129   MutexLockerEx cl(MetaspaceExpand_lock, Mutex::_no_safepoint_check_flag);
5130 
5131   // Allocate chunk for metadata objects
5132   initialize_first_chunk(type, Metaspace::NonClassType);
5133 
5134   // Allocate chunk for class metadata objects
5135   if (Metaspace::using_class_space()) {
5136     initialize_first_chunk(type, Metaspace::ClassType);
5137   }
5138 }
5139 
5140 MetaWord* ClassLoaderMetaspace::allocate(size_t word_size, Metaspace::MetadataType mdtype) {
5141   Metaspace::assert_not_frozen();
5142 
5143   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_allocs));
5144 
5145   // Don't use class_vsm() unless UseCompressedClassPointers is true.
5146   if (Metaspace::is_class_space_allocation(mdtype)) {
5147     return class_vsm()->allocate(word_size);
5148   } else {
5149     return vsm()->allocate(word_size);
5150   }
5151 }
5152 
5153 MetaWord* ClassLoaderMetaspace::expand_and_allocate(size_t word_size, Metaspace::MetadataType mdtype) {
5154   Metaspace::assert_not_frozen();
5155   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
5156   assert(delta_bytes > 0, "Must be");
5157 
5158   size_t before = 0;
5159   size_t after = 0;
5160   MetaWord* res;
5161   bool incremented;
5162 
5163   // Each thread increments the HWM at most once. Even if the thread fails to increment
5164   // the HWM, an allocation is still attempted. This is because another thread must then
5165   // have incremented the HWM and therefore the allocation might still succeed.
5166   do {
5167     incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before);
5168     res = allocate(word_size, mdtype);
5169   } while (!incremented && res == NULL);
5170 
5171   if (incremented) {
5172     Metaspace::tracer()->report_gc_threshold(before, after,
5173                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
5174     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
5175   }
5176 
5177   return res;
5178 }
5179 
5180 size_t ClassLoaderMetaspace::allocated_blocks_bytes() const {
5181   return vsm()->allocated_blocks_bytes() +
5182       (Metaspace::using_class_space() ? class_vsm()->allocated_blocks_bytes() : 0);
5183 }
5184 
5185 size_t ClassLoaderMetaspace::allocated_chunks_bytes() const {
5186   return vsm()->allocated_chunks_bytes() +
5187       (Metaspace::using_class_space() ? class_vsm()->allocated_chunks_bytes() : 0);
5188 }
5189 
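// Returns a block of metaspace memory to the owning SpaceManager so it can be reused
// by later allocations (see the "Deallocated from chunks in use" statistics above).
// May run outside a safepoint; during a safepoint only the VM thread may call it.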
5190 void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
5191   Metaspace::assert_not_frozen();
5192   assert(!SafepointSynchronize::is_at_safepoint()
5193          || Thread::current()->is_VM_thread(), "should be the VM thread");
5194 
5195   DEBUG_ONLY(Atomic::inc(&g_internal_statistics.num_deallocs));
5196 
5197   MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
5198 
5199   if (is_class && Metaspace::using_class_space()) {
5200     class_vsm()->deallocate(ptr, word_size);
5201   } else {
5202     vsm()->deallocate(ptr, word_size);
5203   }
5204 }
5205 
5206 size_t ClassLoaderMetaspace::class_chunk_size(size_t word_size) {
5207   assert(Metaspace::using_class_space(), "Has to use class space");
5208   return class_vsm()->calc_chunk_size(word_size);
5209 }
5210 
5211 void ClassLoaderMetaspace::print_on(outputStream* out) const {
5212   // Print both class virtual space counts and metaspace.
5213   if (Verbose) {
5214     vsm()->print_on(out);
5215     if (Metaspace::using_class_space()) {
5216       class_vsm()->print_on(out);
5217     }
5218   }
5219 }
5220 
5221 void ClassLoaderMetaspace::verify() {
5222   vsm()->verify();
5223   if (Metaspace::using_class_space()) {
5224     class_vsm()->verify();
5225   }
5226 }
5227 
5228 void ClassLoaderMetaspace::add_to_statistics_locked(ClassLoaderMetaspaceStatistics* out) const {
5229   assert_lock_strong(lock());
5230   vsm()->add_to_statistics_locked(&out->nonclass_sm_stats());
5231   if (Metaspace::using_class_space()) {
5232     class_vsm()->add_to_statistics_locked(&out->class_sm_stats());

5233   }
5234 }
5235 
5236 void ClassLoaderMetaspace::add_to_statistics(ClassLoaderMetaspaceStatistics* out) const {
5237   MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
5238   add_to_statistics_locked(out);
5239 }
5240 
5241 #ifdef ASSERT
5242 static void do_verify_chunk(Metachunk* chunk) {
5243   guarantee(chunk != NULL, "Sanity");
5244   // Verify chunk itself; then verify that it is consistent with the
5245   // occupancy map of its containing node.
5246   chunk->verify();
5247   VirtualSpaceNode* const vsn = chunk->container();
5248   OccupancyMap* const ocmap = vsn->occupancy_map();
5249   ocmap->verify_for_chunk(chunk);
5250 }
5251 #endif
5252 
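// Marks a chunk as in-use or free in both places which track that state: the chunk
// itself (its is_tagged_free flag) and the occupancy map of its containing
// virtual space node.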
5253 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse) {
5254   chunk->set_is_tagged_free(!inuse);
5255   OccupancyMap* const ocmap = chunk->container()->occupancy_map();
5256   ocmap->set_region_in_use((MetaWord*)chunk, chunk->word_size(), inuse);
5257 }
5258 
5259 /////////////// Unit tests ///////////////


5565     test_adjust_initial_chunk_size(false);
5566     test_adjust_initial_chunk_size(true);
5567   }
5568 };
5569 
5570 void SpaceManager_test_adjust_initial_chunk_size() {
5571   SpaceManagerTest::test_adjust_initial_chunk_size();
5572 }
5573 
5574 #endif // ASSERT
5575 
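// The structs and functions below are small plain-C-style accessors which let external
// test code (presumably the gtest/whitebox layer) query chunk manager statistics and
// chunk geometry without having to include the internal statistics types.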
5576 struct chunkmanager_statistics_t {
5577   int num_specialized_chunks;
5578   int num_small_chunks;
5579   int num_medium_chunks;
5580   int num_humongous_chunks;
5581 };
5582 
5583 extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out) {
5584   ChunkManager* const chunk_manager = Metaspace::get_chunk_manager(mdType);
5585   ChunkManagerStatistics stat;
5586   chunk_manager->get_statistics(&stat);
5587   out->num_specialized_chunks = (int)stat.chunk_stats(SpecializedIndex).num();
5588   out->num_small_chunks = (int)stat.chunk_stats(SmallIndex).num();
5589   out->num_medium_chunks = (int)stat.chunk_stats(MediumIndex).num();
5590   out->num_humongous_chunks = (int)stat.chunk_stats(HumongousIndex).num();
5591 }
5592 
5593 struct chunk_geometry_t {
5594   size_t specialized_chunk_word_size;
5595   size_t small_chunk_word_size;
5596   size_t medium_chunk_word_size;
5597 };
5598 
5599 extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out) {
5600   if (mdType == Metaspace::NonClassType) {
5601     out->specialized_chunk_word_size = SpecializedChunk;
5602     out->small_chunk_word_size = SmallChunk;
5603     out->medium_chunk_word_size = MediumChunk;
5604   } else {
5605     out->specialized_chunk_word_size = ClassSpecializedChunk;
5606     out->small_chunk_word_size = ClassSmallChunk;
5607     out->medium_chunk_word_size = ClassMediumChunk;
5608   }
5609 }