src/share/vm/memory/metaspace.cpp

rev 5878 : 8034171: Remove use of template template parameters from binaryTreeDictionary.
Contributed-by: Matthias.Baesken@sap.com
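
The hunks below appear twice: first as the file stands before this change, then again with the fix applied. The only functional difference in the hunks shown is how the free-list type is handed to BinaryTreeDictionary and TreeChunk: before, FreeList is passed as a template template parameter and instantiated inside the dictionary; after, callers pass the concrete instantiation (FreeList<Metablock> or FreeList<Metachunk>) as an ordinary type argument. A minimal, self-contained sketch of the two declaration styles follows; OldTreeDictionary/NewTreeDictionary and their member layouts are illustrative stand-ins, not the real binaryTreeDictionary.hpp declarations.

  #include <stddef.h>

  // Illustrative stand-ins only -- not the actual HotSpot declarations.
  class Metablock { };
  template <class Chunk_t> class FreeList {
    Chunk_t* _head;
    size_t   _count;
  };

  // Old style: the free-list type arrives as a template template parameter
  // and is instantiated inside the dictionary itself.
  template <class Chunk_t, template <class> class FreeList_t>
  class OldTreeDictionary {
    FreeList_t<Chunk_t> _free_list;   // becomes FreeList<Metablock> in here
  };

  // New style (this change): an ordinary type parameter, so the caller
  // supplies the already-instantiated free list.
  template <class Chunk_t, class FreeList_t>
  class NewTreeDictionary {
    FreeList_t _free_list;            // already a complete type
  };

  // The corresponding typedef shapes in metaspace.cpp:
  typedef OldTreeDictionary<Metablock, FreeList>             OldBlockTreeDictionary;
  typedef NewTreeDictionary<Metablock, FreeList<Metablock> > NewBlockTreeDictionary;

Every other hunk follows mechanically from this: each use of TreeChunk<Metablock, FreeList> or BinaryTreeDictionary<..., FreeList> gains the explicit element type on the free-list argument.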


  29 #include "memory/collectorPolicy.hpp"
  30 #include "memory/filemap.hpp"
  31 #include "memory/freeList.hpp"
  32 #include "memory/gcLocker.hpp"
  33 #include "memory/metachunk.hpp"
  34 #include "memory/metaspace.hpp"
  35 #include "memory/metaspaceShared.hpp"
  36 #include "memory/resourceArea.hpp"
  37 #include "memory/universe.hpp"
  38 #include "runtime/atomic.inline.hpp"
  39 #include "runtime/globals.hpp"
  40 #include "runtime/init.hpp"
  41 #include "runtime/java.hpp"
  42 #include "runtime/mutex.hpp"
  43 #include "runtime/orderAccess.hpp"
  44 #include "services/memTracker.hpp"
  45 #include "services/memoryService.hpp"
  46 #include "utilities/copy.hpp"
  47 #include "utilities/debug.hpp"
  48 
  49 typedef BinaryTreeDictionary<Metablock, FreeList> BlockTreeDictionary;
  50 typedef BinaryTreeDictionary<Metachunk, FreeList> ChunkTreeDictionary;
  51 
  52 // Set this constant to enable slow integrity checking of the free chunk lists
  53 const bool metaspace_slow_verify = false;
  54 
  55 size_t const allocation_from_dictionary_limit = 4 * K;
  56 
  57 MetaWord* last_allocated = 0;
  58 
  59 size_t Metaspace::_compressed_class_space_size;
  60 
  61 // Used in declarations in SpaceManager and ChunkManager
  62 enum ChunkIndex {
  63   ZeroIndex = 0,
  64   SpecializedIndex = ZeroIndex,
  65   SmallIndex = SpecializedIndex + 1,
  66   MediumIndex = SmallIndex + 1,
  67   HumongousIndex = MediumIndex + 1,
  68   NumberOfFreeLists = 3,
  69   NumberOfInUseLists = 4
  70 };
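
For context (not part of this hunk): NumberOfFreeLists is 3 while NumberOfInUseLists is 4 because humongous chunks have no fixed-size free list. ChunkManager keeps one size-segregated free list per non-humongous index plus a tree dictionary for humongous chunks, whereas SpaceManager tracks in-use chunks under all four indices. A rough sketch of how the constants are used (shapes only; member names here are approximate, see the real class declarations earlier in this file):

  // Sketch only -- approximations of the ChunkManager/SpaceManager members
  // that are sized by the ChunkIndex constants above.
  class ChunkManagerSketch {
    FreeList<Metachunk> _free_chunks[NumberOfFreeLists]; // specialized, small, medium
    ChunkTreeDictionary _humongous_dictionary;           // humongous chunks, keyed by size
  };

  class SpaceManagerSketch {
    Metachunk* _chunks_in_use[NumberOfInUseLists];       // all four indices, humongous included
  };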


 773     if (Verbose && TraceMetadataChunkAllocation) {
 774       _dictionary->print_free_lists(gclog_or_tty);
 775     }
 776     delete _dictionary;
 777   }
 778 }
 779 
 780 void BlockFreelist::return_block(MetaWord* p, size_t word_size) {
 781   Metablock* free_chunk = ::new (p) Metablock(word_size);
 782   if (dictionary() == NULL) {
 783    _dictionary = new BlockTreeDictionary();
 784   }
 785   dictionary()->return_chunk(free_chunk);
 786 }
 787 
 788 MetaWord* BlockFreelist::get_block(size_t word_size) {
 789   if (dictionary() == NULL) {
 790     return NULL;
 791   }
 792 
 793   if (word_size < TreeChunk<Metablock, FreeList>::min_size()) {
 794     // Dark matter.  Too small for dictionary.
 795     return NULL;
 796   }
 797 
 798   Metablock* free_block =
 799     dictionary()->get_chunk(word_size, FreeBlockDictionary<Metablock>::atLeast);
 800   if (free_block == NULL) {
 801     return NULL;
 802   }
 803 
 804   const size_t block_size = free_block->size();
 805   if (block_size > WasteMultiplier * word_size) {
 806     return_block((MetaWord*)free_block, block_size);
 807     return NULL;
 808   }
 809 
 810   MetaWord* new_block = (MetaWord*)free_block;
 811   assert(block_size >= word_size, "Incorrect size of block from freelist");
 812   const size_t unused = block_size - word_size;
 813   if (unused >= TreeChunk<Metablock, FreeList>::min_size()) {
 814     return_block(new_block + word_size, unused);
 815   }
 816 
 817   return new_block;
 818 }
 819 
 820 void BlockFreelist::print_on(outputStream* st) const {
 821   if (dictionary() == NULL) {
 822     return;
 823   }
 824   dictionary()->print_free_lists(st);
 825 }
 826 
 827 // VirtualSpaceNode methods
 828 
 829 VirtualSpaceNode::~VirtualSpaceNode() {
 830   _rs.release();
 831 #ifdef ASSERT
 832   size_t word_size = sizeof(*this) / BytesPerWord;
 833   Copy::fill_to_words((HeapWord*) this, word_size, 0xf1f1f1f1);


2223     case SpecializedChunk:
2224       assert(SpecializedChunk == ClassSpecializedChunk,
2225              "Need branch for ClassSpecializedChunk");
2226       return SpecializedIndex;
2227     case SmallChunk:
2228     case ClassSmallChunk:
2229       return SmallIndex;
2230     case MediumChunk:
2231     case ClassMediumChunk:
2232       return MediumIndex;
2233     default:
2234       assert(size > MediumChunk || size > ClassMediumChunk,
2235              "Not a humongous chunk");
2236       return HumongousIndex;
2237   }
2238 }
2239 
2240 void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
2241   assert_lock_strong(_lock);
2242   size_t raw_word_size = get_raw_word_size(word_size);
2243   size_t min_size = TreeChunk<Metablock, FreeList>::min_size();
2244   assert(raw_word_size >= min_size,
2245          err_msg("Should not deallocate dark matter " SIZE_FORMAT "<" SIZE_FORMAT, word_size, min_size));
2246   block_freelists()->return_block(p, raw_word_size);
2247 }
2248 
2249 // Adds a chunk to the list of chunks in use.
2250 void SpaceManager::add_chunk(Metachunk* new_chunk, bool make_current) {
2251 
2252   assert(new_chunk != NULL, "Should not be NULL");
2253   assert(new_chunk->next() == NULL, "Should not be on a list");
2254 
2255   new_chunk->reset_empty();
2256 
2257   // Find the correct list and set the current
2258   // chunk for that list.
2259   ChunkIndex index = ChunkManager::list_index(new_chunk->word_size());
2260 
2261   if (index != HumongousIndex) {
2262     retire_current_chunk();
2263     set_current_chunk(new_chunk);


2279     set_chunks_in_use(HumongousIndex, new_chunk);
2280 
2281     assert(new_chunk->word_size() > medium_chunk_size(), "List inconsistency");
2282   }
2283 
2284   // Add to the running sum of capacity
2285   inc_size_metrics(new_chunk->word_size());
2286 
2287   assert(new_chunk->is_empty(), "Not ready for reuse");
2288   if (TraceMetadataChunkAllocation && Verbose) {
2289     gclog_or_tty->print("SpaceManager::add_chunk: %d) ",
2290                         sum_count_in_chunks_in_use());
2291     new_chunk->print_on(gclog_or_tty);
2292     chunk_manager()->locked_print_free_chunks(gclog_or_tty);
2293   }
2294 }
2295 
2296 void SpaceManager::retire_current_chunk() {
2297   if (current_chunk() != NULL) {
2298     size_t remaining_words = current_chunk()->free_word_size();
2299     if (remaining_words >= TreeChunk<Metablock, FreeList>::min_size()) {
2300       block_freelists()->return_block(current_chunk()->allocate(remaining_words), remaining_words);
2301       inc_used_metrics(remaining_words);
2302     }
2303   }
2304 }
2305 
2306 Metachunk* SpaceManager::get_new_chunk(size_t word_size,
2307                                        size_t grow_chunks_by_words) {
2308   // Get a chunk from the chunk freelist
2309   Metachunk* next = chunk_manager()->chunk_freelist_allocate(grow_chunks_by_words);
2310 
2311   if (next == NULL) {
2312     next = vs_list()->get_new_chunk(word_size,
2313                                     grow_chunks_by_words,
2314                                     medium_chunk_bunch());
2315   }
2316 
2317   if (TraceMetadataHumongousAllocation && next != NULL &&
2318       SpaceManager::is_humongous(next->word_size())) {
2319     gclog_or_tty->print_cr("  new humongous chunk word size "


3262   if (mdtype == ClassType) {
3263     return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
3264   } else {
3265     return vsm()->sum_capacity_in_chunks_in_use();
3266   }
3267 }
3268 
3269 size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {
3270   return used_words_slow(mdtype) * BytesPerWord;
3271 }
3272 
3273 size_t Metaspace::capacity_bytes_slow(MetadataType mdtype) const {
3274   return capacity_words_slow(mdtype) * BytesPerWord;
3275 }
3276 
3277 void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
3278   if (SafepointSynchronize::is_at_safepoint()) {
3279     assert(Thread::current()->is_VM_thread(), "should be the VM thread");
3280     // Don't take Heap_lock
3281     MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3282     if (word_size < TreeChunk<Metablock, FreeList>::min_size()) {
3283       // Dark matter.  Too small for dictionary.
3284 #ifdef ASSERT
3285       Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3286 #endif
3287       return;
3288     }
3289     if (is_class && using_class_space()) {
3290       class_vsm()->deallocate(ptr, word_size);
3291     } else {
3292       vsm()->deallocate(ptr, word_size);
3293     }
3294   } else {
3295     MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3296 
3297     if (word_size < TreeChunk<Metablock, FreeList>::min_size()) {
3298       // Dark matter.  Too small for dictionary.
3299 #ifdef ASSERT
3300       Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3301 #endif
3302       return;
3303     }
3304     if (is_class && using_class_space()) {
3305       class_vsm()->deallocate(ptr, word_size);
3306     } else {
3307       vsm()->deallocate(ptr, word_size);
3308     }
3309   }
3310 }
3311 
3312 
3313 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
3314                               bool read_only, MetaspaceObj::Type type, TRAPS) {
3315   if (HAS_PENDING_EXCEPTION) {
3316     assert(false, "Should not allocate with exception pending");
3317     return NULL;  // caller does a CHECK_NULL too




  29 #include "memory/collectorPolicy.hpp"
  30 #include "memory/filemap.hpp"
  31 #include "memory/freeList.hpp"
  32 #include "memory/gcLocker.hpp"
  33 #include "memory/metachunk.hpp"
  34 #include "memory/metaspace.hpp"
  35 #include "memory/metaspaceShared.hpp"
  36 #include "memory/resourceArea.hpp"
  37 #include "memory/universe.hpp"
  38 #include "runtime/atomic.inline.hpp"
  39 #include "runtime/globals.hpp"
  40 #include "runtime/init.hpp"
  41 #include "runtime/java.hpp"
  42 #include "runtime/mutex.hpp"
  43 #include "runtime/orderAccess.hpp"
  44 #include "services/memTracker.hpp"
  45 #include "services/memoryService.hpp"
  46 #include "utilities/copy.hpp"
  47 #include "utilities/debug.hpp"
  48 
  49 typedef BinaryTreeDictionary<Metablock, FreeList<Metablock> > BlockTreeDictionary;
  50 typedef BinaryTreeDictionary<Metachunk, FreeList<Metachunk> > ChunkTreeDictionary;
  51 
  52 // Set this constant to enable slow integrity checking of the free chunk lists
  53 const bool metaspace_slow_verify = false;
  54 
  55 size_t const allocation_from_dictionary_limit = 4 * K;
  56 
  57 MetaWord* last_allocated = 0;
  58 
  59 size_t Metaspace::_compressed_class_space_size;
  60 
  61 // Used in declarations in SpaceManager and ChunkManager
  62 enum ChunkIndex {
  63   ZeroIndex = 0,
  64   SpecializedIndex = ZeroIndex,
  65   SmallIndex = SpecializedIndex + 1,
  66   MediumIndex = SmallIndex + 1,
  67   HumongousIndex = MediumIndex + 1,
  68   NumberOfFreeLists = 3,
  69   NumberOfInUseLists = 4
  70 };


 773     if (Verbose && TraceMetadataChunkAllocation) {
 774       _dictionary->print_free_lists(gclog_or_tty);
 775     }
 776     delete _dictionary;
 777   }
 778 }
 779 
 780 void BlockFreelist::return_block(MetaWord* p, size_t word_size) {
 781   Metablock* free_chunk = ::new (p) Metablock(word_size);
 782   if (dictionary() == NULL) {
 783    _dictionary = new BlockTreeDictionary();
 784   }
 785   dictionary()->return_chunk(free_chunk);
 786 }
 787 
 788 MetaWord* BlockFreelist::get_block(size_t word_size) {
 789   if (dictionary() == NULL) {
 790     return NULL;
 791   }
 792 
 793   if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 794     // Dark matter.  Too small for dictionary.
 795     return NULL;
 796   }
 797 
 798   Metablock* free_block =
 799     dictionary()->get_chunk(word_size, FreeBlockDictionary<Metablock>::atLeast);
 800   if (free_block == NULL) {
 801     return NULL;
 802   }
 803 
 804   const size_t block_size = free_block->size();
 805   if (block_size > WasteMultiplier * word_size) {
 806     return_block((MetaWord*)free_block, block_size);
 807     return NULL;
 808   }
 809 
 810   MetaWord* new_block = (MetaWord*)free_block;
 811   assert(block_size >= word_size, "Incorrect size of block from freelist");
 812   const size_t unused = block_size - word_size;
 813   if (unused >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 814     return_block(new_block + word_size, unused);
 815   }
 816 
 817   return new_block;
 818 }
 819 
 820 void BlockFreelist::print_on(outputStream* st) const {
 821   if (dictionary() == NULL) {
 822     return;
 823   }
 824   dictionary()->print_free_lists(st);
 825 }
 826 
 827 // VirtualSpaceNode methods
 828 
 829 VirtualSpaceNode::~VirtualSpaceNode() {
 830   _rs.release();
 831 #ifdef ASSERT
 832   size_t word_size = sizeof(*this) / BytesPerWord;
 833   Copy::fill_to_words((HeapWord*) this, word_size, 0xf1f1f1f1);


2223     case SpecializedChunk:
2224       assert(SpecializedChunk == ClassSpecializedChunk,
2225              "Need branch for ClassSpecializedChunk");
2226       return SpecializedIndex;
2227     case SmallChunk:
2228     case ClassSmallChunk:
2229       return SmallIndex;
2230     case MediumChunk:
2231     case ClassMediumChunk:
2232       return MediumIndex;
2233     default:
2234       assert(size > MediumChunk || size > ClassMediumChunk,
2235              "Not a humongous chunk");
2236       return HumongousIndex;
2237   }
2238 }
2239 
2240 void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
2241   assert_lock_strong(_lock);
2242   size_t raw_word_size = get_raw_word_size(word_size);
2243   size_t min_size = TreeChunk<Metablock, FreeList<Metablock> >::min_size();
2244   assert(raw_word_size >= min_size,
2245          err_msg("Should not deallocate dark matter " SIZE_FORMAT "<" SIZE_FORMAT, word_size, min_size));
2246   block_freelists()->return_block(p, raw_word_size);
2247 }
2248 
2249 // Adds a chunk to the list of chunks in use.
2250 void SpaceManager::add_chunk(Metachunk* new_chunk, bool make_current) {
2251 
2252   assert(new_chunk != NULL, "Should not be NULL");
2253   assert(new_chunk->next() == NULL, "Should not be on a list");
2254 
2255   new_chunk->reset_empty();
2256 
2257   // Find the correct list and set the current
2258   // chunk for that list.
2259   ChunkIndex index = ChunkManager::list_index(new_chunk->word_size());
2260 
2261   if (index != HumongousIndex) {
2262     retire_current_chunk();
2263     set_current_chunk(new_chunk);


2279     set_chunks_in_use(HumongousIndex, new_chunk);
2280 
2281     assert(new_chunk->word_size() > medium_chunk_size(), "List inconsistency");
2282   }
2283 
2284   // Add to the running sum of capacity
2285   inc_size_metrics(new_chunk->word_size());
2286 
2287   assert(new_chunk->is_empty(), "Not ready for reuse");
2288   if (TraceMetadataChunkAllocation && Verbose) {
2289     gclog_or_tty->print("SpaceManager::add_chunk: %d) ",
2290                         sum_count_in_chunks_in_use());
2291     new_chunk->print_on(gclog_or_tty);
2292     chunk_manager()->locked_print_free_chunks(gclog_or_tty);
2293   }
2294 }
2295 
2296 void SpaceManager::retire_current_chunk() {
2297   if (current_chunk() != NULL) {
2298     size_t remaining_words = current_chunk()->free_word_size();
2299     if (remaining_words >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
2300       block_freelists()->return_block(current_chunk()->allocate(remaining_words), remaining_words);
2301       inc_used_metrics(remaining_words);
2302     }
2303   }
2304 }
2305 
2306 Metachunk* SpaceManager::get_new_chunk(size_t word_size,
2307                                        size_t grow_chunks_by_words) {
2308   // Get a chunk from the chunk freelist
2309   Metachunk* next = chunk_manager()->chunk_freelist_allocate(grow_chunks_by_words);
2310 
2311   if (next == NULL) {
2312     next = vs_list()->get_new_chunk(word_size,
2313                                     grow_chunks_by_words,
2314                                     medium_chunk_bunch());
2315   }
2316 
2317   if (TraceMetadataHumongousAllocation && next != NULL &&
2318       SpaceManager::is_humongous(next->word_size())) {
2319     gclog_or_tty->print_cr("  new humongous chunk word size "


3262   if (mdtype == ClassType) {
3263     return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
3264   } else {
3265     return vsm()->sum_capacity_in_chunks_in_use();
3266   }
3267 }
3268 
3269 size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {
3270   return used_words_slow(mdtype) * BytesPerWord;
3271 }
3272 
3273 size_t Metaspace::capacity_bytes_slow(MetadataType mdtype) const {
3274   return capacity_words_slow(mdtype) * BytesPerWord;
3275 }
3276 
3277 void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
3278   if (SafepointSynchronize::is_at_safepoint()) {
3279     assert(Thread::current()->is_VM_thread(), "should be the VM thread");
3280     // Don't take Heap_lock
3281     MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3282     if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
3283       // Dark matter.  Too small for dictionary.
3284 #ifdef ASSERT
3285       Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3286 #endif
3287       return;
3288     }
3289     if (is_class && using_class_space()) {
3290       class_vsm()->deallocate(ptr, word_size);
3291     } else {
3292       vsm()->deallocate(ptr, word_size);
3293     }
3294   } else {
3295     MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3296 
3297     if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
3298       // Dark matter.  Too small for dictionary.
3299 #ifdef ASSERT
3300       Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3301 #endif
3302       return;
3303     }
3304     if (is_class && using_class_space()) {
3305       class_vsm()->deallocate(ptr, word_size);
3306     } else {
3307       vsm()->deallocate(ptr, word_size);
3308     }
3309   }
3310 }
3311 
3312 
3313 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
3314                               bool read_only, MetaspaceObj::Type type, TRAPS) {
3315   if (HAS_PENDING_EXCEPTION) {
3316     assert(false, "Should not allocate with exception pending");
3317     return NULL;  // caller does a CHECK_NULL too