src/share/vm/memory/metaspace.cpp

Print this page
rev 6149 : 8037952: Remove code duplication in Metaspace::deallocate
Reviewed-by: TBD1, TBD2


3326 // have been made. Don't include space in the global freelist and
3327 // in the space available in the dictionary which
3328 // is already counted in some chunk.
3329 size_t Metaspace::capacity_words_slow(MetadataType mdtype) const {
3330   if (mdtype == ClassType) {
3331     return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
3332   } else {
3333     return vsm()->sum_capacity_in_chunks_in_use();
3334   }
3335 }
3336 
3337 size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {
3338   return used_words_slow(mdtype) * BytesPerWord;
3339 }
3340 
3341 size_t Metaspace::capacity_bytes_slow(MetadataType mdtype) const {
3342   return capacity_words_slow(mdtype) * BytesPerWord;
3343 }
3344 
3345 void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
3346   if (SafepointSynchronize::is_at_safepoint()) {
3347     assert(Thread::current()->is_VM_thread(), "should be the VM thread");
3348     // Don't take Heap_lock
3349     MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3350     if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
3351       // Dark matter.  Too small for dictionary.
3352 #ifdef ASSERT
3353       Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3354 #endif
3355       return;
3356     }
3357     if (is_class && using_class_space()) {
3358       class_vsm()->deallocate(ptr, word_size);
3359     } else {
3360       vsm()->deallocate(ptr, word_size);
3361     }
3362   } else {
3363     MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3364 
3365     if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
3366       // Dark matter.  Too small for dictionary.
3367 #ifdef ASSERT
3368       Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3369 #endif
3370       return;
3371     }
3372     if (is_class && using_class_space()) {
3373       class_vsm()->deallocate(ptr, word_size);
3374     } else {
3375       vsm()->deallocate(ptr, word_size);
3376     }
3377   }
3378 }
3379 
3380 
3381 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
3382                               bool read_only, MetaspaceObj::Type type, TRAPS) {
3383   if (HAS_PENDING_EXCEPTION) {
3384     assert(false, "Should not allocate with exception pending");
3385     return NULL;  // caller does a CHECK_NULL too
3386   }
3387 
3388   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
3389         "ClassLoaderData::the_null_class_loader_data() should have been used.");
3390 
3391   // Allocate in metaspaces without taking out a lock, because it deadlocks
3392   // with the SymbolTable_lock.  Dumping is single threaded for now.  We'll have
3393   // to revisit this for application class data sharing.
3394   if (DumpSharedSpaces) {
3395     assert(type > MetaspaceObj::UnknownType && type < MetaspaceObj::_number_of_types, "sanity");
3396     Metaspace* space = read_only ? loader_data->ro_metaspace() : loader_data->rw_metaspace();




3326 // have been made. Don't include space in the global freelist and
3327 // in the space available in the dictionary which
3328 // is already counted in some chunk.
3329 size_t Metaspace::capacity_words_slow(MetadataType mdtype) const {
3330   if (mdtype == ClassType) {
       // Class space can be absent (using_class_space() false); then its
       // capacity is reported as 0.
3331     return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
3332   } else {
       // Non-class metadata lives in the regular space manager.
3333     return vsm()->sum_capacity_in_chunks_in_use();
3334   }
3335 }
3336 
// Slow-path used size in bytes: word count scaled by BytesPerWord.
3337 size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {
3338   return used_words_slow(mdtype) * BytesPerWord;
3339 }
3340 
// Slow-path capacity in bytes: word count scaled by BytesPerWord.
3341 size_t Metaspace::capacity_bytes_slow(MetadataType mdtype) const {
3342   return capacity_words_slow(mdtype) * BytesPerWord;
3343 }
3344 
// Return a block of metadata to the metaspace freelists.  Deduplicated
// form (8037952): one locked path replaces the former safepoint /
// non-safepoint duplicate branches.
3345 void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
     // At a safepoint only the VM thread may free metadata; outside a
     // safepoint any thread may, guarded by the lock below.
3346   assert(!SafepointSynchronize::is_at_safepoint()
3347          || Thread::current()->is_VM_thread(), "should be the VM thread");
3348 
     // Taken without a safepoint check in both cases, matching the
     // pre-change behavior of each branch.
3349   MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
3350 
3351   if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
3352     // Dark matter.  Too small for dictionary.
3353 #ifdef ASSERT
     // Poison freed words so stale uses are visible in debug builds.
3354     Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
3355 #endif
3356     return;
3357   }
     // Dispatch to the class-space manager only when class space exists.
3358   if (is_class && using_class_space()) {
3359     class_vsm()->deallocate(ptr, word_size);
3360   } else {
3361     vsm()->deallocate(ptr, word_size);

3362   }
3363 }
3364 
3365 
3366 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
3367                               bool read_only, MetaspaceObj::Type type, TRAPS) {
3368   if (HAS_PENDING_EXCEPTION) {
3369     assert(false, "Should not allocate with exception pending");
3370     return NULL;  // caller does a CHECK_NULL too
3371   }
3372 
3373   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
3374         "ClassLoaderData::the_null_class_loader_data() should have been used.");
3375 
3376   // Allocate in metaspaces without taking out a lock, because it deadlocks
3377   // with the SymbolTable_lock.  Dumping is single threaded for now.  We'll have
3378   // to revisit this for application class data sharing.
3379   if (DumpSharedSpaces) {
3380     assert(type > MetaspaceObj::UnknownType && type < MetaspaceObj::_number_of_types, "sanity");
3381     Metaspace* space = read_only ? loader_data->ro_metaspace() : loader_data->rw_metaspace();