src/share/vm/memory/metaspace.cpp

Print this page
rev 6722 : 8049599: MetaspaceGC::_capacity_until_GC can overflow
Reviewed-by: jmasa, stefank


1397     // allocation so make the delta greater than just enough
1398     // for this allocation.
1399     delta = max_delta;
1400   } else {
1401     // This allocation is large but the next ones are probably not
1402     // so increase by the minimum.
1403     delta = delta + min_delta;
1404   }
1405 
1406   assert_is_size_aligned(delta, Metaspace::commit_alignment());
1407 
1408   return delta;
1409 }
1410 
1411 size_t MetaspaceGC::capacity_until_GC() {
1412   size_t value = (size_t)OrderAccess::load_ptr_acquire(&_capacity_until_GC);
1413   assert(value >= MetaspaceSize, "Not initialied properly?");
1414   return value;
1415 }
1416 
1417 size_t MetaspaceGC::inc_capacity_until_GC(size_t v) {
1418   assert_is_size_aligned(v, Metaspace::commit_alignment());
1419 
1420   return (size_t)Atomic::add_ptr(v, &_capacity_until_GC);





















1421 }
1422 
// Atomically lower the GC high-water mark by v (bytes, commit-aligned) and
// return the new value.
// NOTE(review): there is no underflow guard here — presumably callers never
// shrink below the committed size; confirm at the call sites.
size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
  assert_is_size_aligned(v, Metaspace::commit_alignment());

  return (size_t)Atomic::add_ptr(-(intptr_t)v, &_capacity_until_GC);
}
1428 
void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}
1434 
void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done; never set
  // it below MetaspaceSize or below what has already been committed.
  _capacity_until_GC = MAX2(MetaspaceAux::committed_bytes(), MetaspaceSize);
}
1439 
1440 bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {


1500     gclog_or_tty->print_cr("\nMetaspaceGC::compute_new_size: ");
1501     gclog_or_tty->print_cr("  "
1502                   "  minimum_free_percentage: %6.2f"
1503                   "  maximum_used_percentage: %6.2f",
1504                   minimum_free_percentage,
1505                   maximum_used_percentage);
1506     gclog_or_tty->print_cr("  "
1507                   "   used_after_gc       : %6.1fKB",
1508                   used_after_gc / (double) K);
1509   }
1510 
1511 
1512   size_t shrink_bytes = 0;
1513   if (capacity_until_GC < minimum_desired_capacity) {
1514     // If we have less capacity below the metaspace HWM, then
1515     // increment the HWM.
1516     size_t expand_bytes = minimum_desired_capacity - capacity_until_GC;
1517     expand_bytes = align_size_up(expand_bytes, Metaspace::commit_alignment());
1518     // Don't expand unless it's significant
1519     if (expand_bytes >= MinMetaspaceExpansion) {
1520       size_t new_capacity_until_GC = MetaspaceGC::inc_capacity_until_GC(expand_bytes);



1521       Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
1522                                                new_capacity_until_GC,
1523                                                MetaspaceGCThresholdUpdater::ComputeNewSize);
1524       if (PrintGCDetails && Verbose) {
1525         gclog_or_tty->print_cr("    expanding:"
1526                       "  minimum_desired_capacity: %6.1fKB"
1527                       "  expand_bytes: %6.1fKB"
1528                       "  MinMetaspaceExpansion: %6.1fKB"
1529                       "  new metaspace HWM:  %6.1fKB",
1530                       minimum_desired_capacity / (double) K,
1531                       expand_bytes / (double) K,
1532                       MinMetaspaceExpansion / (double) K,
1533                       new_capacity_until_GC / (double) K);
1534       }
1535     }
1536     return;
1537   }
1538 
1539   // No expansion, now see if we want to shrink
1540   // We would never want to shrink more than this


3302 
3303 size_t Metaspace::align_word_size_up(size_t word_size) {
3304   size_t byte_size = word_size * wordSize;
3305   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3306 }
3307 
3308 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3309   // DumpSharedSpaces doesn't use class metadata area (yet)
3310   // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3311   if (is_class_space_allocation(mdtype)) {
3312     return  class_vsm()->allocate(word_size);
3313   } else {
3314     return  vsm()->allocate(word_size);
3315   }
3316 }
3317 
// Raise the capacity-until-GC high-water mark by enough to satisfy this
// allocation, report the threshold change to the tracer, then retry the
// allocation through the normal path.
MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
  size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
  assert(delta_bytes > 0, "Must be");

  size_t after_inc = MetaspaceGC::inc_capacity_until_GC(delta_bytes);

  // capacity_until_GC might be updated concurrently, must calculate previous value.
  size_t before_inc = after_inc - delta_bytes;

  // NOTE(review): if inc_capacity_until_GC ever wraps or clamps (8049599),
  // the before/after pair reported here can be misleading.
  tracer()->report_gc_threshold(before_inc, after_inc,
                                MetaspaceGCThresholdUpdater::ExpandAndAllocate);
  if (PrintGCDetails && Verbose) {
    gclog_or_tty->print_cr("Increase capacity to GC from " SIZE_FORMAT
        " to " SIZE_FORMAT, before_inc, after_inc);
  }

  return allocate(word_size, mdtype);
}
3336 
// Space allocated in the Metaspace.  This may
// be across several metadata virtual spaces.
// Returns the bottom of the current chunk; only valid while dumping the
// shared archive (guarded by the DumpSharedSpaces assert below).
char* Metaspace::bottom() const {
  assert(DumpSharedSpaces, "only useful and valid for dumping shared spaces");
  return (char*)vsm()->current_chunk()->bottom();
}
3343 
3344 size_t Metaspace::used_words_slow(MetadataType mdtype) const {
3345   if (mdtype == ClassType) {
3346     return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
3347   } else {
3348     return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
3349   }
3350 }
3351 
3352 size_t Metaspace::free_words_slow(MetadataType mdtype) const {
3353   if (mdtype == ClassType) {
3354     return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;




1397     // allocation so make the delta greater than just enough
1398     // for this allocation.
1399     delta = max_delta;
1400   } else {
1401     // This allocation is large but the next ones are probably not
1402     // so increase by the minimum.
1403     delta = delta + min_delta;
1404   }
1405 
1406   assert_is_size_aligned(delta, Metaspace::commit_alignment());
1407 
1408   return delta;
1409 }
1410 
// Returns the current GC high-water mark with acquire semantics, pairing
// with the atomic updates in inc/dec_capacity_until_GC().
size_t MetaspaceGC::capacity_until_GC() {
  size_t value = (size_t)OrderAccess::load_ptr_acquire(&_capacity_until_GC);
  assert(value >= MetaspaceSize, "Not initialied properly?");  // TODO: fix typo "initialied"
  return value;
}
1416 
// Attempt a single compare-and-swap increment of _capacity_until_GC by v,
// clamping at the commit-aligned maximum if the addition would overflow
// (8049599). Returns true on success; returns false when another thread
// raced in between the read and the swap — the caller may then retry, or
// rely on the other thread's increment having made room.
// On success, the new and previous values are optionally returned through
// new_cap_until_GC / old_cap_until_GC (either may be NULL).
bool MetaspaceGC::inc_capacity_until_GC(size_t v, size_t* new_cap_until_GC, size_t* old_cap_until_GC) {
  assert_is_size_aligned(v, Metaspace::commit_alignment());

  // A plain (non-acquire) read is sufficient: if the value changes before
  // the cmpxchg below, the swap simply fails and we return false.
  size_t capacity_until_GC = (size_t) _capacity_until_GC;
  size_t new_value = capacity_until_GC + v;

  if (new_value < capacity_until_GC) {
    // The addition wrapped around, set new_value to aligned max value.
    new_value = align_size_down(max_uintx, Metaspace::commit_alignment());
  }

  intptr_t expected = (intptr_t) capacity_until_GC;
  intptr_t actual = Atomic::cmpxchg_ptr((intptr_t) new_value, &_capacity_until_GC, expected);

  if (expected != actual) {
    // Lost the race; do not publish stale old/new values.
    return false;
  }

  if (new_cap_until_GC != NULL) {
    *new_cap_until_GC = new_value;
  }
  if (old_cap_until_GC != NULL) {
    *old_cap_until_GC = capacity_until_GC;
  }
  return true;
}
1443 
1444 size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
1445   assert_is_size_aligned(v, Metaspace::commit_alignment());
1446 
1447   return (size_t)Atomic::add_ptr(-(intptr_t)v, &_capacity_until_GC);
1448 }
1449 
void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}
1455 
void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done; keep it at
  // least MetaspaceSize and no lower than the bytes already committed.
  _capacity_until_GC = MAX2(MetaspaceAux::committed_bytes(), MetaspaceSize);
}
1460 
1461 bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {


1521     gclog_or_tty->print_cr("\nMetaspaceGC::compute_new_size: ");
1522     gclog_or_tty->print_cr("  "
1523                   "  minimum_free_percentage: %6.2f"
1524                   "  maximum_used_percentage: %6.2f",
1525                   minimum_free_percentage,
1526                   maximum_used_percentage);
1527     gclog_or_tty->print_cr("  "
1528                   "   used_after_gc       : %6.1fKB",
1529                   used_after_gc / (double) K);
1530   }
1531 
1532 
1533   size_t shrink_bytes = 0;
1534   if (capacity_until_GC < minimum_desired_capacity) {
1535     // If we have less capacity below the metaspace HWM, then
1536     // increment the HWM.
1537     size_t expand_bytes = minimum_desired_capacity - capacity_until_GC;
1538     expand_bytes = align_size_up(expand_bytes, Metaspace::commit_alignment());
1539     // Don't expand unless it's significant
1540     if (expand_bytes >= MinMetaspaceExpansion) {
1541       size_t new_capacity_until_GC = 0;
1542       bool succeeded = MetaspaceGC::inc_capacity_until_GC(expand_bytes, &new_capacity_until_GC);
1543       assert(succeeded, "Should always succesfully increment HWM when at safepoint");
1544 
1545       Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
1546                                                new_capacity_until_GC,
1547                                                MetaspaceGCThresholdUpdater::ComputeNewSize);
1548       if (PrintGCDetails && Verbose) {
1549         gclog_or_tty->print_cr("    expanding:"
1550                       "  minimum_desired_capacity: %6.1fKB"
1551                       "  expand_bytes: %6.1fKB"
1552                       "  MinMetaspaceExpansion: %6.1fKB"
1553                       "  new metaspace HWM:  %6.1fKB",
1554                       minimum_desired_capacity / (double) K,
1555                       expand_bytes / (double) K,
1556                       MinMetaspaceExpansion / (double) K,
1557                       new_capacity_until_GC / (double) K);
1558       }
1559     }
1560     return;
1561   }
1562 
1563   // No expansion, now see if we want to shrink
1564   // We would never want to shrink more than this


3326 
3327 size_t Metaspace::align_word_size_up(size_t word_size) {
3328   size_t byte_size = word_size * wordSize;
3329   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3330 }
3331 
3332 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3333   // DumpSharedSpaces doesn't use class metadata area (yet)
3334   // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3335   if (is_class_space_allocation(mdtype)) {
3336     return  class_vsm()->allocate(word_size);
3337   } else {
3338     return  vsm()->allocate(word_size);
3339   }
3340 }
3341 
// Raise the GC high-water mark and retry the allocation. Multiple threads may
// race here; the single-attempt inc_capacity_until_GC() decides which one
// actually raises (and reports) the threshold.
MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
  size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
  assert(delta_bytes > 0, "Must be");

  size_t before = 0;
  size_t after = 0;
  MetaWord* res;
  bool incremented;

  // Each thread increments the HWM at most once. Even if the thread fails to increment
  // the HWM, an allocation is still attempted. This is because another thread must then
  // have incremented the HWM and therefore the allocation might still succeed.
  do {
    incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before);
    res = allocate(word_size, mdtype);
  } while (!incremented && res == NULL);

  if (incremented) {
    // Only the thread whose CAS succeeded reports the threshold change,
    // so before/after are guaranteed consistent.
    tracer()->report_gc_threshold(before, after,
                                  MetaspaceGCThresholdUpdater::ExpandAndAllocate);
    if (PrintGCDetails && Verbose) {
      gclog_or_tty->print_cr("Increase capacity to GC from " SIZE_FORMAT
          " to " SIZE_FORMAT, before, after);
    }
  }

  return res;
}
3370 
3371 // Space allocated in the Metaspace.  This may
3372 // be across several metadata virtual spaces.
3373 char* Metaspace::bottom() const {
3374   assert(DumpSharedSpaces, "only useful and valid for dumping shared spaces");
3375   return (char*)vsm()->current_chunk()->bottom();
3376 }
3377 
3378 size_t Metaspace::used_words_slow(MetadataType mdtype) const {
3379   if (mdtype == ClassType) {
3380     return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
3381   } else {
3382     return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
3383   }
3384 }
3385 
3386 size_t Metaspace::free_words_slow(MetadataType mdtype) const {
3387   if (mdtype == ClassType) {
3388     return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;