1398 // allocation so make the delta greater than just enough
1399 // for this allocation.
1400 delta = max_delta;
1401 } else {
1402 // This allocation is large but the next ones are probably not
1403 // so increase by the minimum.
1404 delta = delta + min_delta;
1405 }
1406
1407 assert_is_size_aligned(delta, Metaspace::commit_alignment());
1408
1409 return delta;
1410 }
1411
1412 size_t MetaspaceGC::capacity_until_GC() {
1413 size_t value = (size_t)OrderAccess::load_ptr_acquire(&_capacity_until_GC);
1414 assert(value >= MetaspaceSize, "Not initialied properly?");
1415 return value;
1416 }
1417
1418 size_t MetaspaceGC::inc_capacity_until_GC(size_t v) {
1419 assert_is_size_aligned(v, Metaspace::commit_alignment());
1420
1421 return (size_t)Atomic::add_ptr(v, &_capacity_until_GC);
1422 }
1423
1424 size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
1425 assert_is_size_aligned(v, Metaspace::commit_alignment());
1426
1427 return (size_t)Atomic::add_ptr(-(intptr_t)v, &_capacity_until_GC);
1428 }
1429
// One-time setup of the metaspace GC threshold.
void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}
1435
// Called after VM initialization completes (GC is possible from here on).
void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done:
  // drop to the actual committed footprint, but never below MetaspaceSize.
  _capacity_until_GC = MAX2(MetaspaceAux::committed_bytes(), MetaspaceSize);
}
1440
1441 bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
1501 gclog_or_tty->print_cr("\nMetaspaceGC::compute_new_size: ");
1502 gclog_or_tty->print_cr(" "
1503 " minimum_free_percentage: %6.2f"
1504 " maximum_used_percentage: %6.2f",
1505 minimum_free_percentage,
1506 maximum_used_percentage);
1507 gclog_or_tty->print_cr(" "
1508 " used_after_gc : %6.1fKB",
1509 used_after_gc / (double) K);
1510 }
1511
1512
1513 size_t shrink_bytes = 0;
1514 if (capacity_until_GC < minimum_desired_capacity) {
1515 // If we have less capacity below the metaspace HWM, then
1516 // increment the HWM.
1517 size_t expand_bytes = minimum_desired_capacity - capacity_until_GC;
1518 expand_bytes = align_size_up(expand_bytes, Metaspace::commit_alignment());
1519 // Don't expand unless it's significant
1520 if (expand_bytes >= MinMetaspaceExpansion) {
1521 size_t new_capacity_until_GC = MetaspaceGC::inc_capacity_until_GC(expand_bytes);
1522 Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
1523 new_capacity_until_GC,
1524 MetaspaceGCThresholdUpdater::ComputeNewSize);
1525 if (PrintGCDetails && Verbose) {
1526 gclog_or_tty->print_cr(" expanding:"
1527 " minimum_desired_capacity: %6.1fKB"
1528 " expand_bytes: %6.1fKB"
1529 " MinMetaspaceExpansion: %6.1fKB"
1530 " new metaspace HWM: %6.1fKB",
1531 minimum_desired_capacity / (double) K,
1532 expand_bytes / (double) K,
1533 MinMetaspaceExpansion / (double) K,
1534 new_capacity_until_GC / (double) K);
1535 }
1536 }
1537 return;
1538 }
1539
1540 // No expansion, now see if we want to shrink
1541 // We would never want to shrink more than this
3304
3305 size_t Metaspace::align_word_size_up(size_t word_size) {
3306 size_t byte_size = word_size * wordSize;
3307 return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3308 }
3309
3310 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3311 // DumpSharedSpaces doesn't use class metadata area (yet)
3312 // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3313 if (is_class_space_allocation(mdtype)) {
3314 return class_vsm()->allocate(word_size);
3315 } else {
3316 return vsm()->allocate(word_size);
3317 }
3318 }
3319
// Raise the GC threshold (capacity_until_GC) by enough for this request,
// report the change to the tracer, and retry the allocation.
MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
  // How many bytes the threshold must grow to cover this allocation.
  size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
  assert(delta_bytes > 0, "Must be");

  // NOTE(review): this add is unconditional; if the HWM is already near
  // the size_t maximum the increment could wrap -- confirm callers keep
  // delta_bytes bounded.
  size_t after_inc = MetaspaceGC::inc_capacity_until_GC(delta_bytes);

  // capacity_until_GC might be updated concurrently, must calculate previous value.
  // (The atomic add returns the post-increment value; subtracting our own
  // delta recovers the value we started from even under contention.)
  size_t before_inc = after_inc - delta_bytes;

  tracer()->report_gc_threshold(before_inc, after_inc,
                                MetaspaceGCThresholdUpdater::ExpandAndAllocate);
  if (PrintGCDetails && Verbose) {
    gclog_or_tty->print_cr("Increase capacity to GC from " SIZE_FORMAT
                           " to " SIZE_FORMAT, before_inc, after_inc);
  }

  // Retry now that the threshold has been raised.
  return allocate(word_size, mdtype);
}
3338
// Space allocated in the Metaspace. This may
// be across several metadata virtual spaces.
// Returns the base address of the current chunk; only valid while dumping
// shared spaces (asserted), where a single current chunk is meaningful.
char* Metaspace::bottom() const {
  assert(DumpSharedSpaces, "only useful and valid for dumping shared spaces");
  return (char*)vsm()->current_chunk()->bottom();
}
3345
3346 size_t Metaspace::used_words_slow(MetadataType mdtype) const {
3347 if (mdtype == ClassType) {
3348 return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
3349 } else {
3350 return vsm()->sum_used_in_chunks_in_use(); // includes overhead!
3351 }
3352 }
3353
3354 size_t Metaspace::free_words_slow(MetadataType mdtype) const {
3355 if (mdtype == ClassType) {
3356 return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
|
1398 // allocation so make the delta greater than just enough
1399 // for this allocation.
1400 delta = max_delta;
1401 } else {
1402 // This allocation is large but the next ones are probably not
1403 // so increase by the minimum.
1404 delta = delta + min_delta;
1405 }
1406
1407 assert_is_size_aligned(delta, Metaspace::commit_alignment());
1408
1409 return delta;
1410 }
1411
1412 size_t MetaspaceGC::capacity_until_GC() {
1413 size_t value = (size_t)OrderAccess::load_ptr_acquire(&_capacity_until_GC);
1414 assert(value >= MetaspaceSize, "Not initialied properly?");
1415 return value;
1416 }
1417
// Attempts to raise the high-water mark by v bytes with a single
// compare-and-swap. Returns false if another thread changed
// _capacity_until_GC concurrently (the caller may retry). On success,
// optionally reports the new and old values via the out-parameters.
// v must be aligned to the metaspace commit alignment.
bool MetaspaceGC::inc_capacity_until_GC(size_t v, size_t* new_cap_until_GC, size_t* old_cap_until_GC) {
  assert_is_size_aligned(v, Metaspace::commit_alignment());

  // Snapshot the current value; the CAS below only succeeds if it is
  // still current when we try to publish the increment.
  size_t capacity_until_GC = (size_t) _capacity_until_GC;
  size_t new_value = capacity_until_GC + v;

  if (new_value < capacity_until_GC) {
    // The addition wrapped around, set new_value to aligned max value.
    new_value = align_size_down(max_uintx, Metaspace::commit_alignment());
  }

  intptr_t expected = (intptr_t) capacity_until_GC;
  intptr_t actual = Atomic::cmpxchg_ptr((intptr_t) new_value, &_capacity_until_GC, expected);

  // Lost the race: some other thread updated the HWM between our read
  // and our CAS. Report failure and let the caller decide what to do.
  if (expected != actual) {
    return false;
  }

  // Out-parameters are optional; fill only the ones the caller asked for.
  if (new_cap_until_GC != NULL) {
    *new_cap_until_GC = new_value;
  }
  if (old_cap_until_GC != NULL) {
    *old_cap_until_GC = capacity_until_GC;
  }
  return true;
}
1444
1445 size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
1446 assert_is_size_aligned(v, Metaspace::commit_alignment());
1447
1448 return (size_t)Atomic::add_ptr(-(intptr_t)v, &_capacity_until_GC);
1449 }
1450
// One-time setup of the metaspace GC threshold.
void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}
1456
// Called after VM initialization completes (GC is possible from here on).
void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done:
  // drop to the actual committed footprint, but never below MetaspaceSize.
  _capacity_until_GC = MAX2(MetaspaceAux::committed_bytes(), MetaspaceSize);
}
1461
1462 bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
1522 gclog_or_tty->print_cr("\nMetaspaceGC::compute_new_size: ");
1523 gclog_or_tty->print_cr(" "
1524 " minimum_free_percentage: %6.2f"
1525 " maximum_used_percentage: %6.2f",
1526 minimum_free_percentage,
1527 maximum_used_percentage);
1528 gclog_or_tty->print_cr(" "
1529 " used_after_gc : %6.1fKB",
1530 used_after_gc / (double) K);
1531 }
1532
1533
1534 size_t shrink_bytes = 0;
1535 if (capacity_until_GC < minimum_desired_capacity) {
1536 // If we have less capacity below the metaspace HWM, then
1537 // increment the HWM.
1538 size_t expand_bytes = minimum_desired_capacity - capacity_until_GC;
1539 expand_bytes = align_size_up(expand_bytes, Metaspace::commit_alignment());
1540 // Don't expand unless it's significant
1541 if (expand_bytes >= MinMetaspaceExpansion) {
1542 size_t new_capacity_until_GC = 0;
1543 bool succeeded = MetaspaceGC::inc_capacity_until_GC(expand_bytes, &new_capacity_until_GC);
1544 assert(succeeded, "Should always succesfully increment HWM when at safepoint");
1545
1546 Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
1547 new_capacity_until_GC,
1548 MetaspaceGCThresholdUpdater::ComputeNewSize);
1549 if (PrintGCDetails && Verbose) {
1550 gclog_or_tty->print_cr(" expanding:"
1551 " minimum_desired_capacity: %6.1fKB"
1552 " expand_bytes: %6.1fKB"
1553 " MinMetaspaceExpansion: %6.1fKB"
1554 " new metaspace HWM: %6.1fKB",
1555 minimum_desired_capacity / (double) K,
1556 expand_bytes / (double) K,
1557 MinMetaspaceExpansion / (double) K,
1558 new_capacity_until_GC / (double) K);
1559 }
1560 }
1561 return;
1562 }
1563
1564 // No expansion, now see if we want to shrink
1565 // We would never want to shrink more than this
3328
3329 size_t Metaspace::align_word_size_up(size_t word_size) {
3330 size_t byte_size = word_size * wordSize;
3331 return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3332 }
3333
3334 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3335 // DumpSharedSpaces doesn't use class metadata area (yet)
3336 // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3337 if (is_class_space_allocation(mdtype)) {
3338 return class_vsm()->allocate(word_size);
3339 } else {
3340 return vsm()->allocate(word_size);
3341 }
3342 }
3343
// Raise the GC threshold (capacity_until_GC) by enough for this request
// and retry the allocation, tolerating concurrent threshold updates.
MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
  // How many bytes the threshold must grow to cover this allocation.
  size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
  assert(delta_bytes > 0, "Must be");

  size_t before = 0;       // HWM before our (successful) increment
  size_t after = 0;        // HWM after our (successful) increment
  MetaWord* res;
  bool incremented;

  // Each thread increments the HWM at most once. Even if the thread fails to increment
  // the HWM, an allocation is still attempted. This is because another thread must then
  // have incremented the HWM and therefore the allocation might still succeed.
  do {
    incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before);
    res = allocate(word_size, mdtype);
  } while (!incremented && res == NULL);

  if (incremented) {
    // Only the thread whose CAS succeeded reports the threshold change;
    // 'before'/'after' are valid only in that case.
    tracer()->report_gc_threshold(before, after,
                                  MetaspaceGCThresholdUpdater::ExpandAndAllocate);
    if (PrintGCDetails && Verbose) {
      gclog_or_tty->print_cr("Increase capacity to GC from " SIZE_FORMAT
                             " to " SIZE_FORMAT, before, after);
    }
  }

  return res;
}
3372
// Space allocated in the Metaspace. This may
// be across several metadata virtual spaces.
// Returns the base address of the current chunk; only valid while dumping
// shared spaces (asserted), where a single current chunk is meaningful.
char* Metaspace::bottom() const {
  assert(DumpSharedSpaces, "only useful and valid for dumping shared spaces");
  return (char*)vsm()->current_chunk()->bottom();
}
3379
3380 size_t Metaspace::used_words_slow(MetadataType mdtype) const {
3381 if (mdtype == ClassType) {
3382 return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
3383 } else {
3384 return vsm()->sum_used_in_chunks_in_use(); // includes overhead!
3385 }
3386 }
3387
3388 size_t Metaspace::free_words_slow(MetadataType mdtype) const {
3389 if (mdtype == ClassType) {
3390 return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
|