1804 // Space allocated from the VirtualSpace
1805
1806 VirtualSpaceList::~VirtualSpaceList() {
1807 VirtualSpaceListIterator iter(virtual_space_list());
1808 while (iter.repeat()) {
1809 VirtualSpaceNode* vsl = iter.get_next();
1810 delete vsl;
1811 }
1812 }
1813
1814 void VirtualSpaceList::inc_reserved_words(size_t v) {
1815 assert_lock_strong(SpaceManager::expand_lock());
1816 _reserved_words = _reserved_words + v;
1817 }
1818 void VirtualSpaceList::dec_reserved_words(size_t v) {
1819 assert_lock_strong(SpaceManager::expand_lock());
1820 _reserved_words = _reserved_words - v;
1821 }
1822
// Checks the invariant that total committed metaspace memory never exceeds
// -XX:MaxMetaspaceSize. Used by the committed-words mutators below.
#define assert_committed_below_limit() \
  assert(MetaspaceAux::committed_bytes() <= MaxMetaspaceSize, \
         "Too much committed memory. Committed: " SIZE_FORMAT \
         " limit (MaxMetaspaceSize): " SIZE_FORMAT, \
         MetaspaceAux::committed_bytes(), MaxMetaspaceSize);
1828
1829 void VirtualSpaceList::inc_committed_words(size_t v) {
1830 assert_lock_strong(SpaceManager::expand_lock());
1831 _committed_words = _committed_words + v;
1832
1833 assert_committed_below_limit();
1834 }
1835 void VirtualSpaceList::dec_committed_words(size_t v) {
1836 assert_lock_strong(SpaceManager::expand_lock());
1837 _committed_words = _committed_words - v;
1838
1839 assert_committed_below_limit();
1840 }
1841
1842 void VirtualSpaceList::inc_virtual_space_count() {
1843 assert_lock_strong(SpaceManager::expand_lock());
1844 _virtual_space_count++;
1845 }
1846 void VirtualSpaceList::dec_virtual_space_count() {
1847 assert_lock_strong(SpaceManager::expand_lock());
2402 if (old_cap_until_GC != NULL) {
2403 *old_cap_until_GC = capacity_until_GC;
2404 }
2405 return true;
2406 }
2407
// Atomically subtracts v (asserted commit-aligned) from the expansion
// high-water mark and returns Atomic::sub's result cast back to size_t.
size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
  assert_is_aligned(v, Metaspace::commit_alignment());

  return (size_t)Atomic::sub((intptr_t)v, &_capacity_until_GC);
}
2413
void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}
2419
void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done;
  // never set it below what is already committed.
  _capacity_until_GC = MAX2(MetaspaceAux::committed_bytes(), MetaspaceSize);
}
2424
// Returns true if the metaspace may be expanded by word_size words, i.e. the
// expansion stays within CompressedClassSpaceSize (for class-space requests)
// and within the global MaxMetaspaceSize limit.
bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
  // Check if the compressed class space is full.
  if (is_class && Metaspace::using_class_space()) {
    size_t class_committed = MetaspaceAux::committed_bytes(Metaspace::ClassType);
    if (class_committed + word_size * BytesPerWord > CompressedClassSpaceSize) {
      // NOTE(review): is_class is always true on this branch, so the ternary
      // below always prints "class".
      log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by " SIZE_FORMAT " words (CompressedClassSpaceSize = " SIZE_FORMAT " words)",
                (is_class ? "class" : "non-class"), word_size, CompressedClassSpaceSize / sizeof(MetaWord));
      return false;
    }
  }

  // Check if the user has imposed a limit on the metaspace memory.
  size_t committed_bytes = MetaspaceAux::committed_bytes();
  if (committed_bytes + word_size * BytesPerWord > MaxMetaspaceSize) {
    log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by " SIZE_FORMAT " words (MaxMetaspaceSize = " SIZE_FORMAT " words)",
              (is_class ? "class" : "non-class"), word_size, MaxMetaspaceSize / sizeof(MetaWord));
    return false;
  }

  return true;
}
2446
// Returns the number of words the metaspace may still commit: the smaller of
// the headroom below MaxMetaspaceSize and below the GC high-water mark.
size_t MetaspaceGC::allowed_expansion() {
  size_t committed_bytes = MetaspaceAux::committed_bytes();
  size_t capacity_until_gc = capacity_until_GC();

  // The HWM must never drop below what is already committed.
  assert(capacity_until_gc >= committed_bytes,
         "capacity_until_gc: " SIZE_FORMAT " < committed_bytes: " SIZE_FORMAT,
         capacity_until_gc, committed_bytes);

  size_t left_until_max = MaxMetaspaceSize - committed_bytes;
  size_t left_until_GC = capacity_until_gc - committed_bytes;
  size_t left_to_commit = MIN2(left_until_GC, left_until_max);
  log_trace(gc, metaspace, freelist)("allowed expansion words: " SIZE_FORMAT
            " (left_until_max: " SIZE_FORMAT ", left_until_GC: " SIZE_FORMAT ".",
            left_to_commit / BytesPerWord, left_until_max / BytesPerWord, left_until_GC / BytesPerWord);

  return left_to_commit / BytesPerWord;
}
2464
2465 void MetaspaceGC::compute_new_size() {
2466 assert(_shrink_factor <= 100, "invalid shrink factor");
2467 uint current_shrink_factor = _shrink_factor;
2468 _shrink_factor = 0;
2469
2470 // Using committed_bytes() for used_after_gc is an overestimation, since the
2471 // chunk free lists are included in committed_bytes() and the memory in an
2472 // un-fragmented chunk free list is available for future allocations.
2473 // However, if the chunk free lists becomes fragmented, then the memory may
2474 // not be available for future allocations and the memory is therefore "in use".
2475 // Including the chunk free lists in the definition of "in use" is therefore
2476 // necessary. Not including the chunk free lists can cause capacity_until_GC to
2477 // shrink below committed_bytes() and this has caused serious bugs in the past.
2478 const size_t used_after_gc = MetaspaceAux::committed_bytes();
2479 const size_t capacity_until_GC = MetaspaceGC::capacity_until_GC();
2480
2481 const double minimum_free_percentage = MinMetaspaceFreeRatio / 100.0;
2482 const double maximum_used_percentage = 1.0 - minimum_free_percentage;
2483
2484 const double min_tmp = used_after_gc / maximum_used_percentage;
2485 size_t minimum_desired_capacity =
2486 (size_t)MIN2(min_tmp, double(max_uintx));
2487 // Don't shrink less than the initial generation size
2488 minimum_desired_capacity = MAX2(minimum_desired_capacity,
2489 MetaspaceSize);
2490
2491 log_trace(gc, metaspace)("MetaspaceGC::compute_new_size: ");
2492 log_trace(gc, metaspace)(" minimum_free_percentage: %6.2f maximum_used_percentage: %6.2f",
2493 minimum_free_percentage, maximum_used_percentage);
2494 log_trace(gc, metaspace)(" used_after_gc : %6.1fKB", used_after_gc / (double) K);
2495
2496
2497 size_t shrink_bytes = 0;
2498 if (capacity_until_GC < minimum_desired_capacity) {
3467 Metaspace::MetaspaceType space_type,
3468 Mutex* lock) :
3469 _mdtype(mdtype),
3470 _space_type(space_type),
3471 _allocated_blocks_words(0),
3472 _allocated_chunks_words(0),
3473 _allocated_chunks_count(0),
3474 _block_freelists(NULL),
3475 _lock(lock)
3476 {
3477 initialize();
3478 }
3479
// Account one newly added Metachunk of 'words' capacity in both the
// per-SpaceManager counters and the global totals. Requires expand_lock().
void SpaceManager::inc_size_metrics(size_t words) {
  assert_lock_strong(SpaceManager::expand_lock());
  // Total of allocated Metachunks and allocated Metachunks count
  // for each SpaceManager
  _allocated_chunks_words = _allocated_chunks_words + words;
  _allocated_chunks_count++;
  // Global total of capacity in allocated Metachunks
  MetaspaceAux::inc_capacity(mdtype(), words);
  // Global total of allocated Metablocks.
  // used_words_slow() includes the overhead in each
  // Metachunk so include it in the used when the
  // Metachunk is first added (so only added once per
  // Metachunk).
  MetaspaceAux::inc_used(mdtype(), Metachunk::overhead());
}
3495
// Account 'words' of newly allocated Metablocks. Uses atomic adds rather
// than expand_lock() (see also MetaspaceAux::inc_used).
void SpaceManager::inc_used_metrics(size_t words) {
  // Add to the per SpaceManager total
  Atomic::add(words, &_allocated_blocks_words);
  // Add to the global total
  MetaspaceAux::inc_used(mdtype(), words);
}
3502
// Remove this SpaceManager's entire footprint from the global capacity and
// used totals (presumably called on teardown — the destructor is not fully
// visible in this chunk).
void SpaceManager::dec_total_from_size_metrics() {
  MetaspaceAux::dec_capacity(mdtype(), allocated_chunks_words());
  MetaspaceAux::dec_used(mdtype(), allocated_blocks_words());
  // Also deduct the overhead per Metachunk
  MetaspaceAux::dec_used(mdtype(), allocated_chunks_count() * Metachunk::overhead());
}
3509
// Reset per-manager state: clear every in-use chunk list and the current chunk.
void SpaceManager::initialize() {
  Metadebug::init_allocation_fail_alot_count();
  for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
    _chunks_in_use[i] = NULL;
  }
  _current_chunk = NULL;
  log_trace(gc, metaspace, freelist)("SpaceManager(): " PTR_FORMAT, p2i(this));
}
3518
3519 SpaceManager::~SpaceManager() {
3520 // This call this->_lock which can't be done while holding expand_lock()
3521 assert(sum_capacity_in_chunks_in_use() == allocated_chunks_words(),
3522 "sum_capacity_in_chunks_in_use() " SIZE_FORMAT
3523 " allocated_chunks_words() " SIZE_FORMAT,
3524 sum_capacity_in_chunks_in_use(), allocated_chunks_words());
3525
3526 MutexLockerEx fcl(SpaceManager::expand_lock(),
3527 Mutex::_no_safepoint_check_flag);
3760 curr_total += curr->word_size();
3761 used += curr->used_word_size();
3762 capacity += curr->word_size();
3763 waste += curr->free_word_size() + curr->overhead();;
3764 }
3765 }
3766
3767 if (log_is_enabled(Trace, gc, metaspace, freelist)) {
3768 if (block_freelists() != NULL) block_freelists()->print_on(out);
3769 }
3770
3771 size_t free = current_chunk() == NULL ? 0 : current_chunk()->free_word_size();
3772 // Free space isn't wasted.
3773 waste -= free;
3774
3775 out->print_cr("total of all chunks " SIZE_FORMAT " used " SIZE_FORMAT
3776 " free " SIZE_FORMAT " capacity " SIZE_FORMAT
3777 " waste " SIZE_FORMAT, curr_total, used, free, capacity, waste);
3778 }
3779
3780 // MetaspaceAux
3781
3782
// Running totals, indexed by Metaspace::MetadataType (two entries).
size_t MetaspaceAux::_capacity_words[] = {0, 0};
// _used_words is updated with Atomic ops (see inc_used/dec_used), hence volatile.
volatile size_t MetaspaceAux::_used_words[] = {0, 0};
3785
3786 size_t MetaspaceAux::free_bytes(Metaspace::MetadataType mdtype) {
3787 VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3788 return list == NULL ? 0 : list->free_bytes();
3789 }
3790
3791 size_t MetaspaceAux::free_bytes() {
3792 return free_bytes(Metaspace::ClassType) + free_bytes(Metaspace::NonClassType);
3793 }
3794
// Decrease the global capacity total for 'mdtype'. Requires expand_lock();
// asserts against underflow.
void MetaspaceAux::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
  assert_lock_strong(SpaceManager::expand_lock());
  assert(words <= capacity_words(mdtype),
         "About to decrement below 0: words " SIZE_FORMAT
         " is greater than _capacity_words[%u] " SIZE_FORMAT,
         words, mdtype, capacity_words(mdtype));
  _capacity_words[mdtype] -= words;
}
3803
// Increase the global capacity total for 'mdtype'. Requires expand_lock().
void MetaspaceAux::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
  assert_lock_strong(SpaceManager::expand_lock());
  // Needs to be atomic
  // NOTE(review): the increment is a plain +=, protected by expand_lock()
  // (asserted above) rather than an atomic op — the comment above looks stale.
  _capacity_words[mdtype] += words;
}
3809
// Decrease the global used total for 'mdtype'; asserts against underflow.
void MetaspaceAux::dec_used(Metaspace::MetadataType mdtype, size_t words) {
  assert(words <= used_words(mdtype),
         "About to decrement below 0: words " SIZE_FORMAT
         " is greater than _used_words[%u] " SIZE_FORMAT,
         words, mdtype, used_words(mdtype));
  // For CMS deallocation of the Metaspaces occurs during the
  // sweep which is a concurrent phase. Protection by the expand_lock()
  // is not enough since allocation is on a per Metaspace basis
  // and protected by the Metaspace lock.
  Atomic::sub(words, &_used_words[mdtype]);
}
3821
// Increase the global used total for 'mdtype'.
void MetaspaceAux::inc_used(Metaspace::MetadataType mdtype, size_t words) {
  // _used_words tracks allocations for
  // each piece of metadata. Those allocations are
  // generally done concurrently by different application
  // threads so must be done atomically.
  Atomic::add(words, &_used_words[mdtype]);
}
3829
3830 size_t MetaspaceAux::used_bytes_slow(Metaspace::MetadataType mdtype) {
3831 size_t used = 0;
3832 ClassLoaderDataGraphMetaspaceIterator iter;
3833 while (iter.repeat()) {
3834 Metaspace* msp = iter.get_next();
3835 // Sum allocated_blocks_words for each metaspace
3836 if (msp != NULL) {
3837 used += msp->used_words_slow(mdtype);
3838 }
3839 }
3840 return used * BytesPerWord;
3841 }
3842
3843 size_t MetaspaceAux::free_bytes_slow(Metaspace::MetadataType mdtype) {
3844 size_t free = 0;
3845 ClassLoaderDataGraphMetaspaceIterator iter;
3846 while (iter.repeat()) {
3847 Metaspace* msp = iter.get_next();
3848 if (msp != NULL) {
3849 free += msp->free_words_slow(mdtype);
3850 }
3851 }
3852 return free * BytesPerWord;
3853 }
3854
3855 size_t MetaspaceAux::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
3856 if ((mdtype == Metaspace::ClassType) && !Metaspace::using_class_space()) {
3857 return 0;
3858 }
3859 // Don't count the space in the freelists. That space will be
3860 // added to the capacity calculation as needed.
3861 size_t capacity = 0;
3862 ClassLoaderDataGraphMetaspaceIterator iter;
3863 while (iter.repeat()) {
3864 Metaspace* msp = iter.get_next();
3865 if (msp != NULL) {
3866 capacity += msp->capacity_words_slow(mdtype);
3867 }
3868 }
3869 return capacity * BytesPerWord;
3870 }
3871
// Debug-only total capacity recomputation; cross-checks the running total.
size_t MetaspaceAux::capacity_bytes_slow() {
#ifdef PRODUCT
  // Use capacity_bytes() in PRODUCT instead of this function.
  guarantee(false, "Should not call capacity_bytes_slow() in the PRODUCT");
#endif
  size_t class_capacity = capacity_bytes_slow(Metaspace::ClassType);
  size_t non_class_capacity = capacity_bytes_slow(Metaspace::NonClassType);
  assert(capacity_bytes() == class_capacity + non_class_capacity,
         "bad accounting: capacity_bytes() " SIZE_FORMAT
         " class_capacity + non_class_capacity " SIZE_FORMAT
         " class_capacity " SIZE_FORMAT " non_class_capacity " SIZE_FORMAT,
         capacity_bytes(), class_capacity + non_class_capacity,
         class_capacity, non_class_capacity);

  return class_capacity + non_class_capacity;
}
3888
3889 size_t MetaspaceAux::reserved_bytes(Metaspace::MetadataType mdtype) {
3890 VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3891 return list == NULL ? 0 : list->reserved_bytes();
3892 }
3893
3894 size_t MetaspaceAux::committed_bytes(Metaspace::MetadataType mdtype) {
3895 VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3896 return list == NULL ? 0 : list->committed_bytes();
3897 }
3898
3899 size_t MetaspaceAux::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }
3900
3901 size_t MetaspaceAux::free_chunks_total_words(Metaspace::MetadataType mdtype) {
3902 ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
3903 if (chunk_manager == NULL) {
3904 return 0;
3905 }
3906 chunk_manager->slow_verify();
3907 return chunk_manager->free_chunks_total_words();
3908 }
3909
3910 size_t MetaspaceAux::free_chunks_total_bytes(Metaspace::MetadataType mdtype) {
3911 return free_chunks_total_words(mdtype) * BytesPerWord;
3912 }
3913
3914 size_t MetaspaceAux::free_chunks_total_words() {
3915 return free_chunks_total_words(Metaspace::ClassType) +
3916 free_chunks_total_words(Metaspace::NonClassType);
3917 }
3918
3919 size_t MetaspaceAux::free_chunks_total_bytes() {
3920 return free_chunks_total_words() * BytesPerWord;
3921 }
3922
3923 bool MetaspaceAux::has_chunk_free_list(Metaspace::MetadataType mdtype) {
3924 return Metaspace::get_chunk_manager(mdtype) != NULL;
3925 }
3926
// Snapshot of the free-chunk list for 'mdtype'; a default-constructed
// (empty) summary when no chunk manager exists.
MetaspaceChunkFreeListSummary MetaspaceAux::chunk_free_list_summary(Metaspace::MetadataType mdtype) {
  if (!has_chunk_free_list(mdtype)) {
    return MetaspaceChunkFreeListSummary();
  }

  const ChunkManager* cm = Metaspace::get_chunk_manager(mdtype);
  return cm->chunk_free_list_summary();
}
3935
// Log the change in metaspace usage as "before->after(reserved)", in KB.
void MetaspaceAux::print_metaspace_change(size_t prev_metadata_used) {
  log_info(gc, metaspace)("Metaspace: " SIZE_FORMAT "K->" SIZE_FORMAT "K(" SIZE_FORMAT "K)",
                          prev_metadata_used/K, used_bytes()/K, reserved_bytes()/K);
}
3940
3941 void MetaspaceAux::print_on(outputStream* out) {
3942 Metaspace::MetadataType nct = Metaspace::NonClassType;
3943
3944 out->print_cr(" Metaspace "
3945 "used " SIZE_FORMAT "K, "
3946 "capacity " SIZE_FORMAT "K, "
3947 "committed " SIZE_FORMAT "K, "
3948 "reserved " SIZE_FORMAT "K",
3949 used_bytes()/K,
3950 capacity_bytes()/K,
3951 committed_bytes()/K,
3952 reserved_bytes()/K);
3953
3954 if (Metaspace::using_class_space()) {
3955 Metaspace::MetadataType ct = Metaspace::ClassType;
3956 out->print_cr(" class space "
3957 "used " SIZE_FORMAT "K, "
3958 "capacity " SIZE_FORMAT "K, "
3959 "committed " SIZE_FORMAT "K, "
3960 "reserved " SIZE_FORMAT "K",
3961 used_bytes(ct)/K,
3962 capacity_bytes(ct)/K,
3963 committed_bytes(ct)/K,
3964 reserved_bytes(ct)/K);
3965 }
3966 }
3967
// Print information for class space and data space separately.
// This is almost the same as above.
void MetaspaceAux::print_on(outputStream* out, Metaspace::MetadataType mdtype) {
  size_t free_chunks_capacity_bytes = free_chunks_total_bytes(mdtype);
  size_t capacity_bytes = capacity_bytes_slow(mdtype);
  size_t used_bytes = used_bytes_slow(mdtype);
  size_t free_bytes = free_bytes_slow(mdtype);
  // Everything allocated should be either used, free inside a chunk, or
  // sitting in the free-chunk lists.
  size_t used_and_free = used_bytes + free_bytes +
                  free_chunks_capacity_bytes;
  out->print_cr(" Chunk accounting: (used in chunks " SIZE_FORMAT
             "K + unused in chunks " SIZE_FORMAT "K + "
             " capacity in free chunks " SIZE_FORMAT "K) = " SIZE_FORMAT
             "K capacity in allocated chunks " SIZE_FORMAT "K",
             used_bytes / K,
             free_bytes / K,
             free_chunks_capacity_bytes / K,
             used_and_free / K,
             capacity_bytes / K);
  // Accounting can only be correct if we got the values during a safepoint
  assert(!SafepointSynchronize::is_at_safepoint() || used_and_free == capacity_bytes, "Accounting is wrong");
}
3989
// Print total fragmentation for class metaspaces: per chunk-size-class
// waste and chunk counts, summed over every class-space SpaceManager.
void MetaspaceAux::print_class_waste(outputStream* out) {
  assert(Metaspace::using_class_space(), "class metaspace not used");
  size_t cls_specialized_waste = 0, cls_small_waste = 0, cls_medium_waste = 0;
  size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
  ClassLoaderDataGraphMetaspaceIterator iter;
  while (iter.repeat()) {
    Metaspace* msp = iter.get_next();
    if (msp != NULL) {
      cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
      cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
      cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
      cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
      cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
      cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
      cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
    }
  }
  out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
                SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
                SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
                "large count " SIZE_FORMAT,
                cls_specialized_count, cls_specialized_waste,
                cls_small_count, cls_small_waste,
                cls_medium_count, cls_medium_waste, cls_humongous_count);
}
4016
4017 // Print total fragmentation for data and class metaspaces separately
4018 void MetaspaceAux::print_waste(outputStream* out) {
4019 size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
4020 size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;
4021
4022 ClassLoaderDataGraphMetaspaceIterator iter;
4023 while (iter.repeat()) {
4024 Metaspace* msp = iter.get_next();
4025 if (msp != NULL) {
4026 specialized_waste += msp->vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
4027 specialized_count += msp->vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
4028 small_waste += msp->vsm()->sum_waste_in_chunks_in_use(SmallIndex);
4029 small_count += msp->vsm()->sum_count_in_chunks_in_use(SmallIndex);
4030 medium_waste += msp->vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4031 medium_count += msp->vsm()->sum_count_in_chunks_in_use(MediumIndex);
4032 humongous_count += msp->vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4033 }
4034 }
4035 out->print_cr("Total fragmentation waste (words) doesn't count free space");
4036 out->print_cr(" data: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4037 SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4038 SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4191 _total_class.print_on(_out, _scale);
4192 }
4193 _out->cr();
4194
4195 MetadataStats total_anon;
4196 total_anon.add(_total_anon_metadata);
4197 total_anon.add(_total_anon_class);
4198
4199 _out->print("For anonymous classes=" SIZE_FORMAT_W(6) " ", _total_anon_count);
4200 total_anon.print_on(_out, _scale);
4201
4202 _out->print(" Metadata ");
4203 _total_anon_metadata.print_on(_out, _scale);
4204
4205 if (Metaspace::using_class_space()) {
4206 _out->print(" Class data ");
4207 _total_anon_class.print_on(_out, _scale);
4208 }
4209 }
4210
// NMT (native memory tracking) report: reserved/committed per space, all
// chunk managers, then per-classloader metadata. 'scale' is the byte divisor
// matching 'unit' (see scale_unit).
void MetaspaceAux::print_metadata_for_nmt(outputStream* out, size_t scale) {
  const char* unit = scale_unit(scale);
  out->print_cr("Metaspaces:");
  out->print_cr("  Metadata space: reserved=" SIZE_FORMAT_W(10) "%s committed=" SIZE_FORMAT_W(10) "%s",
            reserved_bytes(Metaspace::NonClassType) / scale, unit,
            committed_bytes(Metaspace::NonClassType) / scale, unit);
  if (Metaspace::using_class_space()) {
    out->print_cr("  Class    space: reserved=" SIZE_FORMAT_W(10) "%s committed=" SIZE_FORMAT_W(10) "%s",
              reserved_bytes(Metaspace::ClassType) / scale, unit,
              committed_bytes(Metaspace::ClassType) / scale, unit);
  }

  out->cr();
  ChunkManager::print_all_chunkmanagers(out, scale);

  out->cr();
  out->print_cr("Per-classloader metadata:");
  out->cr();

  PrintCLDMetaspaceInfoClosure cl(out, scale);
  ClassLoaderDataGraph::cld_do(&cl);
}
4233
4234
// Dump global metaspace things from the end of ClassLoaderDataGraph
// (per-type summaries followed by the fragmentation/waste report).
void MetaspaceAux::dump(outputStream* out) {
  out->print_cr("All Metaspace:");
  out->print("data space: "); print_on(out, Metaspace::NonClassType);
  out->print("class space: "); print_on(out, Metaspace::ClassType);
  print_waste(out);
}
4242
4243 // Prints an ASCII representation of the given space.
4244 void MetaspaceAux::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
4245 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
4246 const bool for_class = mdtype == Metaspace::ClassType ? true : false;
4247 VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
4248 if (vsl != NULL) {
4249 if (for_class) {
4250 if (!Metaspace::using_class_space()) {
4251 out->print_cr("No Class Space.");
4252 return;
4253 }
4254 out->print_raw("---- Metaspace Map (Class Space) ----");
4255 } else {
4256 out->print_raw("---- Metaspace Map (Non-Class Space) ----");
4257 }
4258 // Print legend:
4259 out->cr();
4260 out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
4261 out->cr();
4262 VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
4263 vsl->print_map(out);
4264 out->cr();
4265 }
4266 }
4267
// Verify both chunk managers' free lists (the class one only when in use).
void MetaspaceAux::verify_free_chunks() {
  Metaspace::chunk_manager_metadata()->verify();
  if (Metaspace::using_class_space()) {
    Metaspace::chunk_manager_class()->verify();
  }
}
4274
// Debug-only: cross-check the running capacity totals (overall and per
// metadata type) against the slow recomputation over all metaspaces.
void MetaspaceAux::verify_capacity() {
#ifdef ASSERT
  size_t running_sum_capacity_bytes = capacity_bytes();
  // For purposes of the running sum of capacity, verify against capacity
  size_t capacity_in_use_bytes = capacity_bytes_slow();
  assert(running_sum_capacity_bytes == capacity_in_use_bytes,
         "capacity_words() * BytesPerWord " SIZE_FORMAT
         " capacity_bytes_slow()" SIZE_FORMAT,
         running_sum_capacity_bytes, capacity_in_use_bytes);
  // Per-type check; the loop-local below intentionally shadows the outer one.
  for (Metaspace::MetadataType i = Metaspace::ClassType;
       i < Metaspace:: MetadataTypeCount;
       i = (Metaspace::MetadataType)(i + 1)) {
    size_t capacity_in_use_bytes = capacity_bytes_slow(i);
    assert(capacity_bytes(i) == capacity_in_use_bytes,
           "capacity_bytes(%u) " SIZE_FORMAT
           " capacity_bytes_slow(%u)" SIZE_FORMAT,
           i, capacity_bytes(i), i, capacity_in_use_bytes);
  }
#endif
}
4295
// Debug-only: cross-check the running used totals (overall and per
// metadata type) against the slow recomputation over all metaspaces.
void MetaspaceAux::verify_used() {
#ifdef ASSERT
  size_t running_sum_used_bytes = used_bytes();
  // For purposes of the running sum of used, verify against used
  size_t used_in_use_bytes = used_bytes_slow();
  assert(used_bytes() == used_in_use_bytes,
         "used_bytes() " SIZE_FORMAT
         " used_bytes_slow()" SIZE_FORMAT,
         used_bytes(), used_in_use_bytes);
  // Per-type check; the loop-local below intentionally shadows the outer one.
  for (Metaspace::MetadataType i = Metaspace::ClassType;
       i < Metaspace:: MetadataTypeCount;
       i = (Metaspace::MetadataType)(i + 1)) {
    size_t used_in_use_bytes = used_bytes_slow(i);
    assert(used_bytes(i) == used_in_use_bytes,
           "used_bytes(%u) " SIZE_FORMAT
           " used_bytes_slow(%u)" SIZE_FORMAT,
           i, used_bytes(i), i, used_in_use_bytes);
  }
#endif
}
4316
// Run both debug-only accounting cross-checks.
void MetaspaceAux::verify_metrics() {
  verify_capacity();
  verify_used();
}
4321
4322
4323 // Metaspace methods
4324
// Zero-initialized here; assigned during metaspace setup (the initialization
// code is not visible in this chunk).
size_t Metaspace::_first_chunk_word_size = 0;
size_t Metaspace::_first_class_chunk_word_size = 0;

size_t Metaspace::_commit_alignment = 0;
size_t Metaspace::_reserve_alignment = 0;
4330
// All real construction work is delegated to initialize().
Metaspace::Metaspace(Mutex* lock, MetaspaceType type) {
  initialize(lock, type);
}
4334
4335 Metaspace::~Metaspace() {
4336 delete _vsm;
4337 if (using_class_space()) {
4873 assert(using_class_space(), "Has to use class space");
4874 return class_vsm()->calc_chunk_size(word_size);
4875 }
4876
4877 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
4878 tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
4879
4880 // If result is still null, we are out of memory.
4881 Log(gc, metaspace, freelist) log;
4882 if (log.is_info()) {
4883 log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
4884 is_class_space_allocation(mdtype) ? "class" : "data", word_size);
4885 ResourceMark rm;
4886 if (log.is_debug()) {
4887 if (loader_data->metaspace_or_null() != NULL) {
4888 LogStream ls(log.debug());
4889 loader_data->dump(&ls);
4890 }
4891 }
4892 LogStream ls(log.info());
4893 MetaspaceAux::dump(&ls);
4894 MetaspaceAux::print_metaspace_map(&ls, mdtype);
4895 ChunkManager::print_all_chunkmanagers(&ls);
4896 }
4897
4898 bool out_of_compressed_class_space = false;
4899 if (is_class_space_allocation(mdtype)) {
4900 Metaspace* metaspace = loader_data->metaspace_non_null();
4901 out_of_compressed_class_space =
4902 MetaspaceAux::committed_bytes(Metaspace::ClassType) +
4903 (metaspace->class_chunk_size(word_size) * BytesPerWord) >
4904 CompressedClassSpaceSize;
4905 }
4906
4907 // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
4908 const char* space_string = out_of_compressed_class_space ?
4909 "Compressed class space" : "Metaspace";
4910
4911 report_java_out_of_memory(space_string);
4912
4913 if (JvmtiExport::should_post_resource_exhausted()) {
4914 JvmtiExport::post_resource_exhausted(
4915 JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
4916 space_string);
4917 }
4918
4919 if (!is_init_completed()) {
4920 vm_exit_during_initialization("OutOfMemoryError", space_string);
4921 }
4922
4996 guarantee(chunk != NULL, "Sanity");
4997 // Verify chunk itself; then verify that it is consistent with the
4998 // occupany map of its containing node.
4999 chunk->verify();
5000 VirtualSpaceNode* const vsn = chunk->container();
5001 OccupancyMap* const ocmap = vsn->occupancy_map();
5002 ocmap->verify_for_chunk(chunk);
5003 }
5004 #endif
5005
// Mark 'chunk' as in-use (or free) both on the chunk itself and in the
// occupancy map of its containing node, keeping the two views in sync.
static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse) {
  chunk->set_is_tagged_free(!inuse);
  OccupancyMap* const ocmap = chunk->container()->occupancy_map();
  ocmap->set_region_in_use((MetaWord*)chunk, chunk->word_size(), inuse);
}
5011
5012 /////////////// Unit tests ///////////////
5013
5014 #ifndef PRODUCT
5015
// Sanity tests for the MetaspaceAux accounting: reserved/committed totals
// must be positive and consistent (committed <= reserved, per-type <= total).
class TestMetaspaceAuxTest : AllStatic {
 public:
  static void test_reserved() {
    size_t reserved = MetaspaceAux::reserved_bytes();

    assert(reserved > 0, "assert");

    size_t committed = MetaspaceAux::committed_bytes();
    assert(committed <= reserved, "assert");

    size_t reserved_metadata = MetaspaceAux::reserved_bytes(Metaspace::NonClassType);
    assert(reserved_metadata > 0, "assert");
    assert(reserved_metadata <= reserved, "assert");

    if (UseCompressedClassPointers) {
      size_t reserved_class = MetaspaceAux::reserved_bytes(Metaspace::ClassType);
      assert(reserved_class > 0, "assert");
      assert(reserved_class < reserved, "assert");
    }
  }

  static void test_committed() {
    size_t committed = MetaspaceAux::committed_bytes();

    assert(committed > 0, "assert");

    size_t reserved = MetaspaceAux::reserved_bytes();
    assert(committed <= reserved, "assert");

    size_t committed_metadata = MetaspaceAux::committed_bytes(Metaspace::NonClassType);
    assert(committed_metadata > 0, "assert");
    assert(committed_metadata <= committed, "assert");

    if (UseCompressedClassPointers) {
      size_t committed_class = MetaspaceAux::committed_bytes(Metaspace::ClassType);
      assert(committed_class > 0, "assert");
      assert(committed_class < committed, "assert");
    }
  }

  // Requests a chunk larger than a whole virtual space to exercise the
  // large-chunk path of VirtualSpaceList::get_new_chunk.
  static void test_virtual_space_list_large_chunk() {
    VirtualSpaceList* vs_list = new VirtualSpaceList(os::vm_allocation_granularity());
    MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
    // A size larger than VirtualSpaceSize (256k) and add one page to make it _not_ be
    // vm_allocation_granularity aligned on Windows.
    size_t large_size = (size_t)(2*256*K + (os::vm_page_size()/BytesPerWord));
    large_size += (os::vm_page_size()/BytesPerWord);
    // NOTE(review): a page is added twice — once in the expression above and
    // once here; confirm intent. vs_list is not deleted (test-only code).
    vs_list->get_new_chunk(large_size, 0);
  }

  static void test() {
    test_reserved();
    test_committed();
    test_virtual_space_list_large_chunk();
  }
};
5072
// Entry point for the unit tests above.
void TestMetaspaceAux_test() {
  TestMetaspaceAuxTest::test();
}
5076
5077 class TestVirtualSpaceNodeTest {
5078 static void chunk_up(size_t words_left, size_t& num_medium_chunks,
5079 size_t& num_small_chunks,
5080 size_t& num_specialized_chunks) {
5081 num_medium_chunks = words_left / MediumChunk;
5082 words_left = words_left % MediumChunk;
5083
5084 num_small_chunks = words_left / SmallChunk;
5085 words_left = words_left % SmallChunk;
5086 // how many specialized chunks can we get?
5087 num_specialized_chunks = words_left / SpecializedChunk;
5088 assert(words_left % SpecializedChunk == 0, "should be nothing left");
5089 }
5090
5091 public:
5092 static void test() {
5093 MutexLockerEx ml(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
5094 const size_t vsn_test_size_words = MediumChunk * 4;
|
1804 // Space allocated from the VirtualSpace
1805
// Destructor: delete every VirtualSpaceNode still in the list.
VirtualSpaceList::~VirtualSpaceList() {
  VirtualSpaceListIterator iter(virtual_space_list());
  while (iter.repeat()) {
    VirtualSpaceNode* vsl = iter.get_next();  // a node, despite the 'vsl' name
    delete vsl;
  }
}
1813
// Mutators for the running total of reserved words; callers must hold
// SpaceManager::expand_lock() (asserted).
void VirtualSpaceList::inc_reserved_words(size_t v) {
  assert_lock_strong(SpaceManager::expand_lock());
  _reserved_words = _reserved_words + v;
}
void VirtualSpaceList::dec_reserved_words(size_t v) {
  assert_lock_strong(SpaceManager::expand_lock());
  _reserved_words = _reserved_words - v;
}
1822
// Checks the invariant that total committed metaspace memory never exceeds
// -XX:MaxMetaspaceSize. Used by the committed-words mutators below.
#define assert_committed_below_limit() \
  assert(MetaspaceUtils::committed_bytes() <= MaxMetaspaceSize, \
         "Too much committed memory. Committed: " SIZE_FORMAT \
         " limit (MaxMetaspaceSize): " SIZE_FORMAT, \
         MetaspaceUtils::committed_bytes(), MaxMetaspaceSize);
1828
// Mutators for the running total of committed words; expand_lock() must be
// held. Both paths re-check the MaxMetaspaceSize invariant afterwards.
void VirtualSpaceList::inc_committed_words(size_t v) {
  assert_lock_strong(SpaceManager::expand_lock());
  _committed_words = _committed_words + v;

  assert_committed_below_limit();
}
void VirtualSpaceList::dec_committed_words(size_t v) {
  assert_lock_strong(SpaceManager::expand_lock());
  _committed_words = _committed_words - v;

  assert_committed_below_limit();
}
1841
1842 void VirtualSpaceList::inc_virtual_space_count() {
1843 assert_lock_strong(SpaceManager::expand_lock());
1844 _virtual_space_count++;
1845 }
1846 void VirtualSpaceList::dec_virtual_space_count() {
1847 assert_lock_strong(SpaceManager::expand_lock());
2402 if (old_cap_until_GC != NULL) {
2403 *old_cap_until_GC = capacity_until_GC;
2404 }
2405 return true;
2406 }
2407
// Atomically lowers the GC high-water mark by v (commit-granule aligned)
// and returns the new value. The casts assume size_t and intptr_t have
// the same width and that v is small enough not to flip the sign.
size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
  assert_is_aligned(v, Metaspace::commit_alignment());

  return (size_t)Atomic::sub((intptr_t)v, &_capacity_until_GC);
}
2413
void MetaspaceGC::initialize() {
  // Set the high-water mark to MaxMetaspaceSize during VM initialization since
  // we can't do a GC during initialization.
  _capacity_until_GC = MaxMetaspaceSize;
}
2419
void MetaspaceGC::post_initialize() {
  // Reset the high-water mark once the VM initialization is done.
  // Drop from MaxMetaspaceSize to whatever is larger: what is already
  // committed, or the configured MetaspaceSize.
  _capacity_until_GC = MAX2(MetaspaceUtils::committed_bytes(), MetaspaceSize);
}
2424
2425 bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
2426 // Check if the compressed class space is full.
2427 if (is_class && Metaspace::using_class_space()) {
2428 size_t class_committed = MetaspaceUtils::committed_bytes(Metaspace::ClassType);
2429 if (class_committed + word_size * BytesPerWord > CompressedClassSpaceSize) {
2430 log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by " SIZE_FORMAT " words (CompressedClassSpaceSize = " SIZE_FORMAT " words)",
2431 (is_class ? "class" : "non-class"), word_size, CompressedClassSpaceSize / sizeof(MetaWord));
2432 return false;
2433 }
2434 }
2435
2436 // Check if the user has imposed a limit on the metaspace memory.
2437 size_t committed_bytes = MetaspaceUtils::committed_bytes();
2438 if (committed_bytes + word_size * BytesPerWord > MaxMetaspaceSize) {
2439 log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by " SIZE_FORMAT " words (MaxMetaspaceSize = " SIZE_FORMAT " words)",
2440 (is_class ? "class" : "non-class"), word_size, MaxMetaspaceSize / sizeof(MetaWord));
2441 return false;
2442 }
2443
2444 return true;
2445 }
2446
2447 size_t MetaspaceGC::allowed_expansion() {
2448 size_t committed_bytes = MetaspaceUtils::committed_bytes();
2449 size_t capacity_until_gc = capacity_until_GC();
2450
2451 assert(capacity_until_gc >= committed_bytes,
2452 "capacity_until_gc: " SIZE_FORMAT " < committed_bytes: " SIZE_FORMAT,
2453 capacity_until_gc, committed_bytes);
2454
2455 size_t left_until_max = MaxMetaspaceSize - committed_bytes;
2456 size_t left_until_GC = capacity_until_gc - committed_bytes;
2457 size_t left_to_commit = MIN2(left_until_GC, left_until_max);
2458 log_trace(gc, metaspace, freelist)("allowed expansion words: " SIZE_FORMAT
2459 " (left_until_max: " SIZE_FORMAT ", left_until_GC: " SIZE_FORMAT ".",
2460 left_to_commit / BytesPerWord, left_until_max / BytesPerWord, left_until_GC / BytesPerWord);
2461
2462 return left_to_commit / BytesPerWord;
2463 }
2464
2465 void MetaspaceGC::compute_new_size() {
2466 assert(_shrink_factor <= 100, "invalid shrink factor");
2467 uint current_shrink_factor = _shrink_factor;
2468 _shrink_factor = 0;
2469
2470 // Using committed_bytes() for used_after_gc is an overestimation, since the
2471 // chunk free lists are included in committed_bytes() and the memory in an
2472 // un-fragmented chunk free list is available for future allocations.
2473 // However, if the chunk free lists becomes fragmented, then the memory may
2474 // not be available for future allocations and the memory is therefore "in use".
2475 // Including the chunk free lists in the definition of "in use" is therefore
2476 // necessary. Not including the chunk free lists can cause capacity_until_GC to
2477 // shrink below committed_bytes() and this has caused serious bugs in the past.
2478 const size_t used_after_gc = MetaspaceUtils::committed_bytes();
2479 const size_t capacity_until_GC = MetaspaceGC::capacity_until_GC();
2480
2481 const double minimum_free_percentage = MinMetaspaceFreeRatio / 100.0;
2482 const double maximum_used_percentage = 1.0 - minimum_free_percentage;
2483
2484 const double min_tmp = used_after_gc / maximum_used_percentage;
2485 size_t minimum_desired_capacity =
2486 (size_t)MIN2(min_tmp, double(max_uintx));
2487 // Don't shrink less than the initial generation size
2488 minimum_desired_capacity = MAX2(minimum_desired_capacity,
2489 MetaspaceSize);
2490
2491 log_trace(gc, metaspace)("MetaspaceGC::compute_new_size: ");
2492 log_trace(gc, metaspace)(" minimum_free_percentage: %6.2f maximum_used_percentage: %6.2f",
2493 minimum_free_percentage, maximum_used_percentage);
2494 log_trace(gc, metaspace)(" used_after_gc : %6.1fKB", used_after_gc / (double) K);
2495
2496
2497 size_t shrink_bytes = 0;
2498 if (capacity_until_GC < minimum_desired_capacity) {
3467 Metaspace::MetaspaceType space_type,
3468 Mutex* lock) :
3469 _mdtype(mdtype),
3470 _space_type(space_type),
3471 _allocated_blocks_words(0),
3472 _allocated_chunks_words(0),
3473 _allocated_chunks_count(0),
3474 _block_freelists(NULL),
3475 _lock(lock)
3476 {
3477 initialize();
3478 }
3479
3480 void SpaceManager::inc_size_metrics(size_t words) {
3481 assert_lock_strong(SpaceManager::expand_lock());
3482 // Total of allocated Metachunks and allocated Metachunks count
3483 // for each SpaceManager
3484 _allocated_chunks_words = _allocated_chunks_words + words;
3485 _allocated_chunks_count++;
3486 // Global total of capacity in allocated Metachunks
3487 MetaspaceUtils::inc_capacity(mdtype(), words);
3488 // Global total of allocated Metablocks.
3489 // used_words_slow() includes the overhead in each
3490 // Metachunk so include it in the used when the
3491 // Metachunk is first added (so only added once per
3492 // Metachunk).
3493 MetaspaceUtils::inc_used(mdtype(), Metachunk::overhead());
3494 }
3495
// Records 'words' newly handed out to callers, in both the
// per-SpaceManager counter (atomically — allocation can race with
// concurrent readers) and the global per-type total.
void SpaceManager::inc_used_metrics(size_t words) {
  // Add to the per SpaceManager total
  Atomic::add(words, &_allocated_blocks_words);
  // Add to the global total
  MetaspaceUtils::inc_used(mdtype(), words);
}
3502
// Removes this SpaceManager's entire contribution from the global
// capacity and used totals (mirror of inc_size_metrics/inc_used_metrics),
// called when the SpaceManager dies.
void SpaceManager::dec_total_from_size_metrics() {
  MetaspaceUtils::dec_capacity(mdtype(), allocated_chunks_words());
  MetaspaceUtils::dec_used(mdtype(), allocated_blocks_words());
  // Also deduct the overhead per Metachunk
  MetaspaceUtils::dec_used(mdtype(), allocated_chunks_count() * Metachunk::overhead());
}
3509
// One-time setup: arm the allocation-failure injection counter and
// clear all in-use chunk lists plus the current chunk.
void SpaceManager::initialize() {
  Metadebug::init_allocation_fail_alot_count();
  for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
    _chunks_in_use[i] = NULL;
  }
  _current_chunk = NULL;
  log_trace(gc, metaspace, freelist)("SpaceManager(): " PTR_FORMAT, p2i(this));
}
3518
3519 SpaceManager::~SpaceManager() {
3520 // This call this->_lock which can't be done while holding expand_lock()
3521 assert(sum_capacity_in_chunks_in_use() == allocated_chunks_words(),
3522 "sum_capacity_in_chunks_in_use() " SIZE_FORMAT
3523 " allocated_chunks_words() " SIZE_FORMAT,
3524 sum_capacity_in_chunks_in_use(), allocated_chunks_words());
3525
3526 MutexLockerEx fcl(SpaceManager::expand_lock(),
3527 Mutex::_no_safepoint_check_flag);
3760 curr_total += curr->word_size();
3761 used += curr->used_word_size();
3762 capacity += curr->word_size();
3763 waste += curr->free_word_size() + curr->overhead();;
3764 }
3765 }
3766
3767 if (log_is_enabled(Trace, gc, metaspace, freelist)) {
3768 if (block_freelists() != NULL) block_freelists()->print_on(out);
3769 }
3770
3771 size_t free = current_chunk() == NULL ? 0 : current_chunk()->free_word_size();
3772 // Free space isn't wasted.
3773 waste -= free;
3774
3775 out->print_cr("total of all chunks " SIZE_FORMAT " used " SIZE_FORMAT
3776 " free " SIZE_FORMAT " capacity " SIZE_FORMAT
3777 " waste " SIZE_FORMAT, curr_total, used, free, capacity, waste);
3778 }
3779
3780 // MetaspaceUtils
3781
3782
// Running totals, indexed by Metaspace::MetadataType. _used_words is
// updated with Atomic ops (see inc_used/dec_used), hence volatile;
// _capacity_words is guarded by the expand lock.
size_t MetaspaceUtils::_capacity_words[] = {0, 0};
volatile size_t MetaspaceUtils::_used_words[] = {0, 0};
3785
3786 size_t MetaspaceUtils::free_bytes(Metaspace::MetadataType mdtype) {
3787 VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3788 return list == NULL ? 0 : list->free_bytes();
3789 }
3790
3791 size_t MetaspaceUtils::free_bytes() {
3792 return free_bytes(Metaspace::ClassType) + free_bytes(Metaspace::NonClassType);
3793 }
3794
// Lowers the global capacity total for mdtype; guarded by the expand
// lock, and asserts against underflow.
void MetaspaceUtils::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {
  assert_lock_strong(SpaceManager::expand_lock());
  assert(words <= capacity_words(mdtype),
    "About to decrement below 0: words " SIZE_FORMAT
    " is greater than _capacity_words[%u] " SIZE_FORMAT,
    words, mdtype, capacity_words(mdtype));
  _capacity_words[mdtype] -= words;
}
3803
// Raises the global capacity total for mdtype.
void MetaspaceUtils::inc_capacity(Metaspace::MetadataType mdtype, size_t words) {
  assert_lock_strong(SpaceManager::expand_lock());
  // NOTE(review): the update is a plain add guarded by the expand lock
  // (asserted above); the original "Needs to be atomic" note suggests
  // lock-free readers may exist — confirm before relying on either.
  _capacity_words[mdtype] += words;
}
3809
// Lowers the global used total for mdtype; asserts against underflow.
void MetaspaceUtils::dec_used(Metaspace::MetadataType mdtype, size_t words) {
  assert(words <= used_words(mdtype),
    "About to decrement below 0: words " SIZE_FORMAT
    " is greater than _used_words[%u] " SIZE_FORMAT,
    words, mdtype, used_words(mdtype));
  // For CMS deallocation of the Metaspaces occurs during the
  // sweep which is a concurrent phase. Protection by the expand_lock()
  // is not enough since allocation is on a per Metaspace basis
  // and protected by the Metaspace lock.
  Atomic::sub(words, &_used_words[mdtype]);
}
3821
// Raises the global used total for mdtype.
void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
  // _used_words tracks allocations for
  // each piece of metadata. Those allocations are
  // generally done concurrently by different application
  // threads so must be done atomically.
  Atomic::add(words, &_used_words[mdtype]);
}
3829
3830 size_t MetaspaceUtils::used_bytes_slow(Metaspace::MetadataType mdtype) {
3831 size_t used = 0;
3832 ClassLoaderDataGraphMetaspaceIterator iter;
3833 while (iter.repeat()) {
3834 Metaspace* msp = iter.get_next();
3835 // Sum allocated_blocks_words for each metaspace
3836 if (msp != NULL) {
3837 used += msp->used_words_slow(mdtype);
3838 }
3839 }
3840 return used * BytesPerWord;
3841 }
3842
3843 size_t MetaspaceUtils::free_bytes_slow(Metaspace::MetadataType mdtype) {
3844 size_t free = 0;
3845 ClassLoaderDataGraphMetaspaceIterator iter;
3846 while (iter.repeat()) {
3847 Metaspace* msp = iter.get_next();
3848 if (msp != NULL) {
3849 free += msp->free_words_slow(mdtype);
3850 }
3851 }
3852 return free * BytesPerWord;
3853 }
3854
3855 size_t MetaspaceUtils::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
3856 if ((mdtype == Metaspace::ClassType) && !Metaspace::using_class_space()) {
3857 return 0;
3858 }
3859 // Don't count the space in the freelists. That space will be
3860 // added to the capacity calculation as needed.
3861 size_t capacity = 0;
3862 ClassLoaderDataGraphMetaspaceIterator iter;
3863 while (iter.repeat()) {
3864 Metaspace* msp = iter.get_next();
3865 if (msp != NULL) {
3866 capacity += msp->capacity_words_slow(mdtype);
3867 }
3868 }
3869 return capacity * BytesPerWord;
3870 }
3871
// Debug-only cross-check: recomputes total capacity the slow way and
// asserts it matches the running total maintained by inc/dec_capacity.
size_t MetaspaceUtils::capacity_bytes_slow() {
#ifdef PRODUCT
  // Use capacity_bytes() in PRODUCT instead of this function.
  guarantee(false, "Should not call capacity_bytes_slow() in the PRODUCT");
#endif
  size_t class_capacity = capacity_bytes_slow(Metaspace::ClassType);
  size_t non_class_capacity = capacity_bytes_slow(Metaspace::NonClassType);
  assert(capacity_bytes() == class_capacity + non_class_capacity,
      "bad accounting: capacity_bytes() " SIZE_FORMAT
      " class_capacity + non_class_capacity " SIZE_FORMAT
      " class_capacity " SIZE_FORMAT " non_class_capacity " SIZE_FORMAT,
      capacity_bytes(), class_capacity + non_class_capacity,
      class_capacity, non_class_capacity);

  return class_capacity + non_class_capacity;
}
3888
3889 size_t MetaspaceUtils::reserved_bytes(Metaspace::MetadataType mdtype) {
3890 VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3891 return list == NULL ? 0 : list->reserved_bytes();
3892 }
3893
3894 size_t MetaspaceUtils::committed_bytes(Metaspace::MetadataType mdtype) {
3895 VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
3896 return list == NULL ? 0 : list->committed_bytes();
3897 }
3898
// Smallest chunk size handed out, in words.
size_t MetaspaceUtils::min_chunk_size_words() { return Metaspace::first_chunk_word_size(); }
3900
3901 size_t MetaspaceUtils::free_chunks_total_words(Metaspace::MetadataType mdtype) {
3902 ChunkManager* chunk_manager = Metaspace::get_chunk_manager(mdtype);
3903 if (chunk_manager == NULL) {
3904 return 0;
3905 }
3906 chunk_manager->slow_verify();
3907 return chunk_manager->free_chunks_total_words();
3908 }
3909
// Byte variant of free_chunks_total_words(mdtype).
size_t MetaspaceUtils::free_chunks_total_bytes(Metaspace::MetadataType mdtype) {
  return free_chunks_total_words(mdtype) * BytesPerWord;
}
3913
3914 size_t MetaspaceUtils::free_chunks_total_words() {
3915 return free_chunks_total_words(Metaspace::ClassType) +
3916 free_chunks_total_words(Metaspace::NonClassType);
3917 }
3918
// Byte variant of free_chunks_total_words().
size_t MetaspaceUtils::free_chunks_total_bytes() {
  return free_chunks_total_words() * BytesPerWord;
}
3922
// True when a chunk manager (and therefore a free list) exists for mdtype.
bool MetaspaceUtils::has_chunk_free_list(Metaspace::MetadataType mdtype) {
  return Metaspace::get_chunk_manager(mdtype) != NULL;
}
3926
3927 MetaspaceChunkFreeListSummary MetaspaceUtils::chunk_free_list_summary(Metaspace::MetadataType mdtype) {
3928 if (!has_chunk_free_list(mdtype)) {
3929 return MetaspaceChunkFreeListSummary();
3930 }
3931
3932 const ChunkManager* cm = Metaspace::get_chunk_manager(mdtype);
3933 return cm->chunk_free_list_summary();
3934 }
3935
// GC log line: metaspace used before the GC -> after the GC (reserved).
void MetaspaceUtils::print_metaspace_change(size_t prev_metadata_used) {
  log_info(gc, metaspace)("Metaspace: " SIZE_FORMAT "K->" SIZE_FORMAT "K(" SIZE_FORMAT "K)",
                          prev_metadata_used/K, used_bytes()/K, reserved_bytes()/K);
}
3940
3941 void MetaspaceUtils::print_on(outputStream* out) {
3942 Metaspace::MetadataType nct = Metaspace::NonClassType;
3943
3944 out->print_cr(" Metaspace "
3945 "used " SIZE_FORMAT "K, "
3946 "capacity " SIZE_FORMAT "K, "
3947 "committed " SIZE_FORMAT "K, "
3948 "reserved " SIZE_FORMAT "K",
3949 used_bytes()/K,
3950 capacity_bytes()/K,
3951 committed_bytes()/K,
3952 reserved_bytes()/K);
3953
3954 if (Metaspace::using_class_space()) {
3955 Metaspace::MetadataType ct = Metaspace::ClassType;
3956 out->print_cr(" class space "
3957 "used " SIZE_FORMAT "K, "
3958 "capacity " SIZE_FORMAT "K, "
3959 "committed " SIZE_FORMAT "K, "
3960 "reserved " SIZE_FORMAT "K",
3961 used_bytes(ct)/K,
3962 capacity_bytes(ct)/K,
3963 committed_bytes(ct)/K,
3964 reserved_bytes(ct)/K);
3965 }
3966 }
3967
3968 // Print information for class space and data space separately.
3969 // This is almost the same as above.
// Prints chunk accounting for one metadata type and, at a safepoint,
// asserts that used + unused + free-chunk capacity equals the total
// capacity in allocated chunks.
void MetaspaceUtils::print_on(outputStream* out, Metaspace::MetadataType mdtype) {
  size_t free_chunks_capacity_bytes = free_chunks_total_bytes(mdtype);
  size_t capacity_bytes = capacity_bytes_slow(mdtype);
  size_t used_bytes = used_bytes_slow(mdtype);
  size_t free_bytes = free_bytes_slow(mdtype);
  size_t used_and_free = used_bytes + free_bytes +
                           free_chunks_capacity_bytes;
  out->print_cr("  Chunk accounting: (used in chunks " SIZE_FORMAT
                "K + unused in chunks " SIZE_FORMAT "K  + "
                " capacity in free chunks " SIZE_FORMAT "K) = " SIZE_FORMAT
                "K  capacity in allocated chunks " SIZE_FORMAT "K",
                used_bytes / K,
                free_bytes / K,
                free_chunks_capacity_bytes / K,
                used_and_free / K,
                capacity_bytes / K);
  // Accounting can only be correct if we got the values during a safepoint
  assert(!SafepointSynchronize::is_at_safepoint() || used_and_free == capacity_bytes, "Accounting is wrong");
}
3989
3990 // Print total fragmentation for class metaspaces
// Print total fragmentation for class metaspaces: sums per-chunk-type
// waste and counts over every live class-space SpaceManager.
void MetaspaceUtils::print_class_waste(outputStream* out) {
  assert(Metaspace::using_class_space(), "class metaspace not used");
  size_t cls_specialized_waste = 0, cls_small_waste = 0, cls_medium_waste = 0;
  size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
  ClassLoaderDataGraphMetaspaceIterator iter;
  while (iter.repeat()) {
    Metaspace* msp = iter.get_next();
    if (msp != NULL) {
      cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
      cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
      cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
      cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
      cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
      cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
      cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
    }
  }
  out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
                SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
                SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
                "large count " SIZE_FORMAT,
                cls_specialized_count, cls_specialized_waste,
                cls_small_count, cls_small_waste,
                cls_medium_count, cls_medium_waste, cls_humongous_count);
}
4016
4017 // Print total fragmentation for data and class metaspaces separately
4018 void MetaspaceUtils::print_waste(outputStream* out) {
4019 size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
4020 size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;
4021
4022 ClassLoaderDataGraphMetaspaceIterator iter;
4023 while (iter.repeat()) {
4024 Metaspace* msp = iter.get_next();
4025 if (msp != NULL) {
4026 specialized_waste += msp->vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
4027 specialized_count += msp->vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
4028 small_waste += msp->vsm()->sum_waste_in_chunks_in_use(SmallIndex);
4029 small_count += msp->vsm()->sum_count_in_chunks_in_use(SmallIndex);
4030 medium_waste += msp->vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4031 medium_count += msp->vsm()->sum_count_in_chunks_in_use(MediumIndex);
4032 humongous_count += msp->vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4033 }
4034 }
4035 out->print_cr("Total fragmentation waste (words) doesn't count free space");
4036 out->print_cr(" data: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4037 SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4038 SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4191 _total_class.print_on(_out, _scale);
4192 }
4193 _out->cr();
4194
4195 MetadataStats total_anon;
4196 total_anon.add(_total_anon_metadata);
4197 total_anon.add(_total_anon_class);
4198
4199 _out->print("For anonymous classes=" SIZE_FORMAT_W(6) " ", _total_anon_count);
4200 total_anon.print_on(_out, _scale);
4201
4202 _out->print(" Metadata ");
4203 _total_anon_metadata.print_on(_out, _scale);
4204
4205 if (Metaspace::using_class_space()) {
4206 _out->print(" Class data ");
4207 _total_anon_class.print_on(_out, _scale);
4208 }
4209 }
4210
// NMT detail report: reserved/committed per space, all chunk managers,
// then per-classloader metadata via PrintCLDMetaspaceInfoClosure.
// 'scale' is the byte divisor (K/M/...) matching scale_unit().
void MetaspaceUtils::print_metadata_for_nmt(outputStream* out, size_t scale) {
  const char* unit = scale_unit(scale);
  out->print_cr("Metaspaces:");
  out->print_cr("  Metadata space: reserved=" SIZE_FORMAT_W(10) "%s committed=" SIZE_FORMAT_W(10) "%s",
            reserved_bytes(Metaspace::NonClassType) / scale, unit,
            committed_bytes(Metaspace::NonClassType) / scale, unit);
  if (Metaspace::using_class_space()) {
    out->print_cr("  Class    space: reserved=" SIZE_FORMAT_W(10) "%s committed=" SIZE_FORMAT_W(10) "%s",
              reserved_bytes(Metaspace::ClassType) / scale, unit,
              committed_bytes(Metaspace::ClassType) / scale, unit);
  }

  out->cr();
  ChunkManager::print_all_chunkmanagers(out, scale);

  out->cr();
  out->print_cr("Per-classloader metadata:");
  out->cr();

  PrintCLDMetaspaceInfoClosure cl(out, scale);
  ClassLoaderDataGraph::cld_do(&cl);
}
4233
4234
4235 // Dump global metaspace things from the end of ClassLoaderDataGraph
4236 void MetaspaceUtils::dump(outputStream* out) {
4237 out->print_cr("All Metaspace:");
4238 out->print("data space: "); print_on(out, Metaspace::NonClassType);
4239 out->print("class space: "); print_on(out, Metaspace::ClassType);
4240 print_waste(out);
4241 }
4242
4243 // Prints an ASCII representation of the given space.
4244 void MetaspaceUtils::print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype) {
4245 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
4246 const bool for_class = mdtype == Metaspace::ClassType ? true : false;
4247 VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
4248 if (vsl != NULL) {
4249 if (for_class) {
4250 if (!Metaspace::using_class_space()) {
4251 out->print_cr("No Class Space.");
4252 return;
4253 }
4254 out->print_raw("---- Metaspace Map (Class Space) ----");
4255 } else {
4256 out->print_raw("---- Metaspace Map (Non-Class Space) ----");
4257 }
4258 // Print legend:
4259 out->cr();
4260 out->print_cr("Chunk Types (uppercase chunks are in use): x-specialized, s-small, m-medium, h-humongous.");
4261 out->cr();
4262 VirtualSpaceList* const vsl = for_class ? Metaspace::class_space_list() : Metaspace::space_list();
4263 vsl->print_map(out);
4264 out->cr();
4265 }
4266 }
4267
4268 void MetaspaceUtils::verify_free_chunks() {
4269 Metaspace::chunk_manager_metadata()->verify();
4270 if (Metaspace::using_class_space()) {
4271 Metaspace::chunk_manager_class()->verify();
4272 }
4273 }
4274
4275 void MetaspaceUtils::verify_capacity() {
4276 #ifdef ASSERT
4277 size_t running_sum_capacity_bytes = capacity_bytes();
4278 // For purposes of the running sum of capacity, verify against capacity
4279 size_t capacity_in_use_bytes = capacity_bytes_slow();
4280 assert(running_sum_capacity_bytes == capacity_in_use_bytes,
4281 "capacity_words() * BytesPerWord " SIZE_FORMAT
4282 " capacity_bytes_slow()" SIZE_FORMAT,
4283 running_sum_capacity_bytes, capacity_in_use_bytes);
4284 for (Metaspace::MetadataType i = Metaspace::ClassType;
4285 i < Metaspace:: MetadataTypeCount;
4286 i = (Metaspace::MetadataType)(i + 1)) {
4287 size_t capacity_in_use_bytes = capacity_bytes_slow(i);
4288 assert(capacity_bytes(i) == capacity_in_use_bytes,
4289 "capacity_bytes(%u) " SIZE_FORMAT
4290 " capacity_bytes_slow(%u)" SIZE_FORMAT,
4291 i, capacity_bytes(i), i, capacity_in_use_bytes);
4292 }
4293 #endif
4294 }
4295
4296 void MetaspaceUtils::verify_used() {
4297 #ifdef ASSERT
4298 size_t running_sum_used_bytes = used_bytes();
4299 // For purposes of the running sum of used, verify against used
4300 size_t used_in_use_bytes = used_bytes_slow();
4301 assert(used_bytes() == used_in_use_bytes,
4302 "used_bytes() " SIZE_FORMAT
4303 " used_bytes_slow()" SIZE_FORMAT,
4304 used_bytes(), used_in_use_bytes);
4305 for (Metaspace::MetadataType i = Metaspace::ClassType;
4306 i < Metaspace:: MetadataTypeCount;
4307 i = (Metaspace::MetadataType)(i + 1)) {
4308 size_t used_in_use_bytes = used_bytes_slow(i);
4309 assert(used_bytes(i) == used_in_use_bytes,
4310 "used_bytes(%u) " SIZE_FORMAT
4311 " used_bytes_slow(%u)" SIZE_FORMAT,
4312 i, used_bytes(i), i, used_in_use_bytes);
4313 }
4314 #endif
4315 }
4316
// Runs both debug-only accounting cross-checks.
void MetaspaceUtils::verify_metrics() {
  verify_capacity();
  verify_used();
}
4321
4322
4323 // Metaspace methods
4324
// Chunk-size and alignment parameters; zero until set during VM
// initialization (presumably by Metaspace's global setup — TODO confirm).
size_t Metaspace::_first_chunk_word_size = 0;
size_t Metaspace::_first_class_chunk_word_size = 0;

size_t Metaspace::_commit_alignment = 0;
size_t Metaspace::_reserve_alignment = 0;
4330
// Constructs a Metaspace of the given type, guarded by 'lock'; all
// real setup is delegated to initialize().
Metaspace::Metaspace(Mutex* lock, MetaspaceType type) {
  initialize(lock, type);
}
4334
4335 Metaspace::~Metaspace() {
4336 delete _vsm;
4337 if (using_class_space()) {
4873 assert(using_class_space(), "Has to use class space");
4874 return class_vsm()->calc_chunk_size(word_size);
4875 }
4876
4877 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
4878 tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
4879
4880 // If result is still null, we are out of memory.
4881 Log(gc, metaspace, freelist) log;
4882 if (log.is_info()) {
4883 log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
4884 is_class_space_allocation(mdtype) ? "class" : "data", word_size);
4885 ResourceMark rm;
4886 if (log.is_debug()) {
4887 if (loader_data->metaspace_or_null() != NULL) {
4888 LogStream ls(log.debug());
4889 loader_data->dump(&ls);
4890 }
4891 }
4892 LogStream ls(log.info());
4893 MetaspaceUtils::dump(&ls);
4894 MetaspaceUtils::print_metaspace_map(&ls, mdtype);
4895 ChunkManager::print_all_chunkmanagers(&ls);
4896 }
4897
4898 bool out_of_compressed_class_space = false;
4899 if (is_class_space_allocation(mdtype)) {
4900 Metaspace* metaspace = loader_data->metaspace_non_null();
4901 out_of_compressed_class_space =
4902 MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
4903 (metaspace->class_chunk_size(word_size) * BytesPerWord) >
4904 CompressedClassSpaceSize;
4905 }
4906
4907 // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
4908 const char* space_string = out_of_compressed_class_space ?
4909 "Compressed class space" : "Metaspace";
4910
4911 report_java_out_of_memory(space_string);
4912
4913 if (JvmtiExport::should_post_resource_exhausted()) {
4914 JvmtiExport::post_resource_exhausted(
4915 JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
4916 space_string);
4917 }
4918
4919 if (!is_init_completed()) {
4920 vm_exit_during_initialization("OutOfMemoryError", space_string);
4921 }
4922
4996 guarantee(chunk != NULL, "Sanity");
4997 // Verify chunk itself; then verify that it is consistent with the
4998 // occupany map of its containing node.
4999 chunk->verify();
5000 VirtualSpaceNode* const vsn = chunk->container();
5001 OccupancyMap* const ocmap = vsn->occupancy_map();
5002 ocmap->verify_for_chunk(chunk);
5003 }
5004 #endif
5005
5006 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse) {
5007 chunk->set_is_tagged_free(!inuse);
5008 OccupancyMap* const ocmap = chunk->container()->occupancy_map();
5009 ocmap->set_region_in_use((MetaWord*)chunk, chunk->word_size(), inuse);
5010 }
5011
5012 /////////////// Unit tests ///////////////
5013
5014 #ifndef PRODUCT
5015
// Non-product sanity tests for the MetaspaceUtils accounting queries.
class TestMetaspaceUtilsTest : AllStatic {
 public:
  // Reserved totals are positive, bound committed from above, and the
  // per-type values are consistent with the overall total.
  static void test_reserved() {
    size_t reserved = MetaspaceUtils::reserved_bytes();

    assert(reserved > 0, "assert");

    size_t committed  = MetaspaceUtils::committed_bytes();
    assert(committed <= reserved, "assert");

    size_t reserved_metadata = MetaspaceUtils::reserved_bytes(Metaspace::NonClassType);
    assert(reserved_metadata > 0, "assert");
    assert(reserved_metadata <= reserved, "assert");

    if (UseCompressedClassPointers) {
      size_t reserved_class    = MetaspaceUtils::reserved_bytes(Metaspace::ClassType);
      assert(reserved_class > 0, "assert");
      assert(reserved_class < reserved, "assert");
    }
  }

  // Committed totals are positive, bounded by reserved, and the
  // per-type values are consistent with the overall total.
  static void test_committed() {
    size_t committed = MetaspaceUtils::committed_bytes();

    assert(committed > 0, "assert");

    size_t reserved  = MetaspaceUtils::reserved_bytes();
    assert(committed <= reserved, "assert");

    size_t committed_metadata = MetaspaceUtils::committed_bytes(Metaspace::NonClassType);
    assert(committed_metadata > 0, "assert");
    assert(committed_metadata <= committed, "assert");

    if (UseCompressedClassPointers) {
      size_t committed_class    = MetaspaceUtils::committed_bytes(Metaspace::ClassType);
      assert(committed_class > 0, "assert");
      assert(committed_class < committed, "assert");
    }
  }

  // Requesting a chunk larger than VirtualSpaceSize must not crash.
  // NOTE(review): vs_list is intentionally not freed — leak is
  // tolerated in this non-product test.
  static void test_virtual_space_list_large_chunk() {
    VirtualSpaceList* vs_list = new VirtualSpaceList(os::vm_allocation_granularity());
    MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
    // A size larger than VirtualSpaceSize (256k) and add one page to make it _not_ be
    // vm_allocation_granularity aligned on Windows.
    size_t large_size = (size_t)(2*256*K + (os::vm_page_size()/BytesPerWord));
    large_size += (os::vm_page_size()/BytesPerWord);
    vs_list->get_new_chunk(large_size, 0);
  }

  // Entry point: run all MetaspaceUtils sanity checks.
  static void test() {
    test_reserved();
    test_committed();
    test_virtual_space_list_large_chunk();
  }
};
5072
// C-linkage-style entry point invoked by the internal VM test runner.
void TestMetaspaceUtils_test() {
  TestMetaspaceUtilsTest::test();
}
5076
5077 class TestVirtualSpaceNodeTest {
  // Decomposes a word budget into medium, small and specialized chunk
  // counts (largest first); the budget must divide with no remainder.
  static void chunk_up(size_t words_left, size_t& num_medium_chunks,
                       size_t& num_small_chunks,
                       size_t& num_specialized_chunks) {
    num_medium_chunks = words_left / MediumChunk;
    words_left = words_left % MediumChunk;

    num_small_chunks = words_left / SmallChunk;
    words_left = words_left % SmallChunk;
    // how many specialized chunks can we get?
    num_specialized_chunks = words_left / SpecializedChunk;
    assert(words_left % SpecializedChunk == 0, "should be nothing left");
  }
5090
5091 public:
5092 static void test() {
5093 MutexLockerEx ml(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
5094 const size_t vsn_test_size_words = MediumChunk * 4;
|