514 _current_virtual_space = v;
515 }
516
517 void link_vs(VirtualSpaceNode* new_entry);
518
519 // Get another virtual space and add it to the list. This
520 // is typically prompted by a failed attempt to allocate a chunk
521 // and is typically followed by the allocation of a chunk.
522 bool create_new_virtual_space(size_t vs_word_size);
523
524 // Chunk up the unused committed space in the current
525 // virtual space and add the chunks to the free list.
526 void retire_current_virtual_space();
527
528 public:
529 VirtualSpaceList(size_t word_size);
530 VirtualSpaceList(ReservedSpace rs);
531
532 size_t free_bytes();
533
534 Metachunk* get_new_chunk(size_t word_size,
535 size_t grow_chunks_by_words,
536 size_t medium_chunk_bunch);
537
538 bool expand_node_by(VirtualSpaceNode* node,
539 size_t min_words,
540 size_t preferred_words);
541
542 bool expand_by(size_t min_words,
543 size_t preferred_words);
544
545 VirtualSpaceNode* current_virtual_space() {
546 return _current_virtual_space;
547 }
548
549 bool is_class() const { return _is_class; }
550
551 bool initialization_succeeded() { return _virtual_space_list != NULL; }
552
553 size_t reserved_words() { return _reserved_words; }
554 size_t reserved_bytes() { return reserved_words() * BytesPerWord; }
555 size_t committed_words() { return _committed_words; }
556 size_t committed_bytes() { return committed_words() * BytesPerWord; }
670 // Add chunk to the list of chunks in use
671 void add_chunk(Metachunk* v, bool make_current);
672 void retire_current_chunk();
673
674 Mutex* lock() const { return _lock; }
675
676 const char* chunk_size_name(ChunkIndex index) const;
677
678 protected:
679 void initialize();
680
681 public:
682 SpaceManager(Metaspace::MetadataType mdtype,
683 Mutex* lock);
684 ~SpaceManager();
685
686 enum ChunkMultiples {
687 MediumChunkMultiple = 4
688 };
689
690 bool is_class() { return _mdtype == Metaspace::ClassType; }
691
692 // Accessors
693 size_t specialized_chunk_size() { return (size_t) is_class() ? ClassSpecializedChunk : SpecializedChunk; }
694 size_t small_chunk_size() { return (size_t) is_class() ? ClassSmallChunk : SmallChunk; }
695 size_t medium_chunk_size() { return (size_t) is_class() ? ClassMediumChunk : MediumChunk; }
696 size_t medium_chunk_bunch() { return medium_chunk_size() * MediumChunkMultiple; }
697
698 size_t smallest_chunk_size() { return specialized_chunk_size(); }
699
700 size_t allocated_blocks_words() const { return _allocated_blocks_words; }
701 size_t allocated_blocks_bytes() const { return _allocated_blocks_words * BytesPerWord; }
702 size_t allocated_chunks_words() const { return _allocated_chunks_words; }
703 size_t allocated_chunks_count() const { return _allocated_chunks_count; }
704
705 bool is_humongous(size_t word_size) { return word_size > medium_chunk_size(); }
706
707 static Mutex* expand_lock() { return _expand_lock; }
708
709 // Increment the per Metaspace and global running sums for Metachunks
710 // by the given size. This is used when a Metachunk to added to
711 // the in-use list.
712 void inc_size_metrics(size_t words);
713 // Increment the per Metaspace and global running sums Metablocks by the given
714 // size. This is used when a Metablock is allocated.
715 void inc_used_metrics(size_t words);
716 // Delete the portion of the running sums for this SpaceManager. That is,
717 // the globals running sums for the Metachunks and Metablocks are
718 // decremented for all the Metachunks in-use by this SpaceManager.
719 void dec_total_from_size_metrics();
720
721 // Set the sizes for the initial chunks.
722 void get_initial_chunk_sizes(Metaspace::MetaspaceType type,
723 size_t* chunk_word_size,
724 size_t* class_chunk_word_size);
725
726 size_t sum_capacity_in_chunks_in_use() const;
727 size_t sum_used_in_chunks_in_use() const;
728 size_t sum_free_in_chunks_in_use() const;
729 size_t sum_waste_in_chunks_in_use() const;
730 size_t sum_waste_in_chunks_in_use(ChunkIndex index ) const;
731
732 size_t sum_count_in_chunks_in_use();
733 size_t sum_count_in_chunks_in_use(ChunkIndex i);
734
735 Metachunk* get_new_chunk(size_t word_size, size_t grow_chunks_by_words);
736
737 // Block allocation and deallocation.
738 // Allocates a block from the current chunk
739 MetaWord* allocate(size_t word_size);
740
741 // Helper for allocations
742 MetaWord* allocate_work(size_t word_size);
743
744 // Returns a block to the per manager freelist
745 void deallocate(MetaWord* p, size_t word_size);
746
747 // Based on the allocation size and a minimum chunk size,
748 // returned chunk size (for expanding space for chunk allocation).
749 size_t calc_chunk_size(size_t allocation_word_size);
750
751 // Called when an allocation from the current chunk fails.
752 // Gets a new chunk (may require getting a new virtual space),
753 // and allocates from that chunk.
754 MetaWord* grow_and_allocate(size_t word_size);
755
1302 size_t grow_vs_words = MAX2((size_t)VirtualSpaceSize, preferred_words);
1303 grow_vs_words = align_size_up(grow_vs_words, Metaspace::reserve_alignment_words());
1304
1305 if (create_new_virtual_space(grow_vs_words)) {
1306 if (current_virtual_space()->is_pre_committed()) {
1307 // The memory was pre-committed, so we are done here.
1308 assert(min_words <= current_virtual_space()->committed_words(),
1309 "The new VirtualSpace was pre-committed, so it"
1310 "should be large enough to fit the alloc request.");
1311 return true;
1312 }
1313
1314 return expand_node_by(current_virtual_space(),
1315 min_words,
1316 max_expansion_words);
1317 }
1318
1319 return false;
1320 }
1321
1322 Metachunk* VirtualSpaceList::get_new_chunk(size_t word_size,
1323 size_t grow_chunks_by_words,
1324 size_t medium_chunk_bunch) {
1325
1326 // Allocate a chunk out of the current virtual space.
1327 Metachunk* next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1328
1329 if (next != NULL) {
1330 return next;
1331 }
1332
1333 // The expand amount is currently only determined by the requested sizes
1334 // and not how much committed memory is left in the current virtual space.
1335
1336 size_t min_word_size = align_size_up(grow_chunks_by_words, Metaspace::commit_alignment_words());
1337 size_t preferred_word_size = align_size_up(medium_chunk_bunch, Metaspace::commit_alignment_words());
1338 if (min_word_size >= preferred_word_size) {
1339 // Can happen when humongous chunks are allocated.
1340 preferred_word_size = min_word_size;
1341 }
1342
1343 bool expanded = expand_by(min_word_size, preferred_word_size);
1344 if (expanded) {
1345 next = current_virtual_space()->get_chunk_vs(grow_chunks_by_words);
1346 assert(next != NULL, "The allocation was expected to succeed after the expansion");
1347 }
1348
1349 return next;
1350 }
1351
1352 void VirtualSpaceList::print_on(outputStream* st) const {
1353 if (TraceMetadataChunkAllocation && Verbose) {
1354 VirtualSpaceListIterator iter(virtual_space_list());
1355 while (iter.repeat()) {
1356 VirtualSpaceNode* node = iter.get_next();
1357 node->print_on(st);
1358 }
1359 }
1360 }
1361
1362 // MetaspaceGC methods
1363
1364 // VM_CollectForMetadataAllocation is the vm operation used to GC.
1365 // Within the VM operation after the GC the attempt to allocate the metadata
1866 } else {
1867 list_count = humongous_dictionary()->total_count();
1868 }
1869 gclog_or_tty->print("ChunkManager::chunk_freelist_allocate: " PTR_FORMAT " chunk "
1870 PTR_FORMAT " size " SIZE_FORMAT " count " SIZE_FORMAT " ",
1871 this, chunk, chunk->word_size(), list_count);
1872 locked_print_free_chunks(gclog_or_tty);
1873 }
1874
1875 return chunk;
1876 }
1877
1878 void ChunkManager::print_on(outputStream* out) const {
1879 if (PrintFLSStatistics != 0) {
1880 const_cast<ChunkManager *>(this)->humongous_dictionary()->report_statistics();
1881 }
1882 }
1883
1884 // SpaceManager methods
1885
1886 void SpaceManager::get_initial_chunk_sizes(Metaspace::MetaspaceType type,
1887 size_t* chunk_word_size,
1888 size_t* class_chunk_word_size) {
1889 switch (type) {
1890 case Metaspace::BootMetaspaceType:
1891 *chunk_word_size = Metaspace::first_chunk_word_size();
1892 *class_chunk_word_size = Metaspace::first_class_chunk_word_size();
1893 break;
1894 case Metaspace::ROMetaspaceType:
1895 *chunk_word_size = SharedReadOnlySize / wordSize;
1896 *class_chunk_word_size = ClassSpecializedChunk;
1897 break;
1898 case Metaspace::ReadWriteMetaspaceType:
1899 *chunk_word_size = SharedReadWriteSize / wordSize;
1900 *class_chunk_word_size = ClassSpecializedChunk;
1901 break;
1902 case Metaspace::AnonymousMetaspaceType:
1903 case Metaspace::ReflectionMetaspaceType:
1904 *chunk_word_size = SpecializedChunk;
1905 *class_chunk_word_size = ClassSpecializedChunk;
1906 break;
1907 default:
1908 *chunk_word_size = SmallChunk;
1909 *class_chunk_word_size = ClassSmallChunk;
1910 break;
1911 }
1912 assert(*chunk_word_size != 0 && *class_chunk_word_size != 0,
1913 err_msg("Initial chunks sizes bad: data " SIZE_FORMAT
1914 " class " SIZE_FORMAT,
1915 *chunk_word_size, *class_chunk_word_size));
1916 }
1917
1918 size_t SpaceManager::sum_free_in_chunks_in_use() const {
1919 MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
1920 size_t free = 0;
1921 for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
1922 Metachunk* chunk = chunks_in_use(i);
1923 while (chunk != NULL) {
1924 free += chunk->free_word_size();
1925 chunk = chunk->next();
1926 }
1927 }
1928 return free;
1929 }
1930
1931 size_t SpaceManager::sum_waste_in_chunks_in_use() const {
1932 MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
1933 size_t result = 0;
1934 for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
1935 result += sum_waste_in_chunks_in_use(i);
2085 "Should have been set");
2086 assert(current_chunk() == NULL ||
2087 current_chunk()->allocate(word_size) == NULL,
2088 "Don't need to expand");
2089 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
2090
2091 if (TraceMetadataChunkAllocation && Verbose) {
2092 size_t words_left = 0;
2093 size_t words_used = 0;
2094 if (current_chunk() != NULL) {
2095 words_left = current_chunk()->free_word_size();
2096 words_used = current_chunk()->used_word_size();
2097 }
2098 gclog_or_tty->print_cr("SpaceManager::grow_and_allocate for " SIZE_FORMAT
2099 " words " SIZE_FORMAT " words used " SIZE_FORMAT
2100 " words left",
2101 word_size, words_used, words_left);
2102 }
2103
2104 // Get another chunk out of the virtual space
2105 size_t grow_chunks_by_words = calc_chunk_size(word_size);
2106 Metachunk* next = get_new_chunk(word_size, grow_chunks_by_words);
2107
2108 MetaWord* mem = NULL;
2109
2110 // If a chunk was available, add it to the in-use chunk list
2111 // and do an allocation from it.
2112 if (next != NULL) {
2113 // Add to this manager's list of chunks in use.
2114 add_chunk(next, false);
2115 mem = next->allocate(word_size);
2116 }
2117
2118 // Track metaspace memory usage statistic.
2119 track_metaspace_memory_usage();
2120
2121 return mem;
2122 }
2123
2124 void SpaceManager::print_on(outputStream* st) const {
2125
2126 for (ChunkIndex i = ZeroIndex;
2395
2396 assert(new_chunk->is_empty(), "Not ready for reuse");
2397 if (TraceMetadataChunkAllocation && Verbose) {
2398 gclog_or_tty->print("SpaceManager::add_chunk: %d) ",
2399 sum_count_in_chunks_in_use());
2400 new_chunk->print_on(gclog_or_tty);
2401 chunk_manager()->locked_print_free_chunks(gclog_or_tty);
2402 }
2403 }
2404
2405 void SpaceManager::retire_current_chunk() {
2406 if (current_chunk() != NULL) {
2407 size_t remaining_words = current_chunk()->free_word_size();
2408 if (remaining_words >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
2409 block_freelists()->return_block(current_chunk()->allocate(remaining_words), remaining_words);
2410 inc_used_metrics(remaining_words);
2411 }
2412 }
2413 }
2414
2415 Metachunk* SpaceManager::get_new_chunk(size_t word_size,
2416 size_t grow_chunks_by_words) {
2417 // Get a chunk from the chunk freelist
2418 Metachunk* next = chunk_manager()->chunk_freelist_allocate(grow_chunks_by_words);
2419
2420 if (next == NULL) {
2421 next = vs_list()->get_new_chunk(word_size,
2422 grow_chunks_by_words,
2423 medium_chunk_bunch());
2424 }
2425
2426 if (TraceMetadataHumongousAllocation && next != NULL &&
2427 SpaceManager::is_humongous(next->word_size())) {
2428 gclog_or_tty->print_cr(" new humongous chunk word size "
2429 PTR_FORMAT, next->word_size());
2430 }
2431
2432 return next;
2433 }
2434
2435 MetaWord* SpaceManager::allocate(size_t word_size) {
2436 MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
2437
2438 size_t raw_word_size = get_raw_word_size(word_size);
2439 BlockFreelist* fl = block_freelists();
2440 MetaWord* p = NULL;
2441 // Allocation from the dictionary is expensive in the sense that
2442 // the dictionary has to be searched for a size. Don't allocate
3068 UseSharedSpaces ? (address)cds_base : 0);
3069
3070 initialize_class_space(metaspace_rs);
3071
3072 if (PrintCompressedOopsMode || (PrintMiscellaneous && Verbose)) {
3073 gclog_or_tty->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: " SIZE_FORMAT,
3074 Universe::narrow_klass_base(), Universe::narrow_klass_shift());
3075 gclog_or_tty->print_cr("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
3076 compressed_class_space_size(), metaspace_rs.base(), requested_addr);
3077 }
3078 }
3079
3080 // For UseCompressedClassPointers the class space is reserved above the top of
3081 // the Java heap. The argument passed in is at the base of the compressed space.
3082 void Metaspace::initialize_class_space(ReservedSpace rs) {
3083 // The reserved space size may be bigger because of alignment, esp with UseLargePages
3084 assert(rs.size() >= CompressedClassSpaceSize,
3085 err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), CompressedClassSpaceSize));
3086 assert(using_class_space(), "Must be using class space");
3087 _class_space_list = new VirtualSpaceList(rs);
3088 _chunk_manager_class = new ChunkManager(SpecializedChunk, ClassSmallChunk, ClassMediumChunk);
3089
3090 if (!_class_space_list->initialization_succeeded()) {
3091 vm_exit_during_initialization("Failed to setup compressed class space virtual space list.");
3092 }
3093 }
3094
3095 #endif
3096
3097 void Metaspace::ergo_initialize() {
3098 if (DumpSharedSpaces) {
3099 // Using large pages when dumping the shared archive is currently not implemented.
3100 FLAG_SET_ERGO(bool, UseLargePagesInMetaspace, false);
3101 }
3102
3103 size_t page_size = os::vm_page_size();
3104 if (UseLargePages && UseLargePagesInMetaspace) {
3105 page_size = os::large_page_size();
3106 }
3107
3108 _commit_alignment = page_size;
3269 // of the boot class loader size.
3270 size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
3271 word_size = align_size_up(word_size, Metaspace::reserve_alignment_words());
3272
3273 // Initialize the list of virtual spaces.
3274 _space_list = new VirtualSpaceList(word_size);
3275 _chunk_manager_metadata = new ChunkManager(SpecializedChunk, SmallChunk, MediumChunk);
3276
3277 if (!_space_list->initialization_succeeded()) {
3278 vm_exit_during_initialization("Unable to setup metadata virtual space list.", NULL);
3279 }
3280 }
3281
3282 _tracer = new MetaspaceTracer();
3283 }
3284
3285 void Metaspace::post_initialize() {
3286 MetaspaceGC::post_initialize();
3287 }
3288
3289 Metachunk* Metaspace::get_initialization_chunk(MetadataType mdtype,
3290 size_t chunk_word_size,
3291 size_t chunk_bunch) {
3292 // Get a chunk from the chunk freelist
3293 Metachunk* chunk = get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
3294 if (chunk != NULL) {
3295 return chunk;
3296 }
3297
3298 return get_space_list(mdtype)->get_new_chunk(chunk_word_size, chunk_word_size, chunk_bunch);
3299 }
3300
3301 void Metaspace::initialize(Mutex* lock, MetaspaceType type) {
3302
3303 assert(space_list() != NULL,
3304 "Metadata VirtualSpaceList has not been initialized");
3305 assert(chunk_manager_metadata() != NULL,
3306 "Metadata ChunkManager has not been initialized");
3307
3308 _vsm = new SpaceManager(NonClassType, lock);
3309 if (_vsm == NULL) {
3310 return;
3311 }
3312 size_t word_size;
3313 size_t class_word_size;
3314 vsm()->get_initial_chunk_sizes(type, &word_size, &class_word_size);
3315
3316 if (using_class_space()) {
3317 assert(class_space_list() != NULL,
3318 "Class VirtualSpaceList has not been initialized");
3319 assert(chunk_manager_class() != NULL,
3320 "Class ChunkManager has not been initialized");
3321
3322 // Allocate SpaceManager for classes.
3323 _class_vsm = new SpaceManager(ClassType, lock);
3324 if (_class_vsm == NULL) {
3325 return;
3326 }
3327 }
3328
3329 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
3330
3331 // Allocate chunk for metadata objects
3332 Metachunk* new_chunk = get_initialization_chunk(NonClassType,
3333 word_size,
3334 vsm()->medium_chunk_bunch());
3335 assert(!DumpSharedSpaces || new_chunk != NULL, "should have enough space for both chunks");
3336 if (new_chunk != NULL) {
3337 // Add to this manager's list of chunks in use and current_chunk().
3338 vsm()->add_chunk(new_chunk, true);
3339 }
3340
3341 // Allocate chunk for class metadata objects
3342 if (using_class_space()) {
3343 Metachunk* class_chunk = get_initialization_chunk(ClassType,
3344 class_word_size,
3345 class_vsm()->medium_chunk_bunch());
3346 if (class_chunk != NULL) {
3347 class_vsm()->add_chunk(class_chunk, true);
3348 }
3349 }
3350
3351 _alloc_record_head = NULL;
3352 _alloc_record_tail = NULL;
3353 }
3354
3355 size_t Metaspace::align_word_size_up(size_t word_size) {
3356 size_t byte_size = word_size * wordSize;
3357 return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3358 }
3359
3360 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3361 // DumpSharedSpaces doesn't use class metadata area (yet)
3362 // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3363 if (is_class_space_allocation(mdtype)) {
3364 return class_vsm()->allocate(word_size);
3365 } else {
3366 return vsm()->allocate(word_size);
3367 }
3368 }
3755 assert(committed <= reserved, "assert");
3756
3757 size_t committed_metadata = MetaspaceAux::committed_bytes(Metaspace::NonClassType);
3758 assert(committed_metadata > 0, "assert");
3759 assert(committed_metadata <= committed, "assert");
3760
3761 if (UseCompressedClassPointers) {
3762 size_t committed_class = MetaspaceAux::committed_bytes(Metaspace::ClassType);
3763 assert(committed_class > 0, "assert");
3764 assert(committed_class < committed, "assert");
3765 }
3766 }
3767
3768 static void test_virtual_space_list_large_chunk() {
3769 VirtualSpaceList* vs_list = new VirtualSpaceList(os::vm_allocation_granularity());
3770 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
3771 // A size larger than VirtualSpaceSize (256k) and add one page to make it _not_ be
3772 // vm_allocation_granularity aligned on Windows.
3773 size_t large_size = (size_t)(2*256*K + (os::vm_page_size()/BytesPerWord));
3774 large_size += (os::vm_page_size()/BytesPerWord);
3775 vs_list->get_new_chunk(large_size, large_size, 0);
3776 }
3777
3778 static void test() {
3779 test_reserved();
3780 test_committed();
3781 test_virtual_space_list_large_chunk();
3782 }
3783 };
3784
3785 void TestMetaspaceAux_test() {
3786 TestMetaspaceAuxTest::test();
3787 }
3788
3789 class TestVirtualSpaceNodeTest {
3790 static void chunk_up(size_t words_left, size_t& num_medium_chunks,
3791 size_t& num_small_chunks,
3792 size_t& num_specialized_chunks) {
3793 num_medium_chunks = words_left / MediumChunk;
3794 words_left = words_left % MediumChunk;
3795
3930 void* virtual_space_max = (void*)(uintptr_t)-1;
3931 size_t bottom_to_max = pointer_delta(virtual_space_max, vsn.bottom(), 1);
3932 size_t overflow_size = bottom_to_max + BytesPerWord;
3933 size_t overflow_word_size = overflow_size / BytesPerWord;
3934
3935 // Check that is_available can handle the overflow.
3936 assert_is_available_negative(overflow_word_size);
3937 }
3938
3939 static void test_is_available() {
3940 TestVirtualSpaceNodeTest::test_is_available_positive();
3941 TestVirtualSpaceNodeTest::test_is_available_negative();
3942 TestVirtualSpaceNodeTest::test_is_available_overflow();
3943 }
3944 };
3945
3946 void TestVirtualSpaceNode_test() {
3947 TestVirtualSpaceNodeTest::test();
3948 TestVirtualSpaceNodeTest::test_is_available();
3949 }
3950 #endif
|
514 _current_virtual_space = v;
515 }
516
517 void link_vs(VirtualSpaceNode* new_entry);
518
519 // Get another virtual space and add it to the list. This
520 // is typically prompted by a failed attempt to allocate a chunk
521 // and is typically followed by the allocation of a chunk.
522 bool create_new_virtual_space(size_t vs_word_size);
523
524 // Chunk up the unused committed space in the current
525 // virtual space and add the chunks to the free list.
526 void retire_current_virtual_space();
527
528 public:
529 VirtualSpaceList(size_t word_size);
530 VirtualSpaceList(ReservedSpace rs);
531
532 size_t free_bytes();
533
534 Metachunk* get_new_chunk(size_t chunk_word_size,
535 size_t suggested_commit_granularity);
536
537 bool expand_node_by(VirtualSpaceNode* node,
538 size_t min_words,
539 size_t preferred_words);
540
541 bool expand_by(size_t min_words,
542 size_t preferred_words);
543
544 VirtualSpaceNode* current_virtual_space() {
545 return _current_virtual_space;
546 }
547
548 bool is_class() const { return _is_class; }
549
550 bool initialization_succeeded() { return _virtual_space_list != NULL; }
551
552 size_t reserved_words() { return _reserved_words; }
553 size_t reserved_bytes() { return reserved_words() * BytesPerWord; }
554 size_t committed_words() { return _committed_words; }
555 size_t committed_bytes() { return committed_words() * BytesPerWord; }
669 // Add chunk to the list of chunks in use
670 void add_chunk(Metachunk* v, bool make_current);
671 void retire_current_chunk();
672
673 Mutex* lock() const { return _lock; }
674
675 const char* chunk_size_name(ChunkIndex index) const;
676
677 protected:
678 void initialize();
679
680 public:
681 SpaceManager(Metaspace::MetadataType mdtype,
682 Mutex* lock);
683 ~SpaceManager();
684
685 enum ChunkMultiples {
686 MediumChunkMultiple = 4
687 };
688
689 static size_t specialized_chunk_size(bool is_class) { return is_class ? ClassSpecializedChunk : SpecializedChunk; }
690 static size_t small_chunk_size(bool is_class) { return is_class ? ClassSmallChunk : SmallChunk; }
691 static size_t medium_chunk_size(bool is_class) { return is_class ? ClassMediumChunk : MediumChunk; }
692
693 static size_t smallest_chunk_size(bool is_class) { return specialized_chunk_size(is_class); }
694
695 // Accessors
696 bool is_class() const { return _mdtype == Metaspace::ClassType; }
697
698 size_t specialized_chunk_size() const { return specialized_chunk_size(is_class()); }
699 size_t small_chunk_size() const { return small_chunk_size(is_class()); }
700 size_t medium_chunk_size() const { return medium_chunk_size(is_class()); }
701
702 size_t smallest_chunk_size() const { return smallest_chunk_size(is_class()); }
703
704 size_t medium_chunk_bunch() const { return medium_chunk_size() * MediumChunkMultiple; }
705
706 size_t allocated_blocks_words() const { return _allocated_blocks_words; }
707 size_t allocated_blocks_bytes() const { return _allocated_blocks_words * BytesPerWord; }
708 size_t allocated_chunks_words() const { return _allocated_chunks_words; }
709 size_t allocated_chunks_count() const { return _allocated_chunks_count; }
710
711 bool is_humongous(size_t word_size) { return word_size > medium_chunk_size(); }
712
713 static Mutex* expand_lock() { return _expand_lock; }
714
715 // Increment the per Metaspace and global running sums for Metachunks
716 // by the given size. This is used when a Metachunk to added to
717 // the in-use list.
718 void inc_size_metrics(size_t words);
719 // Increment the per Metaspace and global running sums Metablocks by the given
720 // size. This is used when a Metablock is allocated.
721 void inc_used_metrics(size_t words);
722 // Delete the portion of the running sums for this SpaceManager. That is,
723 // the globals running sums for the Metachunks and Metablocks are
724 // decremented for all the Metachunks in-use by this SpaceManager.
725 void dec_total_from_size_metrics();
726
727 // Adjust the initial chunk size to match one of the fixed chunk list sizes,
728 // or return the unadjusted size if the requested size is humongous.
729 static size_t adjust_initial_chunk_size(size_t requested, bool is_class_space);
730 size_t adjust_initial_chunk_size(size_t requested) const;
731
732 // Get the initial chunks size for this metaspace type.
733 size_t get_initial_chunk_size(Metaspace::MetaspaceType type) const;
734
735 size_t sum_capacity_in_chunks_in_use() const;
736 size_t sum_used_in_chunks_in_use() const;
737 size_t sum_free_in_chunks_in_use() const;
738 size_t sum_waste_in_chunks_in_use() const;
739 size_t sum_waste_in_chunks_in_use(ChunkIndex index ) const;
740
741 size_t sum_count_in_chunks_in_use();
742 size_t sum_count_in_chunks_in_use(ChunkIndex i);
743
744 Metachunk* get_new_chunk(size_t chunk_word_size);
745
746 // Block allocation and deallocation.
747 // Allocates a block from the current chunk
748 MetaWord* allocate(size_t word_size);
749
750 // Helper for allocations
751 MetaWord* allocate_work(size_t word_size);
752
753 // Returns a block to the per manager freelist
754 void deallocate(MetaWord* p, size_t word_size);
755
756 // Based on the allocation size and a minimum chunk size,
757 // returned chunk size (for expanding space for chunk allocation).
758 size_t calc_chunk_size(size_t allocation_word_size);
759
760 // Called when an allocation from the current chunk fails.
761 // Gets a new chunk (may require getting a new virtual space),
762 // and allocates from that chunk.
763 MetaWord* grow_and_allocate(size_t word_size);
764
1311 size_t grow_vs_words = MAX2((size_t)VirtualSpaceSize, preferred_words);
1312 grow_vs_words = align_size_up(grow_vs_words, Metaspace::reserve_alignment_words());
1313
1314 if (create_new_virtual_space(grow_vs_words)) {
1315 if (current_virtual_space()->is_pre_committed()) {
1316 // The memory was pre-committed, so we are done here.
1317 assert(min_words <= current_virtual_space()->committed_words(),
1318 "The new VirtualSpace was pre-committed, so it"
1319 "should be large enough to fit the alloc request.");
1320 return true;
1321 }
1322
1323 return expand_node_by(current_virtual_space(),
1324 min_words,
1325 max_expansion_words);
1326 }
1327
1328 return false;
1329 }
1330
1331 Metachunk* VirtualSpaceList::get_new_chunk(size_t chunk_word_size, size_t suggested_commit_granularity) {
1332
1333 // Allocate a chunk out of the current virtual space.
1334 Metachunk* next = current_virtual_space()->get_chunk_vs(chunk_word_size);
1335
1336 if (next != NULL) {
1337 return next;
1338 }
1339
1340 // The expand amount is currently only determined by the requested sizes
1341 // and not how much committed memory is left in the current virtual space.
1342
1343 size_t min_word_size = align_size_up(chunk_word_size, Metaspace::commit_alignment_words());
1344 size_t preferred_word_size = align_size_up(suggested_commit_granularity, Metaspace::commit_alignment_words());
1345 if (min_word_size >= preferred_word_size) {
1346 // Can happen when humongous chunks are allocated.
1347 preferred_word_size = min_word_size;
1348 }
1349
1350 bool expanded = expand_by(min_word_size, preferred_word_size);
1351 if (expanded) {
1352 next = current_virtual_space()->get_chunk_vs(chunk_word_size);
1353 assert(next != NULL, "The allocation was expected to succeed after the expansion");
1354 }
1355
1356 return next;
1357 }
1358
1359 void VirtualSpaceList::print_on(outputStream* st) const {
1360 if (TraceMetadataChunkAllocation && Verbose) {
1361 VirtualSpaceListIterator iter(virtual_space_list());
1362 while (iter.repeat()) {
1363 VirtualSpaceNode* node = iter.get_next();
1364 node->print_on(st);
1365 }
1366 }
1367 }
1368
1369 // MetaspaceGC methods
1370
1371 // VM_CollectForMetadataAllocation is the vm operation used to GC.
1372 // Within the VM operation after the GC the attempt to allocate the metadata
1873 } else {
1874 list_count = humongous_dictionary()->total_count();
1875 }
1876 gclog_or_tty->print("ChunkManager::chunk_freelist_allocate: " PTR_FORMAT " chunk "
1877 PTR_FORMAT " size " SIZE_FORMAT " count " SIZE_FORMAT " ",
1878 this, chunk, chunk->word_size(), list_count);
1879 locked_print_free_chunks(gclog_or_tty);
1880 }
1881
1882 return chunk;
1883 }
1884
1885 void ChunkManager::print_on(outputStream* out) const {
1886 if (PrintFLSStatistics != 0) {
1887 const_cast<ChunkManager *>(this)->humongous_dictionary()->report_statistics();
1888 }
1889 }
1890
1891 // SpaceManager methods
1892
1893 size_t SpaceManager::adjust_initial_chunk_size(size_t requested, bool is_class_space) {
1894 size_t chunk_sizes[] = {
1895 specialized_chunk_size(is_class_space),
1896 small_chunk_size(is_class_space),
1897 medium_chunk_size(is_class_space)
1898 };
1899
1900 // Adjust up to one of the fixed chunk sizes ...
1901 for (size_t i = 0; i < ARRAY_SIZE(chunk_sizes); i++) {
1902 if (requested <= chunk_sizes[i]) {
1903 return chunk_sizes[i];
1904 }
1905 }
1906
1907 // ... or return the size as a humongous chunk.
1908 return requested;
1909 }
1910
1911 size_t SpaceManager::adjust_initial_chunk_size(size_t requested) const {
1912 return adjust_initial_chunk_size(requested, is_class());
1913 }
1914
1915 size_t SpaceManager::get_initial_chunk_size(Metaspace::MetaspaceType type) const {
1916 size_t requested;
1917
1918 if (is_class()) {
1919 switch (type) {
1920 case Metaspace::BootMetaspaceType: requested = Metaspace::first_class_chunk_word_size(); break;
1921 case Metaspace::ROMetaspaceType: requested = ClassSpecializedChunk; break;
1922 case Metaspace::ReadWriteMetaspaceType: requested = ClassSpecializedChunk; break;
1923 case Metaspace::AnonymousMetaspaceType: requested = ClassSpecializedChunk; break;
1924 case Metaspace::ReflectionMetaspaceType: requested = ClassSpecializedChunk; break;
1925 default: requested = ClassSmallChunk; break;
1926 }
1927 } else {
1928 switch (type) {
1929 case Metaspace::BootMetaspaceType: requested = Metaspace::first_chunk_word_size(); break;
1930 case Metaspace::ROMetaspaceType: requested = SharedReadOnlySize / wordSize; break;
1931 case Metaspace::ReadWriteMetaspaceType: requested = SharedReadWriteSize / wordSize; break;
1932 case Metaspace::AnonymousMetaspaceType: requested = SpecializedChunk; break;
1933 case Metaspace::ReflectionMetaspaceType: requested = SpecializedChunk; break;
1934 default: requested = SmallChunk; break;
1935 }
1936 }
1937
1938 // Adjust to one of the fixed chunk sizes (unless humongous)
1939 const size_t adjusted = adjust_initial_chunk_size(requested);
1940
1941 assert(adjusted != 0, err_msg("Incorrect initial chunk size. Requested: "
1942 SIZE_FORMAT " adjusted: " SIZE_FORMAT, requested, adjusted));
1943
1944 return adjusted;
1945 }
1946
1947 size_t SpaceManager::sum_free_in_chunks_in_use() const {
1948 MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
1949 size_t free = 0;
1950 for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
1951 Metachunk* chunk = chunks_in_use(i);
1952 while (chunk != NULL) {
1953 free += chunk->free_word_size();
1954 chunk = chunk->next();
1955 }
1956 }
1957 return free;
1958 }
1959
1960 size_t SpaceManager::sum_waste_in_chunks_in_use() const {
1961 MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
1962 size_t result = 0;
1963 for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
1964 result += sum_waste_in_chunks_in_use(i);
2114 "Should have been set");
2115 assert(current_chunk() == NULL ||
2116 current_chunk()->allocate(word_size) == NULL,
2117 "Don't need to expand");
2118 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
2119
2120 if (TraceMetadataChunkAllocation && Verbose) {
2121 size_t words_left = 0;
2122 size_t words_used = 0;
2123 if (current_chunk() != NULL) {
2124 words_left = current_chunk()->free_word_size();
2125 words_used = current_chunk()->used_word_size();
2126 }
2127 gclog_or_tty->print_cr("SpaceManager::grow_and_allocate for " SIZE_FORMAT
2128 " words " SIZE_FORMAT " words used " SIZE_FORMAT
2129 " words left",
2130 word_size, words_used, words_left);
2131 }
2132
2133 // Get another chunk out of the virtual space
2134 size_t chunk_word_size = calc_chunk_size(word_size);
2135 Metachunk* next = get_new_chunk(chunk_word_size);
2136
2137 MetaWord* mem = NULL;
2138
2139 // If a chunk was available, add it to the in-use chunk list
2140 // and do an allocation from it.
2141 if (next != NULL) {
2142 // Add to this manager's list of chunks in use.
2143 add_chunk(next, false);
2144 mem = next->allocate(word_size);
2145 }
2146
2147 // Track metaspace memory usage statistic.
2148 track_metaspace_memory_usage();
2149
2150 return mem;
2151 }
2152
2153 void SpaceManager::print_on(outputStream* st) const {
2154
2155 for (ChunkIndex i = ZeroIndex;
2424
2425 assert(new_chunk->is_empty(), "Not ready for reuse");
2426 if (TraceMetadataChunkAllocation && Verbose) {
2427 gclog_or_tty->print("SpaceManager::add_chunk: %d) ",
2428 sum_count_in_chunks_in_use());
2429 new_chunk->print_on(gclog_or_tty);
2430 chunk_manager()->locked_print_free_chunks(gclog_or_tty);
2431 }
2432 }
2433
2434 void SpaceManager::retire_current_chunk() {
2435 if (current_chunk() != NULL) {
2436 size_t remaining_words = current_chunk()->free_word_size();
2437 if (remaining_words >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
2438 block_freelists()->return_block(current_chunk()->allocate(remaining_words), remaining_words);
2439 inc_used_metrics(remaining_words);
2440 }
2441 }
2442 }
2443
2444 Metachunk* SpaceManager::get_new_chunk(size_t chunk_word_size) {
2445 // Get a chunk from the chunk freelist
2446 Metachunk* next = chunk_manager()->chunk_freelist_allocate(chunk_word_size);
2447
2448 if (next == NULL) {
2449 next = vs_list()->get_new_chunk(chunk_word_size,
2450 medium_chunk_bunch());
2451 }
2452
2453 if (TraceMetadataHumongousAllocation && next != NULL &&
2454 SpaceManager::is_humongous(next->word_size())) {
2455 gclog_or_tty->print_cr(" new humongous chunk word size "
2456 PTR_FORMAT, next->word_size());
2457 }
2458
2459 return next;
2460 }
2461
2462 MetaWord* SpaceManager::allocate(size_t word_size) {
2463 MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
2464
2465 size_t raw_word_size = get_raw_word_size(word_size);
2466 BlockFreelist* fl = block_freelists();
2467 MetaWord* p = NULL;
2468 // Allocation from the dictionary is expensive in the sense that
2469 // the dictionary has to be searched for a size. Don't allocate
3095 UseSharedSpaces ? (address)cds_base : 0);
3096
3097 initialize_class_space(metaspace_rs);
3098
3099 if (PrintCompressedOopsMode || (PrintMiscellaneous && Verbose)) {
3100 gclog_or_tty->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: " SIZE_FORMAT,
3101 Universe::narrow_klass_base(), Universe::narrow_klass_shift());
3102 gclog_or_tty->print_cr("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
3103 compressed_class_space_size(), metaspace_rs.base(), requested_addr);
3104 }
3105 }
3106
3107 // For UseCompressedClassPointers the class space is reserved above the top of
3108 // the Java heap. The argument passed in is at the base of the compressed space.
3109 void Metaspace::initialize_class_space(ReservedSpace rs) {
3110 // The reserved space size may be bigger because of alignment, esp with UseLargePages
3111 assert(rs.size() >= CompressedClassSpaceSize,
3112 err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), CompressedClassSpaceSize));
3113 assert(using_class_space(), "Must be using class space");
3114 _class_space_list = new VirtualSpaceList(rs);
3115 _chunk_manager_class = new ChunkManager(ClassSpecializedChunk, ClassSmallChunk, ClassMediumChunk);
3116
3117 if (!_class_space_list->initialization_succeeded()) {
3118 vm_exit_during_initialization("Failed to setup compressed class space virtual space list.");
3119 }
3120 }
3121
3122 #endif
3123
3124 void Metaspace::ergo_initialize() {
3125 if (DumpSharedSpaces) {
3126 // Using large pages when dumping the shared archive is currently not implemented.
3127 FLAG_SET_ERGO(bool, UseLargePagesInMetaspace, false);
3128 }
3129
3130 size_t page_size = os::vm_page_size();
3131 if (UseLargePages && UseLargePagesInMetaspace) {
3132 page_size = os::large_page_size();
3133 }
3134
3135 _commit_alignment = page_size;
3296 // of the boot class loader size.
3297 size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
3298 word_size = align_size_up(word_size, Metaspace::reserve_alignment_words());
3299
3300 // Initialize the list of virtual spaces.
3301 _space_list = new VirtualSpaceList(word_size);
3302 _chunk_manager_metadata = new ChunkManager(SpecializedChunk, SmallChunk, MediumChunk);
3303
3304 if (!_space_list->initialization_succeeded()) {
3305 vm_exit_during_initialization("Unable to setup metadata virtual space list.", NULL);
3306 }
3307 }
3308
3309 _tracer = new MetaspaceTracer();
3310 }
3311
// Second phase of metaspace initialization; simply forwards to
// MetaspaceGC::post_initialize() to finish GC-related metaspace setup.
void Metaspace::post_initialize() {
  MetaspaceGC::post_initialize();
}
3315
3316 void Metaspace::initialize_first_chunk(MetaspaceType type, MetadataType mdtype) {
3317 Metachunk* chunk = get_initialization_chunk(type, mdtype);
3318 if (chunk != NULL) {
3319 // Add to this manager's list of chunks in use and current_chunk().
3320 get_space_manager(mdtype)->add_chunk(chunk, true);
3321 }
3322 }
3323
3324 Metachunk* Metaspace::get_initialization_chunk(MetaspaceType type, MetadataType mdtype) {
3325 size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
3326
3327 // Get a chunk from the chunk freelist
3328 Metachunk* chunk = get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
3329
3330 if (chunk == NULL) {
3331 chunk = get_space_list(mdtype)->get_new_chunk(chunk_word_size,
3332 get_space_manager(mdtype)->medium_chunk_bunch());
3333 }
3334
3335 // For dumping shared archive, report error if allocation has failed.
3336 if (DumpSharedSpaces && chunk == NULL) {
3337 report_insufficient_metaspace(MetaspaceAux::committed_bytes() + chunk_word_size * BytesPerWord);
3338 }
3339
3340 return chunk;
3341 }
3342
// Sanity-check that the VM-wide metaspace structures (virtual space lists
// and chunk managers) were created before any per-Metaspace initialization
// relies on them.
void Metaspace::verify_global_initialization() {
  assert(space_list() != NULL, "Metadata VirtualSpaceList has not been initialized");
  assert(chunk_manager_metadata() != NULL, "Metadata ChunkManager has not been initialized");

  if (using_class_space()) {
    // The class-space structures only exist when a separate class space is in use.
    assert(class_space_list() != NULL, "Class VirtualSpaceList has not been initialized");
    assert(chunk_manager_class() != NULL, "Class ChunkManager has not been initialized");
  }
}
3352
// Set up this Metaspace instance: create the SpaceManager(s) for the
// non-class (and, when in use, class) metadata areas, then allocate an
// initial chunk for each while holding the expand lock. The supplied lock
// is handed to the SpaceManagers for their own allocation serialization.
void Metaspace::initialize(Mutex* lock, MetaspaceType type) {
  verify_global_initialization();

  // Allocate SpaceManager for metadata objects.
  _vsm = new SpaceManager(NonClassType, lock);

  if (using_class_space()) {
    // Allocate SpaceManager for classes.
    _class_vsm = new SpaceManager(ClassType, lock);
  }

  // The expand lock serializes carving chunks out of the shared virtual
  // space lists below.
  MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);

  // Allocate chunk for metadata objects
  initialize_first_chunk(type, NonClassType);

  // Allocate chunk for class metadata objects
  if (using_class_space()) {
    initialize_first_chunk(type, ClassType);
  }

  // Start with an empty allocation-record list.
  _alloc_record_head = NULL;
  _alloc_record_tail = NULL;
}
3377
3378 size_t Metaspace::align_word_size_up(size_t word_size) {
3379 size_t byte_size = word_size * wordSize;
3380 return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
3381 }
3382
3383 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
3384 // DumpSharedSpaces doesn't use class metadata area (yet)
3385 // Also, don't use class_vsm() unless UseCompressedClassPointers is true.
3386 if (is_class_space_allocation(mdtype)) {
3387 return class_vsm()->allocate(word_size);
3388 } else {
3389 return vsm()->allocate(word_size);
3390 }
3391 }
3778 assert(committed <= reserved, "assert");
3779
3780 size_t committed_metadata = MetaspaceAux::committed_bytes(Metaspace::NonClassType);
3781 assert(committed_metadata > 0, "assert");
3782 assert(committed_metadata <= committed, "assert");
3783
3784 if (UseCompressedClassPointers) {
3785 size_t committed_class = MetaspaceAux::committed_bytes(Metaspace::ClassType);
3786 assert(committed_class > 0, "assert");
3787 assert(committed_class < committed, "assert");
3788 }
3789 }
3790
3791 static void test_virtual_space_list_large_chunk() {
3792 VirtualSpaceList* vs_list = new VirtualSpaceList(os::vm_allocation_granularity());
3793 MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
3794 // A size larger than VirtualSpaceSize (256k) and add one page to make it _not_ be
3795 // vm_allocation_granularity aligned on Windows.
3796 size_t large_size = (size_t)(2*256*K + (os::vm_page_size()/BytesPerWord));
3797 large_size += (os::vm_page_size()/BytesPerWord);
3798 vs_list->get_new_chunk(large_size, 0);
3799 }
3800
// Entry point that runs all TestMetaspaceAuxTest cases in sequence.
static void test() {
  test_reserved();
  test_committed();
  test_virtual_space_list_large_chunk();
}
3806 };
3807
// External, non-member hook (callable from the internal VM test harness)
// that runs the TestMetaspaceAuxTest suite.
void TestMetaspaceAux_test() {
  TestMetaspaceAuxTest::test();
}
3811
3812 class TestVirtualSpaceNodeTest {
3813 static void chunk_up(size_t words_left, size_t& num_medium_chunks,
3814 size_t& num_small_chunks,
3815 size_t& num_specialized_chunks) {
3816 num_medium_chunks = words_left / MediumChunk;
3817 words_left = words_left % MediumChunk;
3818
3953 void* virtual_space_max = (void*)(uintptr_t)-1;
3954 size_t bottom_to_max = pointer_delta(virtual_space_max, vsn.bottom(), 1);
3955 size_t overflow_size = bottom_to_max + BytesPerWord;
3956 size_t overflow_word_size = overflow_size / BytesPerWord;
3957
3958 // Check that is_available can handle the overflow.
3959 assert_is_available_negative(overflow_word_size);
3960 }
3961
// Runs the three is_available() cases: positive, negative, and the
// address-space-overflow variant.
static void test_is_available() {
  TestVirtualSpaceNodeTest::test_is_available_positive();
  TestVirtualSpaceNodeTest::test_is_available_negative();
  TestVirtualSpaceNodeTest::test_is_available_overflow();
}
3967 };
3968
// External, non-member hook that runs the TestVirtualSpaceNodeTest suite,
// including the is_available() checks.
void TestVirtualSpaceNode_test() {
  TestVirtualSpaceNodeTest::test();
  TestVirtualSpaceNodeTest::test_is_available();
}
3973
// The following test is placed here instead of a gtest / unittest file
// because the SpaceManager class is only available in this file.
3976 class SpaceManagerTest : AllStatic {
3977 friend void SpaceManager_test_adjust_initial_chunk_size();
3978
3979 static void test_adjust_initial_chunk_size(bool is_class) {
3980 const size_t smallest = SpaceManager::smallest_chunk_size(is_class);
3981 const size_t normal = SpaceManager::small_chunk_size(is_class);
3982 const size_t medium = SpaceManager::medium_chunk_size(is_class);
3983
3984 #define test_adjust_initial_chunk_size(value, expected, is_class_value) \
3985 do { \
3986 size_t v = value; \
3987 size_t e = expected; \
3988 assert(SpaceManager::adjust_initial_chunk_size(v, (is_class_value)) == e, \
3989 err_msg("Expected: " SIZE_FORMAT " got: " SIZE_FORMAT, e, v)); \
3990 } while (0)
3991
3992 // Smallest (specialized)
3993 test_adjust_initial_chunk_size(1, smallest, is_class);
3994 test_adjust_initial_chunk_size(smallest - 1, smallest, is_class);
3995 test_adjust_initial_chunk_size(smallest, smallest, is_class);
3996
3997 // Small
3998 test_adjust_initial_chunk_size(smallest + 1, normal, is_class);
3999 test_adjust_initial_chunk_size(normal - 1, normal, is_class);
4000 test_adjust_initial_chunk_size(normal, normal, is_class);
4001
4002 // Medium
4003 test_adjust_initial_chunk_size(normal + 1, medium, is_class);
4004 test_adjust_initial_chunk_size(medium - 1, medium, is_class);
4005 test_adjust_initial_chunk_size(medium, medium, is_class);
4006
4007 // Humongous
4008 test_adjust_initial_chunk_size(medium + 1, medium + 1, is_class);
4009
4010 #undef test_adjust_initial_chunk_size
4011 }
4012
4013 static void test_adjust_initial_chunk_size() {
4014 test_adjust_initial_chunk_size(false);
4015 test_adjust_initial_chunk_size(true);
4016 }
4017 };
4018
// External, non-member hook that runs the SpaceManager chunk-size
// adjustment tests (it is a friend of SpaceManagerTest).
void SpaceManager_test_adjust_initial_chunk_size() {
  SpaceManagerTest::test_adjust_initial_chunk_size();
}
4022
4023 #endif
|