src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp

Print this page
rev 5878 : 8034171: Remove use of template template parameters from binaryTreeDictionary.
Contributed-by: Matthias.Baesken@sap.com


1687     _bt.allocated((HeapWord*)fc, fc->size());
1688   }
1689   assert(fc->size() >= size + MinChunkSize, "tautology");
1690   fc = splitChunkAndReturnRemainder(fc, size);
1691   assert(fc->size() == size, "chunk is wrong size");
1692   _bt.verify_single_block((HeapWord*)fc, size);
1693   return fc;
1694 }
1695 
1696 void
1697 CompactibleFreeListSpace::returnChunkToDictionary(FreeChunk* chunk) {
1698   assert_locked();
1699 
1700   size_t size = chunk->size();
1701   _bt.verify_single_block((HeapWord*)chunk, size);
1702   // adjust _unallocated_block downward, as necessary
1703   _bt.freed((HeapWord*)chunk, size);
1704   _dictionary->return_chunk(chunk);
1705 #ifndef PRODUCT
1706   if (CMSCollector::abstract_state() != CMSCollector::Sweeping) {
1707     TreeChunk<FreeChunk, AdaptiveFreeList>* tc = TreeChunk<FreeChunk, AdaptiveFreeList>::as_TreeChunk(chunk);
1708     TreeList<FreeChunk, AdaptiveFreeList>* tl = tc->list();
1709     tl->verify_stats();
1710   }
1711 #endif // PRODUCT
1712 }
1713 
1714 void
1715 CompactibleFreeListSpace::returnChunkToFreeList(FreeChunk* fc) {
1716   assert_locked();
1717   size_t size = fc->size();
1718   _bt.verify_single_block((HeapWord*) fc, size);
1719   _bt.verify_not_unallocated((HeapWord*) fc, size);
1720   if (_adaptive_freelists) {
1721     _indexedFreeList[size].return_chunk_at_tail(fc);
1722   } else {
1723     _indexedFreeList[size].return_chunk_at_head(fc);
1724   }
1725 #ifndef PRODUCT
1726   if (CMSCollector::abstract_state() != CMSCollector::Sweeping) {
1727      _indexedFreeList[size].verify_stats();
1728   }


2498 }
2499 
// Structural check of the indexed free list holding chunks of `size`:
// validates each chunk's size, free bit, prev/next linkage, the tail
// pointer, and the list's cached element count, via guarantee().
2500 void CompactibleFreeListSpace::verifyIndexedFreeList(size_t size) const {
2501   FreeChunk* fc   =  _indexedFreeList[size].head();
2502   FreeChunk* tail =  _indexedFreeList[size].tail();
2503   size_t    num = _indexedFreeList[size].count();
2504   size_t      n = 0;
     // Only sizes the index set covers may have a non-empty list.
2505   guarantee(((size >= IndexSetStart) && (size % IndexSetStride == 0)) || fc == NULL,
2506             "Slot should have been empty");
2507   for (; fc != NULL; fc = fc->next(), n++) {
2508     guarantee(fc->size() == size, "Size inconsistency");
2509     guarantee(fc->is_free(), "!free?");
2510     guarantee(fc->next() == NULL || fc->next()->prev() == fc, "Broken list");
2511     guarantee((fc->next() == NULL) == (fc == tail), "Incorrect tail");
2512   }
     // The cached count must match the number of chunks just walked.
2513   guarantee(n == num, "Incorrect count");
2514 }
2515 
2516 #ifndef PRODUCT
2517 void CompactibleFreeListSpace::check_free_list_consistency() const {
2518   assert((TreeChunk<FreeChunk, AdaptiveFreeList>::min_size() <= IndexSetSize),
2519     "Some sizes can't be allocated without recourse to"
2520     " linear allocation buffers");
2521   assert((TreeChunk<FreeChunk, AdaptiveFreeList>::min_size()*HeapWordSize == sizeof(TreeChunk<FreeChunk, AdaptiveFreeList>)),
2522     "else MIN_TREE_CHUNK_SIZE is wrong");
2523   assert(IndexSetStart != 0, "IndexSetStart not initialized");
2524   assert(IndexSetStride != 0, "IndexSetStride not initialized");
2525 }
2526 #endif
2527 
2528 void CompactibleFreeListSpace::printFLCensus(size_t sweep_count) const {
2529   assert_lock_strong(&_freelistLock);
2530   AdaptiveFreeList<FreeChunk> total;
2531   gclog_or_tty->print("end sweep# " SIZE_FORMAT "\n", sweep_count);
2532   AdaptiveFreeList<FreeChunk>::print_labels_on(gclog_or_tty, "size");
2533   size_t total_free = 0;
2534   for (size_t i = IndexSetStart; i < IndexSetSize; i += IndexSetStride) {
2535     const AdaptiveFreeList<FreeChunk> *fl = &_indexedFreeList[i];
2536     total_free += fl->count() * fl->size();
2537     if (i % (40*IndexSetStride) == 0) {
2538       AdaptiveFreeList<FreeChunk>::print_labels_on(gclog_or_tty, "size");
2539     }
2540     fl->print_on(gclog_or_tty);
2541     total.set_bfr_surp(    total.bfr_surp()     + fl->bfr_surp()    );




1687     _bt.allocated((HeapWord*)fc, fc->size());
1688   }
1689   assert(fc->size() >= size + MinChunkSize, "tautology");
1690   fc = splitChunkAndReturnRemainder(fc, size);
1691   assert(fc->size() == size, "chunk is wrong size");
1692   _bt.verify_single_block((HeapWord*)fc, size);
1693   return fc;
1694 }
1695 
// Give back `chunk` to the dictionary holding the large (non-indexed)
// free chunks. Caller must hold the free-list lock (assert_locked()).
1696 void
1697 CompactibleFreeListSpace::returnChunkToDictionary(FreeChunk* chunk) {
1698   assert_locked();
1699 
1700   size_t size = chunk->size();
     // Cross-check the chunk's boundaries in _bt before recording the free.
1701   _bt.verify_single_block((HeapWord*)chunk, size);
1702   // adjust _unallocated_block downward, as necessary
1703   _bt.freed((HeapWord*)chunk, size);
1704   _dictionary->return_chunk(chunk);
1705 #ifndef PRODUCT
     // Debug-only: outside of sweeping, verify the statistics of the tree
     // list the chunk was returned to. Uses the concrete instantiation
     // AdaptiveFreeList<FreeChunk> (8034171: no template template parameter).
1706   if (CMSCollector::abstract_state() != CMSCollector::Sweeping) {
1707     TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >* tc = TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >::as_TreeChunk(chunk);
1708     TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >* tl = tc->list();
1709     tl->verify_stats();
1710   }
1711 #endif // PRODUCT
1712 }
1713 
1714 void
1715 CompactibleFreeListSpace::returnChunkToFreeList(FreeChunk* fc) {
1716   assert_locked();
1717   size_t size = fc->size();
1718   _bt.verify_single_block((HeapWord*) fc, size);
1719   _bt.verify_not_unallocated((HeapWord*) fc, size);
1720   if (_adaptive_freelists) {
1721     _indexedFreeList[size].return_chunk_at_tail(fc);
1722   } else {
1723     _indexedFreeList[size].return_chunk_at_head(fc);
1724   }
1725 #ifndef PRODUCT
1726   if (CMSCollector::abstract_state() != CMSCollector::Sweeping) {
1727      _indexedFreeList[size].verify_stats();
1728   }


2498 }
2499 
2500 void CompactibleFreeListSpace::verifyIndexedFreeList(size_t size) const {
2501   FreeChunk* fc   =  _indexedFreeList[size].head();
2502   FreeChunk* tail =  _indexedFreeList[size].tail();
2503   size_t    num = _indexedFreeList[size].count();
2504   size_t      n = 0;
2505   guarantee(((size >= IndexSetStart) && (size % IndexSetStride == 0)) || fc == NULL,
2506             "Slot should have been empty");
2507   for (; fc != NULL; fc = fc->next(), n++) {
2508     guarantee(fc->size() == size, "Size inconsistency");
2509     guarantee(fc->is_free(), "!free?");
2510     guarantee(fc->next() == NULL || fc->next()->prev() == fc, "Broken list");
2511     guarantee((fc->next() == NULL) == (fc == tail), "Incorrect tail");
2512   }
2513   guarantee(n == num, "Incorrect count");
2514 }
2515 
2516 #ifndef PRODUCT
// Debug-only consistency checks: the tree dictionary's minimum chunk size
// must not exceed the indexed free lists' range, that minimum must agree
// with sizeof(TreeChunk), and the index-set parameters must be initialized.
// Uses the concrete type AdaptiveFreeList<FreeChunk> (8034171 removed the
// template template parameter form).
2517 void CompactibleFreeListSpace::check_free_list_consistency() const {
2518   assert((TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >::min_size() <= IndexSetSize),
2519     "Some sizes can't be allocated without recourse to"
2520     " linear allocation buffers");
2521   assert((TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >::min_size()*HeapWordSize == sizeof(TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >)),
2522     "else MIN_TREE_CHUNK_SIZE is wrong");
2523   assert(IndexSetStart != 0, "IndexSetStart not initialized");
2524   assert(IndexSetStride != 0, "IndexSetStride not initialized");
2525 }
2526 #endif
2527 
2528 void CompactibleFreeListSpace::printFLCensus(size_t sweep_count) const {
2529   assert_lock_strong(&_freelistLock);
2530   AdaptiveFreeList<FreeChunk> total;
2531   gclog_or_tty->print("end sweep# " SIZE_FORMAT "\n", sweep_count);
2532   AdaptiveFreeList<FreeChunk>::print_labels_on(gclog_or_tty, "size");
2533   size_t total_free = 0;
2534   for (size_t i = IndexSetStart; i < IndexSetSize; i += IndexSetStride) {
2535     const AdaptiveFreeList<FreeChunk> *fl = &_indexedFreeList[i];
2536     total_free += fl->count() * fl->size();
2537     if (i % (40*IndexSetStride) == 0) {
2538       AdaptiveFreeList<FreeChunk>::print_labels_on(gclog_or_tty, "size");
2539     }
2540     fl->print_on(gclog_or_tty);
2541     total.set_bfr_surp(    total.bfr_surp()     + fl->bfr_surp()    );