--- old/src/share/vm/gc/g1/g1Allocator.cpp 2015-05-27 14:30:17.937777968 -0400
+++ new/src/share/vm/gc/g1/g1Allocator.cpp 2015-05-27 14:30:16.609702291 -0400
@@ -26,6 +26,7 @@
 #include "gc/g1/g1Allocator.hpp"
 #include "gc/g1/g1CollectedHeap.inline.hpp"
 #include "gc/g1/g1CollectorPolicy.hpp"
+#include "gc/g1/g1MarkSweep.hpp"
 #include "gc/g1/heapRegion.inline.hpp"
 #include "gc/g1/heapRegionSet.inline.hpp"
 
@@ -44,6 +45,8 @@
                                              HeapRegion** retained_old) {
   HeapRegion* retained_region = *retained_old;
   *retained_old = NULL;
+  assert(retained_region == NULL || !retained_region->is_archive(),
+         "Archive region should not be alloc region");
 
   // We will discard the current GC alloc region if:
   // a) it's in the collection set (it can happen!),
@@ -168,3 +171,123 @@
     }
   }
 }
+
+
+G1RecordingAllocator* G1RecordingAllocator::create_allocator(G1CollectedHeap* g1h) {
+  // Create the recording allocator, and also enable archive object checking
+  // in mark-sweep, since we will be creating archive regions.
+  G1RecordingAllocator* result = new G1RecordingAllocator(g1h);
+  G1MarkSweep::enable_archive_object_check();
+  return result;
+}
+
+HeapRegion* G1RecordingAllocator::alloc_new_region() {
+  // Allocate the highest available region in the reserved heap,
+  // and add it to our list of allocated regions. It is marked
+  // archive and added to the old set.
+  HeapRegion* hr = _g1h->alloc_highest_available_region();
+  assert(hr->top() == hr->bottom(), "expected empty region");
+  hr->set_archive();
+  _g1h->_old_set.add(hr);
+  _g1h->_hr_printer.alloc(hr, G1HRPrinter::Archive);
+  _allocated_regions.append(hr);
+  _allocation_region = hr;
+
+  // Set up _bottom and _max to begin allocating in the lowest
+  // min_region_size'd chunk of the allocated G1 region.
+  _bottom = hr->bottom();
+  _max = _bottom + HeapRegion::min_region_size_in_words();
+
+  // Tell mark-sweep that objects in this region are not to be marked.
+  G1MarkSweep::mark_range_archive(_bottom, hr->end() - 1);
+
+  // Since we've modified the old set, call update_sizes.
+  _g1h->g1mm()->update_sizes();
+  return hr;
+}
+
+HeapWord* G1RecordingAllocator::record_mem_allocate(size_t word_size) {
+  if (_allocation_region == NULL) {
+    alloc_new_region();
+  }
+  HeapWord* old_top = _allocation_region->top();
+  assert(_bottom >= _allocation_region->bottom(), "inconsistent allocation state");
+  assert(_max <= _allocation_region->end(), "inconsistent allocation state");
+  assert(_bottom <= old_top && old_top <= _max, "inconsistent allocation state");
+
+  // Allocate the next word_size words in the current allocation chunk.
+  // If allocation would cross the _max boundary, insert a fill and begin
+  // at the base of the next min_region_size'd chunk. Also advance to the next
+  // chunk if we don't yet cross the boundary, but the remainder would be too
+  // small to fill.
+  HeapWord* new_top = old_top + word_size;
+  size_t remainder = (size_t)(_max - new_top);
+  if ((new_top > _max) ||
+      ((new_top < _max) && (remainder < CollectedHeap::min_fill_size()))) {
+    if (old_top != _max) {
+      size_t fill_size = _max - old_top;
+      CollectedHeap::fill_with_object(old_top, fill_size);
+      _summary_bytes_used += fill_size * HeapWordSize;
+    }
+    _allocation_region->set_top(_max);
+    old_top = _bottom = _max;
+
+    // Check if we've just used up the last min_region_size'd chunk
+    // in the current region, and if so, allocate a new one.
+    if (_bottom != _allocation_region->end()) {
+      _max = _bottom + HeapRegion::min_region_size_in_words();
+    } else {
+      alloc_new_region();
+      old_top = _allocation_region->bottom();
+    }
+  }
+  _allocation_region->set_top(old_top + word_size);
+  _summary_bytes_used += word_size * HeapWordSize;
+
+  return old_top;
+}
+
+void G1RecordingAllocator::complete_recording(GrowableArray<MemRegion>* ranges,
+                                              uint end_alignment) {
+  assert((end_alignment >> LogHeapWordSize) < HeapRegion::min_region_size_in_words(),
+         "alignment too large");
+  // If we've allocated nothing, simply return.
+  if (_allocation_region == NULL) {
+    return;
+  }
+
+  // If an end alignment was requested, insert filler objects.
+  if (end_alignment != 0) {
+    HeapWord* currtop = _allocation_region->top();
+    HeapWord* newtop = (HeapWord*)round_to((intptr_t)currtop, end_alignment);
+    size_t fill_size = newtop - currtop;
+    if (fill_size != 0) {
+      HeapWord* fill = record_mem_allocate(fill_size);
+      CollectedHeap::fill_with_objects(fill, fill_size);
+    }
+  }
+
+  // Loop through the allocated regions, and create MemRegions summarizing
+  // the allocated address range, combining contiguous ranges. Add the
+  // MemRegions to the growable array provided by the caller.
+  int index = _allocated_regions.length() - 1;
+  assert(_allocated_regions.at(index) == _allocation_region, "expect current region at end of array");
+  HeapWord* base_address = _allocation_region->bottom();
+  HeapWord* top = base_address;
+
+  while (index >= 0) {
+    HeapRegion* next = _allocated_regions.at(index--);
+    HeapWord* new_base = next->bottom();
+    HeapWord* new_top = next->top();
+    if (new_base != top) {
+      ranges->append(MemRegion(base_address, top - base_address));
+      base_address = new_base;
+    }
+    top = new_top;
+  }
+
+  ranges->append(MemRegion(base_address, top - base_address));
+  _allocated_regions.clear();
+  _allocation_region = NULL;
+}