
src/hotspot/share/gc/g1/g1Allocator.cpp

*** 29,38 ****
--- 29,39 ----
  #include "gc/g1/g1CollectedHeap.inline.hpp"
  #include "gc/g1/g1Policy.hpp"
  #include "gc/g1/heapRegion.inline.hpp"
  #include "gc/g1/heapRegionSet.inline.hpp"
  #include "gc/g1/heapRegionType.hpp"
+ #include "gc/shared/fill.hpp"
  #include "utilities/align.hpp"

  G1Allocator::G1Allocator(G1CollectedHeap* heap) :
    _g1h(heap),
    _survivor_is_full(false),
*** 404,417 ****
    // chunk if we don't yet cross the boundary, but the remainder would be too
    // small to fill.
    HeapWord* new_top = old_top + word_size;
    size_t remainder = pointer_delta(_max, new_top);
    if ((new_top > _max) ||
!       ((new_top < _max) && (remainder < CollectedHeap::min_fill_size()))) {
      if (old_top != _max) {
        size_t fill_size = pointer_delta(_max, old_top);
!       CollectedHeap::fill_with_object(old_top, fill_size);
        _summary_bytes_used += fill_size * HeapWordSize;
      }
      _allocation_region->set_top(_max);
      old_top = _bottom = _max;
--- 405,418 ----
    // chunk if we don't yet cross the boundary, but the remainder would be too
    // small to fill.
    HeapWord* new_top = old_top + word_size;
    size_t remainder = pointer_delta(_max, new_top);
    if ((new_top > _max) ||
!       ((new_top < _max) && (remainder < Fill::min_size()))) {
      if (old_top != _max) {
        size_t fill_size = pointer_delta(_max, old_top);
!       Fill::range(old_top, fill_size);
        _summary_bytes_used += fill_size * HeapWordSize;
      }
      _allocation_region->set_top(_max);
      old_top = _bottom = _max;
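
Note: the hunk above advances to the next allocation chunk either when the requested allocation would cross the chunk's _max boundary, or when the leftover tail below _max would be too small to cover with a filler object. A minimal standalone sketch of that decision (not JDK code), with plain size_t word indices standing in for HeapWord* values and a hypothetical MIN_FILL_WORDS constant standing in for Fill::min_size():

#include <cstddef>
#include <iostream>

// Standalone sketch (not JDK code) of the chunk-boundary decision above.
static const std::size_t MIN_FILL_WORDS = 2;  // assumed minimum filler size, in words

// Returns true when the allocation must skip to the next chunk: either the
// request would cross the chunk boundary 'max', or it would leave a tail
// below 'max' that is too small to be covered by a filler object.
bool must_advance_chunk(std::size_t old_top, std::size_t word_size, std::size_t max) {
  std::size_t new_top = old_top + word_size;
  if (new_top > max) {
    return true;                              // request crosses the boundary
  }
  std::size_t remainder = max - new_top;      // words left below the boundary
  return (new_top < max) && (remainder < MIN_FILL_WORDS);
}

int main() {
  // Chunk ends at word 100; a 95-word request from 0 leaves a 5-word tail: fits.
  std::cout << must_advance_chunk(0, 95, 100) << '\n';   // prints 0
  // A 99-word request leaves a 1-word tail, too small to fill: advance.
  std::cout << must_advance_chunk(0, 99, 100) << '\n';   // prints 1
  // A 105-word request crosses the boundary outright: advance.
  std::cout << must_advance_chunk(0, 105, 100) << '\n';  // prints 1
  return 0;
}
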
*** 448,469 ****
    if (end_alignment_in_bytes != 0) {
      HeapWord* currtop = _allocation_region->top();
      HeapWord* newtop = align_up(currtop, end_alignment_in_bytes);
      size_t fill_size = pointer_delta(newtop, currtop);
      if (fill_size != 0) {
!       if (fill_size < CollectedHeap::min_fill_size()) {
          // If the required fill is smaller than we can represent,
          // bump up to the next aligned address. We know we won't exceed the current
          // region boundary because the max supported alignment is smaller than the min
          // region size, and because the allocation code never leaves space smaller than
!         // the min_fill_size at the top of the current allocation region.
!         newtop = align_up(currtop + CollectedHeap::min_fill_size(),
!                           end_alignment_in_bytes);
          fill_size = pointer_delta(newtop, currtop);
        }
        HeapWord* fill = archive_mem_allocate(fill_size);
!       CollectedHeap::fill_with_objects(fill, fill_size);
      }
    }

    // Loop through the allocated regions, and create MemRegions summarizing
    // the allocated address range, combining contiguous ranges. Add the
--- 449,469 ----
    if (end_alignment_in_bytes != 0) {
      HeapWord* currtop = _allocation_region->top();
      HeapWord* newtop = align_up(currtop, end_alignment_in_bytes);
      size_t fill_size = pointer_delta(newtop, currtop);
      if (fill_size != 0) {
!       if (fill_size < Fill::min_size()) {
          // If the required fill is smaller than we can represent,
          // bump up to the next aligned address. We know we won't exceed the current
          // region boundary because the max supported alignment is smaller than the min
          // region size, and because the allocation code never leaves space smaller than
!         // the Fill::min_size() at the top of the current allocation region.
!         newtop = align_up(currtop + Fill::min_size(), end_alignment_in_bytes);
          fill_size = pointer_delta(newtop, currtop);
        }
        HeapWord* fill = archive_mem_allocate(fill_size);
!       Fill::range(fill, fill_size);
      }
    }

    // Loop through the allocated regions, and create MemRegions summarizing
    // the allocated address range, combining contiguous ranges. Add the
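
Note: the hunk above computes how much filler is needed to align the region top, bumping past the minimum filler size when the raw gap would be too small to represent as an object. A minimal standalone sketch of that computation (not JDK code), again with size_t word indices in place of HeapWord* values, a power-of-two 'alignment' in words, and a hypothetical MIN_FILL_WORDS constant in place of Fill::min_size():

#include <cstddef>
#include <iostream>

// Standalone sketch (not JDK code) of the end-alignment fill computation above.
static const std::size_t MIN_FILL_WORDS = 2;  // assumed minimum filler size, in words

std::size_t align_up(std::size_t value, std::size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

// Returns how many words of filler bring 'top' up to an aligned address,
// never producing a non-zero filler smaller than MIN_FILL_WORDS.
std::size_t end_fill_words(std::size_t top, std::size_t alignment) {
  std::size_t new_top = align_up(top, alignment);
  std::size_t fill = new_top - top;
  if (fill != 0 && fill < MIN_FILL_WORDS) {
    // Gap too small to represent as a filler object: bump past the minimum
    // filler size, then realign.
    new_top = align_up(top + MIN_FILL_WORDS, alignment);
    fill = new_top - top;
  }
  return fill;
}

int main() {
  std::cout << end_fill_words(13, 8) << '\n';  // gap of 3 >= 2: prints 3
  std::cout << end_fill_words(15, 8) << '\n';  // gap of 1 < 2: bump, prints 9
  std::cout << end_fill_words(16, 8) << '\n';  // already aligned: prints 0
  return 0;
}
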