
src/share/vm/gc_implementation/shared/vmGCOperations.cpp

rev 7474 : imported patch cleanup

*** 177,190 ****
  void VM_GenCollectForAllocation::doit() {
    SvcGCMarker sgcm(SvcGCMarker::MINOR);
  
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    GCCauseSetter gccs(gch, _gc_cause);
!   _res = gch->satisfy_failed_allocation(_size, _tlab);
!   assert(gch->is_in_reserved_or_null(_res), "result not in heap");
!   if (_res == NULL && GC_locker::is_active_and_needs_gc()) {
      set_gc_locked();
    }
  }
  
  void VM_GenCollectFull::doit() {
--- 177,190 ----
  void VM_GenCollectForAllocation::doit() {
    SvcGCMarker sgcm(SvcGCMarker::MINOR);
  
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    GCCauseSetter gccs(gch, _gc_cause);
!   _result = gch->satisfy_failed_allocation(_word_size, _tlab);
!   assert(gch->is_in_reserved_or_null(_result), "result not in heap");
!   if (_result == NULL && GC_locker::is_active_and_needs_gc()) {
      set_gc_locked();
    }
  }
  
  void VM_GenCollectFull::doit() {
*** 297,301 ****
--- 297,307 ----
    if (GC_locker::is_active_and_needs_gc()) {
      set_gc_locked();
    }
  }
+ 
+ VM_CollectForAllocation::VM_CollectForAllocation(size_t word_size, uint gc_count_before, GCCause::Cause cause) :
+   VM_GC_Operation(gc_count_before, cause), _result(NULL), _word_size(word_size) {
+   // G1's incremental collections are not always caused by an allocation, which is indicated by word_size = 0.
+   assert(_word_size != 0 || UseG1GC, "word_size = 0 should only happen with G1");
+ }
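
The sketch below is a minimal, self-contained illustration of the pattern this patch introduces, not the real HotSpot sources: a VM_CollectForAllocation-style base class owns the allocation request (_word_size) and the allocation result (_result), and a subclass such as VM_GenCollectForAllocation only fills in _result from its doit(). The class and flag names mirror the diff, but every body is a simplified stand-in; the unsigned GC counter, the char* result type, the one-argument standard assert, and the main() driver are illustrative assumptions.

// Minimal sketch of the _result/_word_size consolidation; simplified stand-ins
// for the real HotSpot classes.
#include <cassert>
#include <cstddef>
#include <cstdio>

struct GCCause { enum Cause { _allocation_failure, _g1_inc_collection_pause }; };

static const bool UseG1GC = false;  // stand-in for the real -XX:+UseG1GC flag

class VM_GC_Operation {
 protected:
  unsigned       _gc_count_before;  // collections seen before this op was queued
  GCCause::Cause _cause;
 public:
  VM_GC_Operation(unsigned gc_count_before, GCCause::Cause cause)
      : _gc_count_before(gc_count_before), _cause(cause) {}
  virtual ~VM_GC_Operation() {}
  virtual void doit() = 0;
};

class VM_CollectForAllocation : public VM_GC_Operation {
 protected:
  char*  _result;     // memory obtained after the collection, or NULL
  size_t _word_size;  // size of the failed allocation request, in words
 public:
  VM_CollectForAllocation(size_t word_size, unsigned gc_count_before, GCCause::Cause cause)
      : VM_GC_Operation(gc_count_before, cause), _result(NULL), _word_size(word_size) {
    // Only G1's incremental pauses may be scheduled without a pending
    // allocation, which is what word_size == 0 means.
    assert(_word_size != 0 || UseG1GC);
  }
  char* result() const { return _result; }
};

class VM_GenCollectForAllocation : public VM_CollectForAllocation {
  bool _tlab;
 public:
  VM_GenCollectForAllocation(size_t word_size, bool tlab,
                             unsigned gc_count_before, GCCause::Cause cause)
      : VM_CollectForAllocation(word_size, gc_count_before, cause), _tlab(tlab) {}
  virtual void doit() {
    // The real doit() asks the heap to satisfy the failed allocation; this
    // stub just records that the collection produced nothing.
    _result = NULL;
  }
};

int main() {
  VM_GenCollectForAllocation op(128 /* words */, false /* tlab */, 0,
                                GCCause::_allocation_failure);
  op.doit();
  std::printf("result = %p\n", static_cast<void*>(op.result()));
  return 0;
}

Keeping the result and request size in the shared base class means each collector-specific operation no longer declares its own _res/_size fields, which is what the rename in the first hunk reflects.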