--- old/src/share/vm/gc_implementation/shared/vmGCOperations.cpp 2015-02-05 16:34:49.885948157 +0100
+++ new/src/share/vm/gc_implementation/shared/vmGCOperations.cpp 2015-02-05 16:34:49.797951023 +0100
@@ -179,10 +179,10 @@
 
   GenCollectedHeap* gch = GenCollectedHeap::heap();
   GCCauseSetter gccs(gch, _gc_cause);
-  _res = gch->satisfy_failed_allocation(_size, _tlab);
-  assert(gch->is_in_reserved_or_null(_res), "result not in heap");
+  _result = gch->satisfy_failed_allocation(_word_size, _tlab);
+  assert(gch->is_in_reserved_or_null(_result), "result not in heap");
 
-  if (_res == NULL && GC_locker::is_active_and_needs_gc()) {
+  if (_result == NULL && GC_locker::is_active_and_needs_gc()) {
     set_gc_locked();
   }
 }
@@ -299,3 +299,9 @@
     set_gc_locked();
   }
 }
+
+VM_CollectForAllocation::VM_CollectForAllocation(size_t word_size, uint gc_count_before, GCCause::Cause cause) :
+    VM_GC_Operation(gc_count_before, cause), _result(NULL), _word_size(word_size) {
+  // G1's incremental collections are not always caused by an allocation, which is indicated by word_size = 0.
+  assert(_word_size != 0 || UseG1GC, "word_size = 0 should only happen with G1");
+}
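
For context: the new VM_CollectForAllocation constructor centralizes the _result/_word_size bookkeeping that VM_GenCollectForAllocation previously kept in its own _res/_size fields, and its assert documents that a zero word size is only expected for G1's non-allocation-driven collections. The following standalone C++ sketch illustrates that constructor-chaining pattern only; the class and variable names (CollectForAllocationSketch, GenCollectSketch, G1IncrementalSketch, use_g1) are hypothetical and are not HotSpot code.

// Minimal standalone sketch of the constructor-chaining pattern above.
// All names here are illustrative, not HotSpot identifiers.
#include <cassert>
#include <cstddef>
#include <cstdio>

static bool use_g1 = false;  // stands in for the UseG1GC flag

class CollectForAllocationSketch {
 protected:
  void*  _result;     // filled in by the concrete operation's doit()
  size_t _word_size;  // 0 only for collections not caused by an allocation
 public:
  explicit CollectForAllocationSketch(size_t word_size)
      : _result(NULL), _word_size(word_size) {
    // Mirrors the assert added in the patch: a zero word size is only
    // expected when a G1-style collector starts a cycle on its own.
    assert(_word_size != 0 || use_g1, "word_size = 0 should only happen with G1");
  }
  void* result() const { return _result; }
};

// Allocation-driven operation: always carries a real request size.
class GenCollectSketch : public CollectForAllocationSketch {
 public:
  explicit GenCollectSketch(size_t word_size)
      : CollectForAllocationSketch(word_size) {}
};

// G1-style incremental operation: may be started without an allocation.
class G1IncrementalSketch : public CollectForAllocationSketch {
 public:
  G1IncrementalSketch() : CollectForAllocationSketch(0 /* no allocation */) {}
};

int main() {
  GenCollectSketch gen(128);   // non-zero word size: fine with any collector
  use_g1 = true;
  G1IncrementalSketch g1;      // word_size = 0 is allowed only in G1 mode
  std::printf("results: %p %p\n", gen.result(), g1.result());
  return 0;
}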