src/share/vm/gc/shared/collectorPolicy.cpp


*** 618,628 ****
      if (result != NULL) {
        assert(gch->is_in_reserved(result), "result not in heap");
        return result;
      }

!     if (GC_locker::is_active_and_needs_gc()) {
        if (is_tlab) {
          return NULL; // Caller will retry allocating individual object.
        }
        if (!gch->is_maximal_no_gc()) {
          // Try and expand heap to satisfy request.
--- 618,628 ----
      if (result != NULL) {
        assert(gch->is_in_reserved(result), "result not in heap");
        return result;
      }

!     if (GCLocker::is_active_and_needs_gc()) {
        if (is_tlab) {
          return NULL; // Caller will retry allocating individual object.
        }
        if (!gch->is_maximal_no_gc()) {
          // Try and expand heap to satisfy request.
*** 645,655 ****
        // rather than causing more, now probably unnecessary, GC attempts.
        JavaThread* jthr = JavaThread::current();
        if (!jthr->in_critical()) {
          MutexUnlocker mul(Heap_lock);
          // Wait for JNI critical section to be exited
!         GC_locker::stall_until_clear();
          gclocker_stalled_count += 1;
          continue;
        } else {
          if (CheckJNICalls) {
            fatal("Possible deadlock due to allocating while"
--- 645,655 ----
        // rather than causing more, now probably unnecessary, GC attempts.
        JavaThread* jthr = JavaThread::current();
        if (!jthr->in_critical()) {
          MutexUnlocker mul(Heap_lock);
          // Wait for JNI critical section to be exited
!         GCLocker::stall_until_clear();
          gclocker_stalled_count += 1;
          continue;
        } else {
          if (CheckJNICalls) {
            fatal("Possible deadlock due to allocating while"
*** 726,736 ****
    GenCollectedHeap *gch = GenCollectedHeap::heap();
    GCCauseSetter x(gch, GCCause::_allocation_failure);
    HeapWord* result = NULL;

    assert(size != 0, "Precondition violated");
!   if (GC_locker::is_active_and_needs_gc()) {
      // GC locker is active; instead of a collection we will attempt
      // to expand the heap, if there's room for expansion.
      if (!gch->is_maximal_no_gc()) {
        result = expand_heap_and_allocate(size, is_tlab);
      }
--- 726,736 ----
    GenCollectedHeap *gch = GenCollectedHeap::heap();
    GCCauseSetter x(gch, GCCause::_allocation_failure);
    HeapWord* result = NULL;

    assert(size != 0, "Precondition violated");
!   if (GCLocker::is_active_and_needs_gc()) {
      // GC locker is active; instead of a collection we will attempt
      // to expand the heap, if there's room for expansion.
      if (!gch->is_maximal_no_gc()) {
        result = expand_heap_and_allocate(size, is_tlab);
      }
*** 813,824 ****
      MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
      if (result != NULL) {
        return result;
      }

!     if (GC_locker::is_active_and_needs_gc()) {
!       // If the GC_locker is active, just expand and allocate.
        // If that does not succeed, wait if this thread is not
        // in a critical section itself.
        result =
          loader_data->metaspace_non_null()->expand_and_allocate(word_size,
                                                                 mdtype);
--- 813,824 ----
      MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
      if (result != NULL) {
        return result;
      }

!     if (GCLocker::is_active_and_needs_gc()) {
!       // If the GCLocker is active, just expand and allocate.
        // If that does not succeed, wait if this thread is not
        // in a critical section itself.
        result =
          loader_data->metaspace_non_null()->expand_and_allocate(word_size,
                                                                 mdtype);
*** 826,836 ****
        return result;
      }
      JavaThread* jthr = JavaThread::current();
      if (!jthr->in_critical()) {
        // Wait for JNI critical section to be exited
!       GC_locker::stall_until_clear();
        // The GC invoked by the last thread leaving the critical
        // section will be a young collection and a full collection
        // is (currently) needed for unloading classes so continue
        // to the next iteration to get a full GC.
        continue;
--- 826,836 ----
        return result;
      }
      JavaThread* jthr = JavaThread::current();
      if (!jthr->in_critical()) {
        // Wait for JNI critical section to be exited
!       GCLocker::stall_until_clear();
        // The GC invoked by the last thread leaving the critical
        // section will be a young collection and a full collection
        // is (currently) needed for unloading classes so continue
        // to the next iteration to get a full GC.
        continue;
*** 885,895 ****
  bool GenCollectorPolicy::should_try_older_generation_allocation(
          size_t word_size) const {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    size_t young_capacity = gch->young_gen()->capacity_before_gc();
    return    (word_size > heap_word_size(young_capacity))
!          || GC_locker::is_active_and_needs_gc()
           || gch->incremental_collection_failed();
  }


  //
--- 885,895 ----
  bool GenCollectorPolicy::should_try_older_generation_allocation(
          size_t word_size) const {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    size_t young_capacity = gch->young_gen()->capacity_before_gc();
    return    (word_size > heap_word_size(young_capacity))
!          || GCLocker::is_active_and_needs_gc()
           || gch->incremental_collection_failed();
  }


  //