
src/hotspot/share/gc/g1/g1CollectedHeap.hpp

rev 48020 : [mq]: 8191821-rev-sang-poonam


 509   // Resize the heap if necessary after a full collection.
 510   void resize_if_necessary_after_full_collection();
 511 
 512   // Callback from VM_G1CollectForAllocation operation.
 513   // This function does everything necessary/possible to satisfy a
 514   // failed allocation request (including collection, expansion, etc.).
 515   HeapWord* satisfy_failed_allocation(size_t word_size,
 516                                       AllocationContext_t context,
 517                                       bool* succeeded);
 518 private:
 519   // Internal helpers that split the full GC up into steps
 520   // to increase readability.
 521   void abort_concurrent_cycle();
 522   void verify_before_full_collection(bool explicit_gc);
 523   void prepare_heap_for_full_collection();
 524   void prepare_heap_for_mutators();
 525   void abort_refinement();
 526   void verify_after_full_collection();
 527   void print_heap_after_full_collection(G1HeapTransition* heap_transition);
 528 
 529   // Helper method for satisfy_failed_allocation()
 530   HeapWord* satisfy_failed_allocation_helper(size_t word_size,
 531                                              AllocationContext_t context,
 532                                              bool do_gc,
 533                                              bool clear_all_soft_refs,
 534                                              bool expect_null_mutator_alloc_region,
 535                                              bool* gc_succeeded);
 536 
 537 protected:
 538   // Attempt to expand the heap sufficiently
 539   // to support an allocation of the given "word_size".  If
 540   // successful, perform the allocation and return the address of the
 541   // allocated block, or else "NULL".
 542   HeapWord* expand_and_allocate(size_t word_size, AllocationContext_t context);
 543 
 544   // Preserve any referents discovered by concurrent marking that have not yet been
 545   // copied by the STW pause.
 546   void preserve_cm_referents(G1ParScanThreadStateSet* per_thread_states);
 547   // Process any reference objects discovered during
 548   // an incremental evacuation pause.




 509   // Resize the heap if necessary after a full collection.
 510   void resize_if_necessary_after_full_collection();
 511 
 512   // Callback from VM_G1CollectForAllocation operation.
 513   // This function does everything necessary/possible to satisfy a
 514   // failed allocation request (including collection, expansion, etc.).
 515   HeapWord* satisfy_failed_allocation(size_t word_size,
 516                                       AllocationContext_t context,
 517                                       bool* succeeded);
 518 private:
 519   // Internal helpers that split the full GC up into steps
 520   // to increase readability.
 521   void abort_concurrent_cycle();
 522   void verify_before_full_collection(bool explicit_gc);
 523   void prepare_heap_for_full_collection();
 524   void prepare_heap_for_mutators();
 525   void abort_refinement();
 526   void verify_after_full_collection();
 527   void print_heap_after_full_collection(G1HeapTransition* heap_transition);
 528 
 529   // Helper that determines the G1VerifyType to use for young collections.
 530   G1HeapVerifier::G1VerifyType young_verification_type();
 531 
 532   // Helper method for satisfy_failed_allocation()
 533   HeapWord* satisfy_failed_allocation_helper(size_t word_size,
 534                                              AllocationContext_t context,
 535                                              bool do_gc,
 536                                              bool clear_all_soft_refs,
 537                                              bool expect_null_mutator_alloc_region,
 538                                              bool* gc_succeeded);
 539 
 540 protected:
 541   // Attempt to expand the heap sufficiently
 542   // to support an allocation of the given "word_size".  If
 543   // successful, perform the allocation and return the address of the
 544   // allocated block, or else "NULL".
 545   HeapWord* expand_and_allocate(size_t word_size, AllocationContext_t context);
 546 
 547   // Preserve any referents discovered by concurrent marking that have not yet been
 548   // copied by the STW pause.
 549   void preserve_cm_referents(G1ParScanThreadStateSet* per_thread_states);
 550   // Process any reference objects discovered during
 551   // an incremental evacuation pause.
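
The private helpers added above, together with the new young_verification_type() declaration, exist so the monolithic full-collection code can be broken into named, readable phases. The standalone C++ sketch below illustrates one plausible ordering of those phases. It is not HotSpot code: the do_full_collection_sketch() driver, the stub bodies, and the phase order are assumptions made for illustration, and parameters such as the G1HeapTransition* argument are dropped.

// Standalone sketch (not HotSpot code): one plausible ordering of the full-GC
// helper phases declared in the header above. Names mirror the declarations;
// bodies and ordering are illustrative assumptions.
#include <iostream>

namespace sketch {

// Simplified stand-ins for the private G1CollectedHeap helpers.
void abort_concurrent_cycle()           { std::cout << "abort in-progress concurrent marking\n"; }
void abort_refinement()                 { std::cout << "discard pending remembered-set refinement work\n"; }
void verify_before_full_collection(bool explicit_gc) {
  std::cout << "verify heap before full GC (explicit=" << explicit_gc << ")\n";
}
void prepare_heap_for_full_collection() { std::cout << "release mutator alloc regions, abandon collection set\n"; }
void prepare_heap_for_mutators()        { std::cout << "rebuild region sets, re-enable mutator allocation\n"; }
void verify_after_full_collection()     { std::cout << "verify heap after full GC\n"; }
void print_heap_after_full_collection() { std::cout << "log heap transition\n"; }

// Hypothetical driver: splits the full collection into the helper steps so
// each preparation/verification phase can be read and changed in isolation.
bool do_full_collection_sketch(bool explicit_gc) {
  abort_concurrent_cycle();
  verify_before_full_collection(explicit_gc);
  prepare_heap_for_full_collection();
  abort_refinement();

  std::cout << "  ... mark/compact phases elided ...\n";

  prepare_heap_for_mutators();
  verify_after_full_collection();
  print_heap_after_full_collection();
  return true;  // full collection completed
}

} // namespace sketch

int main() {
  sketch::do_full_collection_sketch(/*explicit_gc=*/true);
  return 0;
}

The exact placement of each call in HotSpot may differ; the sketch only shows how factoring the pause into these helpers serves the readability goal stated in the comment at lines 519-520.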


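The comments on satisfy_failed_allocation() and its helper suggest a retry ladder: each attempt is one call to satisfy_failed_allocation_helper() with explicit flags, and expand_and_allocate() covers the expansion case. The sketch below is a simplified, hypothetical model of that flow; it drops the AllocationContext_t and expect_null_mutator_alloc_region parameters, the placement of expand_and_allocate() inside the helper and the exact retry order are assumptions, and attempt_allocation() and run_full_gc() are invented stand-ins rather than HotSpot APIs.

// Standalone sketch (not HotSpot code): a possible retry ladder behind
// satisfy_failed_allocation(), expressed through the helper declared above.
#include <cstddef>
#include <cstdio>

using HeapWord = char;  // stand-in for HotSpot's HeapWord

// Invented stand-ins for the real heap operations (not HotSpot APIs).
HeapWord* attempt_allocation(std::size_t) {
  return nullptr;                                   // pretend every retry fails
}
HeapWord* expand_and_allocate(std::size_t words) {
  std::printf("try to expand heap for %zu words: heap already at maximum\n", words);
  return nullptr;                                   // pretend expansion fails too
}
bool run_full_gc(bool clear_all_soft_refs) {
  std::printf("full GC (clear_all_soft_refs=%d)\n", clear_all_soft_refs);
  return true;
}

// One rung of the ladder: retry the allocation, then try expansion, and
// optionally finish with a full GC so the next rung can retry afterwards.
HeapWord* satisfy_failed_allocation_helper(std::size_t word_size,
                                           bool do_gc,
                                           bool clear_all_soft_refs,
                                           bool* gc_succeeded) {
  *gc_succeeded = true;
  if (HeapWord* result = attempt_allocation(word_size)) {
    return result;                                  // retry succeeded
  }
  if (HeapWord* result = expand_and_allocate(word_size)) {
    return result;                                  // expansion satisfied the request
  }
  if (do_gc) {
    *gc_succeeded = run_full_gc(clear_all_soft_refs);
  }
  return nullptr;                                   // caller decides whether to try again
}

HeapWord* satisfy_failed_allocation(std::size_t word_size, bool* succeeded) {
  // 1. Retry allocation/expansion, then run a full GC that keeps soft references.
  HeapWord* result = satisfy_failed_allocation_helper(word_size, /*do_gc=*/true,
                                                      /*clear_all_soft_refs=*/false, succeeded);
  if (result != nullptr || !*succeeded) return result;

  // 2. Same again, but clear all soft references during the full GC.
  result = satisfy_failed_allocation_helper(word_size, /*do_gc=*/true,
                                            /*clear_all_soft_refs=*/true, succeeded);
  if (result != nullptr || !*succeeded) return result;

  // 3. Final attempt after the soft-reference-clearing GC; no further GC is run.
  return satisfy_failed_allocation_helper(word_size, /*do_gc=*/false,
                                          /*clear_all_soft_refs=*/false, succeeded);
}

int main() {
  bool succeeded = false;
  HeapWord* p = satisfy_failed_allocation(16, &succeeded);
  std::printf("result=%p succeeded=%d\n", static_cast<void*>(p), succeeded);
  return 0;
}

The real order and number of attempts may differ; the point is only that every attempt is expressed as one helper call with explicit flags, which keeps the failure-handling policy visible at the call site.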