< prev index next >

src/hotspot/share/gc/g1/g1CollectedHeap.hpp

Print this page
rev 49831 : imported patch 8201492-properly-implement-non-contiguous-reference-processing
rev 49836 : [mq]: 8202017-reference-processor-remove-enqueue


 492   void verify_before_full_collection(bool explicit_gc);
 493   void prepare_heap_for_full_collection();
 494   void prepare_heap_for_mutators();
 495   void abort_refinement();
 496   void verify_after_full_collection();
 497   void print_heap_after_full_collection(G1HeapTransition* heap_transition);
 498 
 499   // Helper method for satisfy_failed_allocation()
 500   HeapWord* satisfy_failed_allocation_helper(size_t word_size,
 501                                              bool do_gc,
 502                                              bool clear_all_soft_refs,
 503                                              bool expect_null_mutator_alloc_region,
 504                                              bool* gc_succeeded);
 505 
 506   // Attempting to expand the heap sufficiently
 507   // to support an allocation of the given "word_size".  If
 508   // successful, perform the allocation and return the address of the
 509   // allocated block, or else "NULL".
 510   HeapWord* expand_and_allocate(size_t word_size);
 511 
 512   // Process any reference objects discovered during
 513   // an incremental evacuation pause.
 514   void process_discovered_references(G1ParScanThreadStateSet* per_thread_states);
 515 
 516   // Enqueue any remaining discovered references
 517   // after processing.
 518   void enqueue_discovered_references(G1ParScanThreadStateSet* per_thread_states);
 519 
 520   // Merges the information gathered on a per-thread basis for all worker threads
 521   // during GC into global variables.
 522   void merge_per_thread_state_info(G1ParScanThreadStateSet* per_thread_states);
 523 public:
 524   G1YoungRemSetSamplingThread* sampling_thread() const { return _young_gen_sampling_thread; } // Accessor: returns the G1YoungRemSetSamplingThread held in _young_gen_sampling_thread.
 525 
 526   WorkGang* workers() const { return _workers; } // Accessor: returns the heap's WorkGang (_workers); presumably the GC worker threads — confirm against initialization site.
 527 
       // Accessor: returns the heap's G1Allocator instance (_allocator).
 528   G1Allocator* allocator() {
 529     return _allocator;
 530   }
 531 
       // Accessor: returns the heap verification helper (_verifier).
 532   G1HeapVerifier* verifier() {
 533     return _verifier;
 534   }
 535 
 536   G1MonitoringSupport* g1mm() {
 537     assert(_g1mm != NULL, "should have been initialized");
 538     return _g1mm;




 492   void verify_before_full_collection(bool explicit_gc);
 493   void prepare_heap_for_full_collection();
 494   void prepare_heap_for_mutators();
 495   void abort_refinement();
 496   void verify_after_full_collection();
 497   void print_heap_after_full_collection(G1HeapTransition* heap_transition);
 498 
 499   // Helper method for satisfy_failed_allocation()
 500   HeapWord* satisfy_failed_allocation_helper(size_t word_size,
 501                                              bool do_gc,
 502                                              bool clear_all_soft_refs,
 503                                              bool expect_null_mutator_alloc_region,
 504                                              bool* gc_succeeded);
 505 
 506   // Attempting to expand the heap sufficiently
 507   // to support an allocation of the given "word_size".  If
 508   // successful, perform the allocation and return the address of the
 509   // allocated block, or else "NULL".
 510   HeapWord* expand_and_allocate(size_t word_size);
 511 
 512   // Process any reference objects discovered.

 513   void process_discovered_references(G1ParScanThreadStateSet* per_thread_states);
 514 
 515   // If during an initial mark pause we install a pending list head which is not otherwise reachable
 516   // ensure that it is marked in the bitmap for concurrent marking to discover.
 517   void make_pending_list_reachable();
 518 
 519   // Merges the information gathered on a per-thread basis for all worker threads
 520   // during GC into global variables.
 521   void merge_per_thread_state_info(G1ParScanThreadStateSet* per_thread_states);
 522 public:
 523   G1YoungRemSetSamplingThread* sampling_thread() const { return _young_gen_sampling_thread; } // Accessor: returns the G1YoungRemSetSamplingThread held in _young_gen_sampling_thread.
 524 
 525   WorkGang* workers() const { return _workers; } // Accessor: returns the heap's WorkGang (_workers); presumably the GC worker threads — confirm against initialization site.
 526 
       // Accessor: returns the heap's G1Allocator instance (_allocator).
 527   G1Allocator* allocator() {
 528     return _allocator;
 529   }
 530 
       // Accessor: returns the heap verification helper (_verifier).
 531   G1HeapVerifier* verifier() {
 532     return _verifier;
 533   }
 534 
 535   G1MonitoringSupport* g1mm() {
 536     assert(_g1mm != NULL, "should have been initialized");
 537     return _g1mm;


< prev index next >