src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp

NOTE: This capture renders the same region of the file (original lines
810-858) twice: first the version that calls
ReferenceProcessor::create_ref_processor, then the revised version that
constructs the ReferenceProcessor directly. The leading numbers on each
line are the original file's line numbers, preserved from the scrape.
 810 
 // Closure glue (first rendering): one-line virtual overrides that forward
 // to helpers.  Both the full-width (oop*) and compressed (narrowOop*)
 // overloads delegate to the same underlying routine.
 811 void PSParallelCompact::KeepAliveClosure::do_oop(oop* p)       { PSParallelCompact::KeepAliveClosure::do_oop_work(p); }
 812 void PSParallelCompact::KeepAliveClosure::do_oop(narrowOop* p) { PSParallelCompact::KeepAliveClosure::do_oop_work(p); }
 813 
 // Two static AdjustPointerClosure instances, distinguished only by the
 // boolean passed at construction: 'true' for the root-pointer variant,
 // 'false' for the ordinary (non-root) variant.
 814 PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_root_pointer_closure(true);
 815 PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_pointer_closure(false);
 816 
 // Both widths forward to adjust_pointer(), passing the _is_root flag that
 // was fixed at construction above.
 817 void PSParallelCompact::AdjustPointerClosure::do_oop(oop* p)       { adjust_pointer(p, _is_root); }
 818 void PSParallelCompact::AdjustPointerClosure::do_oop(narrowOop* p) { adjust_pointer(p, _is_root); }
 819 
 // VoidClosure hook: delegates to the closure's compaction manager to drain
 // its marking stacks.
 820 void PSParallelCompact::FollowStackClosure::do_void() { _compaction_manager->follow_marking_stacks(); }
 821 
 // Forwards to mark_and_push() with the closure's compaction manager; both
 // oop widths take the same path.
 822 void PSParallelCompact::MarkAndPushClosure::do_oop(oop* p)       { mark_and_push(_compaction_manager, p); }
 823 void PSParallelCompact::MarkAndPushClosure::do_oop(narrowOop* p) { mark_and_push(_compaction_manager, p); }
 824 
 // post_initialize (first rendering — pre-change version): wires up the
 // collector's reference processor, perf counters, and the static state of
 // ParCompactionManager, once the heap itself exists.  This version uses
 // the ReferenceProcessor::create_ref_processor factory.
 825 void PSParallelCompact::post_initialize() {
 826   ParallelScavengeHeap* heap = gc_heap();
 827   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 828 
 // The ref processor's span covers the heap's entire reserved region.
 829   MemRegion mr = heap->reserved_region();
 830   _ref_processor = ReferenceProcessor::create_ref_processor(
 831     mr,                         // span
 // (The four blank lines below are a page-break artifact of the scrape;
 //  the argument list of the call above continues immediately after them.)




 832     true,                       // atomic_discovery
 833     true,                       // mt_discovery
 834     &_is_alive_closure,
 835     ParallelGCThreads,
 836     ParallelRefProcEnabled);
 // Perf counters for this collector; generation ordinal 1.
 837   _counters = new CollectorCounters("PSParallelCompact", 1);
 838 
 839   // Initialize static fields in ParCompactionManager.
 840   ParCompactionManager::initialize(mark_bitmap());
 841 }
 842 
 843 bool PSParallelCompact::initialize() {
 844   ParallelScavengeHeap* heap = gc_heap();
 845   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 846   MemRegion mr = heap->reserved_region();
 847 
 848   // Was the old gen get allocated successfully?
 849   if (!heap->old_gen()->is_allocated()) {
 850     return false;
 851   }
 852 
 853   initialize_space_info();
 854   initialize_dead_wood_limiter();
 855 
 856   if (!_mark_bitmap.initialize(mr)) {




 810 
 // Closure glue (second rendering — identical to the first): one-line
 // virtual overrides forwarding to helpers; oop* and narrowOop* overloads
 // share one underlying routine each.
 811 void PSParallelCompact::KeepAliveClosure::do_oop(oop* p)       { PSParallelCompact::KeepAliveClosure::do_oop_work(p); }
 812 void PSParallelCompact::KeepAliveClosure::do_oop(narrowOop* p) { PSParallelCompact::KeepAliveClosure::do_oop_work(p); }
 813 
 // Static closure instances: root-pointer variant (true) vs. non-root (false).
 814 PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_root_pointer_closure(true);
 815 PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_pointer_closure(false);
 816 
 // Forward to adjust_pointer() with the construction-time _is_root flag.
 817 void PSParallelCompact::AdjustPointerClosure::do_oop(oop* p)       { adjust_pointer(p, _is_root); }
 818 void PSParallelCompact::AdjustPointerClosure::do_oop(narrowOop* p) { adjust_pointer(p, _is_root); }
 819 
 // VoidClosure hook: drains the compaction manager's marking stacks.
 820 void PSParallelCompact::FollowStackClosure::do_void() { _compaction_manager->follow_marking_stacks(); }
 821 
 // Forward to mark_and_push() with this closure's compaction manager.
 822 void PSParallelCompact::MarkAndPushClosure::do_oop(oop* p)       { mark_and_push(_compaction_manager, p); }
 823 void PSParallelCompact::MarkAndPushClosure::do_oop(narrowOop* p) { mark_and_push(_compaction_manager, p); }
 824 
 // post_initialize (second rendering — post-change version): same overall
 // job as above, but the ReferenceProcessor is now constructed directly
 // instead of going through the create_ref_processor factory.  The
 // constructor takes explicit mt-processing / mt-discovery degrees, so
 // "mt processing" is enabled only when ParallelRefProcEnabled is set AND
 // more than one GC thread is configured.
 825 void PSParallelCompact::post_initialize() {
 826   ParallelScavengeHeap* heap = gc_heap();
 827   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 828 
 // Span again covers the heap's entire reserved region.
 829   MemRegion mr = heap->reserved_region();
 830   _ref_processor =
 831     new ReferenceProcessor(mr,            // span
 832                            ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 833                            ParallelGCThreads, // mt processing degree
 834                            true,          // mt discovery
 835                            ParallelGCThreads, // mt discovery degree
 836                            true,          // atomic_discovery
 837                            &_is_alive_closure, // non-header is alive closure
 838                            false);        // write barrier for next field updates
 // (The two blank lines below are a page-break artifact of the scrape.)


 // Perf counters for this collector; generation ordinal 1.
 839   _counters = new CollectorCounters("PSParallelCompact", 1);
 840 
 841   // Initialize static fields in ParCompactionManager.
 842   ParCompactionManager::initialize(mark_bitmap());
 843 }
 844 
 845 bool PSParallelCompact::initialize() {
 846   ParallelScavengeHeap* heap = gc_heap();
 847   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 848   MemRegion mr = heap->reserved_region();
 849 
 850   // Was the old gen get allocated successfully?
 851   if (!heap->old_gen()->is_allocated()) {
 852     return false;
 853   }
 854 
 855   initialize_space_info();
 856   initialize_dead_wood_limiter();
 857 
 858   if (!_mark_bitmap.initialize(mr)) {