src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hs-gc9 Sdiff src/share/vm/gc_implementation/parallelScavenge

src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp

Print this page
rev 6446 : [mq]: ref-write-new-fix


 837 
 // Marking phase: apply the mark-and-push closure to every oop embedded in
 // this Klass so the objects it references are marked live.
 838 void PSParallelCompact::FollowKlassClosure::do_klass(Klass* klass) {
 839   klass->oops_do(_mark_and_push_closure);
 840 }
 // Adjust phase: update every oop embedded in this Klass via the shared
 // static adjust-pointer closure (points references at their new locations).
 841 void PSParallelCompact::AdjustKlassClosure::do_klass(Klass* klass) {
 842   klass->oops_do(&PSParallelCompact::_adjust_pointer_closure);
 843 }
 844 
 // One-time setup run after heap initialization: create the reference
 // processor spanning the heap's reserved region, create this collector's
 // perf counters, and initialize ParCompactionManager's static state from
 // the mark bitmap.
 845 void PSParallelCompact::post_initialize() {
 846   ParallelScavengeHeap* heap = gc_heap();
 847   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 848 
 849   MemRegion mr = heap->reserved_region();
 850   _ref_processor =
 851     new ReferenceProcessor(mr,            // span
 852                            ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 853                            (int) ParallelGCThreads, // mt processing degree
 854                            true,          // mt discovery
 855                            (int) ParallelGCThreads, // mt discovery degree
 856                            true,          // atomic_discovery
 857                            &_is_alive_closure, // non-header is alive closure
 858                            false);        // write barrier for next field updates
 859   _counters = new CollectorCounters("PSParallelCompact", 1);
 860 
 861   // Initialize static fields in ParCompactionManager.
 862   ParCompactionManager::initialize(mark_bitmap());
 863 }
 864 
 865 bool PSParallelCompact::initialize() {
 866   ParallelScavengeHeap* heap = gc_heap();
 867   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 868   MemRegion mr = heap->reserved_region();
 869 
 870   // Was the old gen get allocated successfully?
 871   if (!heap->old_gen()->is_allocated()) {
 872     return false;
 873   }
 874 
 875   initialize_space_info();
 876   initialize_dead_wood_limiter();
 877 
 878   if (!_mark_bitmap.initialize(mr)) {




 837 
 // Marking phase: apply the mark-and-push closure to every oop embedded in
 // this Klass so the objects it references are marked live.
 838 void PSParallelCompact::FollowKlassClosure::do_klass(Klass* klass) {
 839   klass->oops_do(_mark_and_push_closure);
 840 }
 // Adjust phase: update every oop embedded in this Klass via the shared
 // static adjust-pointer closure (points references at their new locations).
 841 void PSParallelCompact::AdjustKlassClosure::do_klass(Klass* klass) {
 842   klass->oops_do(&PSParallelCompact::_adjust_pointer_closure);
 843 }
 844 
 // One-time setup run after heap initialization: create the reference
 // processor spanning the heap's reserved region, create this collector's
 // perf counters, and initialize ParCompactionManager's static state from
 // the mark bitmap.
 // NOTE(review): this revision drops the trailing boolean argument
 // ("write barrier for next field updates") that the previous version
 // passed to the ReferenceProcessor constructor — confirm the constructor
 // signature in referenceProcessor.hpp was updated to match.
 845 void PSParallelCompact::post_initialize() {
 846   ParallelScavengeHeap* heap = gc_heap();
 847   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 848 
 849   MemRegion mr = heap->reserved_region();
 850   _ref_processor =
 851     new ReferenceProcessor(mr,            // span
 852                            ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 853                            (int) ParallelGCThreads, // mt processing degree
 854                            true,          // mt discovery
 855                            (int) ParallelGCThreads, // mt discovery degree
 856                            true,          // atomic_discovery
 857                            &_is_alive_closure); // non-header is alive closure

 858   _counters = new CollectorCounters("PSParallelCompact", 1);
 859 
 860   // Initialize static fields in ParCompactionManager.
 861   ParCompactionManager::initialize(mark_bitmap());
 862 }
 863 
 864 bool PSParallelCompact::initialize() {
 865   ParallelScavengeHeap* heap = gc_heap();
 866   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 867   MemRegion mr = heap->reserved_region();
 868 
 869   // Was the old gen get allocated successfully?
 870   if (!heap->old_gen()->is_allocated()) {
 871     return false;
 872   }
 873 
 874   initialize_space_info();
 875   initialize_dead_wood_limiter();
 876 
 877   if (!_mark_bitmap.initialize(mr)) {


src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File