// ---------------------------------------------------------------------------
// PSParallelCompact (HotSpot ParallelOld collector) -- first revision of this
// chunk.  NOTE(review): this extract is a flattened side-by-side diff; the
// embedded integers were the original gutter line numbers, and large spans
// (original lines 854-2332 and 2375-2409) are elided between definitions, so
// some brace scopes below are intentionally incomplete.
// ---------------------------------------------------------------------------

// Tail of an ASSERT-only verification helper whose opening lines are outside
// this extract: checks that both compaction virtual spaces are zero-filled.
{
  verify_clear(_region_vspace);
  verify_clear(_block_vspace);
}
#endif  // #ifdef ASSERT

// Collector-wide static state.
STWGCTimer          PSParallelCompact::_gc_timer;
ParallelOldTracer   PSParallelCompact::_gc_tracer;
elapsedTimer        PSParallelCompact::_accumulated_time;
unsigned int        PSParallelCompact::_total_invocations = 0;
unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
jlong               PSParallelCompact::_time_of_last_gc = 0;
CollectorCounters*  PSParallelCompact::_counters = NULL;
ParMarkBitMap       PSParallelCompact::_mark_bitmap;
ParallelCompactData PSParallelCompact::_summary_data;

PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;

// Liveness test: an object is alive iff its bit is set in the mark bitmap.
bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }

// Both oop widths delegate to the shared do_oop_work helper.
void PSParallelCompact::KeepAliveClosure::do_oop(oop* p)       { PSParallelCompact::KeepAliveClosure::do_oop_work(p); }
void PSParallelCompact::KeepAliveClosure::do_oop(narrowOop* p) { PSParallelCompact::KeepAliveClosure::do_oop_work(p); }

PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_pointer_closure;
PSParallelCompact::AdjustKlassClosure   PSParallelCompact::_adjust_klass_closure;

// Drain this worker's marking stacks via its compaction manager.
void PSParallelCompact::FollowStackClosure::do_void() { _compaction_manager->follow_marking_stacks(); }

// Apply the mark-and-push closure to every oop embedded in the klass.
void PSParallelCompact::FollowKlassClosure::do_klass(Klass* klass) {
  klass->oops_do(_mark_and_push_closure);
}

// Apply the pointer-adjusting closure to every oop embedded in the klass.
void PSParallelCompact::AdjustKlassClosure::do_klass(Klass* klass) {
  klass->oops_do(&PSParallelCompact::_adjust_pointer_closure);
}

// Late initialization: construct the reference processor and perf counters,
// and hand the mark bitmap to ParCompactionManager.
void PSParallelCompact::post_initialize() {
  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  MemRegion mr = heap->reserved_region();
  _ref_processor =
    new ReferenceProcessor(mr,            // span
                           ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
                           (int) ParallelGCThreads, // mt processing degree
                           true,          // mt discovery
                           (int) ParallelGCThreads, // mt discovery degree
                           true,          // atomic_discovery
                           &_is_alive_closure); // non-header is alive closure
  _counters = new CollectorCounters("PSParallelCompact", 1);

  // Initialize static fields in ParCompactionManager.
  ParCompactionManager::initialize(mark_bitmap());
}

// NOTE(review): original lines 854-2332 elided here; this lone brace closes a
// definition whose body lies outside the extract.
}

// Accessor for the shared GC task manager; asserted non-NULL.
GCTaskManager* const PSParallelCompact::gc_task_manager() {
  assert(ParallelScavengeHeap::gc_task_manager() != NULL,
    "shouldn't return NULL");
  return ParallelScavengeHeap::gc_task_manager();
}

// Marking phase: traverse and mark all live objects starting from the strong
// roots, in parallel, then unload dead classes/nmethods/strings/symbols.
void PSParallelCompact::marking_phase(ParCompactionManager* cm,
                                      bool maximum_heap_compaction,
                                      ParallelOldTracer *gc_tracer) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("marking phase", print_phases(), true, &_gc_timer, _gc_tracer.gc_id());

  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  uint parallel_gc_threads = heap->gc_task_manager()->workers();
  uint active_gc_threads = heap->gc_task_manager()->active_workers();
  TaskQueueSetSuper* qset = ParCompactionManager::region_array();
  ParallelTaskTerminator terminator(active_gc_threads, qset);

  PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm);
  PSParallelCompact::FollowStackClosure follow_stack_closure(cm);

  // Need new claim bits before marking starts.
  ClassLoaderDataGraph::clear_claimed_marks();

  {
    GCTraceTime tm_m("par mark", print_phases(), true, &_gc_timer, _gc_tracer.gc_id());

    ParallelScavengeHeap::ParStrongRootsScope psrs;

    GCTaskQueue* q = GCTaskQueue::create();

    // One root-scanning task per strong-root group.
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::universe));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::jni_handles));
    // We scan the thread roots in parallel
    Threads::create_thread_roots_marking_tasks(q);
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::object_synchronizer));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::flat_profiler));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::management));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::system_dictionary));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::class_loader_data));

  // NOTE(review): original lines 2375-2409 elided from this extract; the
  // inner scope opened above presumably closes within the elided span --
  // confirm against the full source.

  // Follow system dictionary roots and unload classes.
  bool purged_class = SystemDictionary::do_unloading(is_alive_closure());

  // Unload nmethods.
  CodeCache::do_unloading(is_alive_closure(), purged_class);

  // Prune dead klasses from subklass/sibling/implementor lists.
  Klass::clean_weak_klass_links(is_alive_closure());

  // Delete entries for dead interned strings.
  StringTable::unlink(is_alive_closure());

  // Clean up unreferenced symbols in symbol table.
  SymbolTable::unlink();
  _gc_tracer.report_object_count_after_gc(is_alive_closure());
}

// Trace a single class loader's roots with mark-and-push, following klasses.
void PSParallelCompact::follow_class_loader(ParCompactionManager* cm,
                                            ClassLoaderData* cld) {
  PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm);
  PSParallelCompact::FollowKlassClosure follow_klass_closure(&mark_and_push_closure);

  cld->oops_do(&mark_and_push_closure, &follow_klass_closure, true);
}

// This should be moved to the shared markSweep code!
// Closure answering "always live"; used where every referent must be
// processed regardless of liveness (call sites are outside this extract --
// presumably weak-root processing; verify against the full source).
class PSAlwaysTrueClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop p) { return true; }
};
static PSAlwaysTrueClosure always_true;

// Adjust all strong-root pointers to the objects' new post-compaction
// locations.  NOTE(review): truncated here -- the remainder of the function
// body falls outside this extract.
void PSParallelCompact::adjust_roots() {
  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("adjust roots", print_phases(), true, &_gc_timer, _gc_tracer.gc_id());

  // Need new claim bits when tracing through and adjusting pointers.
  ClassLoaderDataGraph::clear_claimed_marks();

  // General strong roots.
  Universe::oops_do(adjust_pointer_closure());

// ===========================================================================
// NOTE(review): second revision of the same chunk begins here (right-hand
// column of the original side-by-side diff).  Visible differences from the
// first revision: KeepAliveClosure, FollowStackClosure and FollowKlassClosure
// no longer appear on PSParallelCompact; marking_phase and
// follow_class_loader obtain their closures from ParCompactionManager.
// ===========================================================================

// Tail of the ASSERT-only verification helper (opening lines elided).
{
  verify_clear(_region_vspace);
  verify_clear(_block_vspace);
}
#endif  // #ifdef ASSERT

// Collector-wide static state.
STWGCTimer          PSParallelCompact::_gc_timer;
ParallelOldTracer   PSParallelCompact::_gc_tracer;
elapsedTimer        PSParallelCompact::_accumulated_time;
unsigned int        PSParallelCompact::_total_invocations = 0;
unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
jlong               PSParallelCompact::_time_of_last_gc = 0;
CollectorCounters*  PSParallelCompact::_counters = NULL;
ParMarkBitMap       PSParallelCompact::_mark_bitmap;
ParallelCompactData PSParallelCompact::_summary_data;

PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;

// Liveness test: an object is alive iff its bit is set in the mark bitmap.
bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }

PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_pointer_closure;
PSParallelCompact::AdjustKlassClosure   PSParallelCompact::_adjust_klass_closure;

// Apply the pointer-adjusting closure to every oop embedded in the klass.
void PSParallelCompact::AdjustKlassClosure::do_klass(Klass* klass) {
  klass->oops_do(&PSParallelCompact::_adjust_pointer_closure);
}

// Late initialization: construct the reference processor and perf counters,
// and hand the mark bitmap to ParCompactionManager.
void PSParallelCompact::post_initialize() {
  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  MemRegion mr = heap->reserved_region();
  _ref_processor =
    new ReferenceProcessor(mr,            // span
                           ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
                           (int) ParallelGCThreads, // mt processing degree
                           true,          // mt discovery
                           (int) ParallelGCThreads, // mt discovery degree
                           true,          // atomic_discovery
                           &_is_alive_closure); // non-header is alive closure
  _counters = new CollectorCounters("PSParallelCompact", 1);

  // Initialize static fields in ParCompactionManager.
  ParCompactionManager::initialize(mark_bitmap());
}

// NOTE(review): original lines 846-2324 of this revision elided; the lone
// brace closes a definition whose body lies outside the extract.
}

// Accessor for the shared GC task manager; asserted non-NULL.
GCTaskManager* const PSParallelCompact::gc_task_manager() {
  assert(ParallelScavengeHeap::gc_task_manager() != NULL,
    "shouldn't return NULL");
  return ParallelScavengeHeap::gc_task_manager();
}

// Marking phase (revised): identical flow to the first revision, but the
// mark-and-push / follow-stack closures are ParCompactionManager types.
void PSParallelCompact::marking_phase(ParCompactionManager* cm,
                                      bool maximum_heap_compaction,
                                      ParallelOldTracer *gc_tracer) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("marking phase", print_phases(), true, &_gc_timer, _gc_tracer.gc_id());

  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  uint parallel_gc_threads = heap->gc_task_manager()->workers();
  uint active_gc_threads = heap->gc_task_manager()->active_workers();
  TaskQueueSetSuper* qset = ParCompactionManager::region_array();
  ParallelTaskTerminator terminator(active_gc_threads, qset);

  ParCompactionManager::MarkAndPushClosure mark_and_push_closure(cm);
  ParCompactionManager::FollowStackClosure follow_stack_closure(cm);

  // Need new claim bits before marking starts.
  ClassLoaderDataGraph::clear_claimed_marks();

  {
    GCTraceTime tm_m("par mark", print_phases(), true, &_gc_timer, _gc_tracer.gc_id());

    ParallelScavengeHeap::ParStrongRootsScope psrs;

    GCTaskQueue* q = GCTaskQueue::create();

    // One root-scanning task per strong-root group.
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::universe));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::jni_handles));
    // We scan the thread roots in parallel
    Threads::create_thread_roots_marking_tasks(q);
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::object_synchronizer));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::flat_profiler));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::management));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::system_dictionary));
    q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::class_loader_data));

  // NOTE(review): original lines 2367-2401 elided from this extract; the
  // inner scope opened above presumably closes within the elided span --
  // confirm against the full source.

  // Follow system dictionary roots and unload classes.
  bool purged_class = SystemDictionary::do_unloading(is_alive_closure());

  // Unload nmethods.
  CodeCache::do_unloading(is_alive_closure(), purged_class);

  // Prune dead klasses from subklass/sibling/implementor lists.
  Klass::clean_weak_klass_links(is_alive_closure());

  // Delete entries for dead interned strings.
  StringTable::unlink(is_alive_closure());

  // Clean up unreferenced symbols in symbol table.
  SymbolTable::unlink();
  _gc_tracer.report_object_count_after_gc(is_alive_closure());
}

// Trace a single class loader's roots (revised: ParCompactionManager
// closures instead of PSParallelCompact ones).
void PSParallelCompact::follow_class_loader(ParCompactionManager* cm,
                                            ClassLoaderData* cld) {
  ParCompactionManager::MarkAndPushClosure mark_and_push_closure(cm);
  ParCompactionManager::FollowKlassClosure follow_klass_closure(&mark_and_push_closure);

  cld->oops_do(&mark_and_push_closure, &follow_klass_closure, true);
}

// This should be moved to the shared markSweep code!
// Closure answering "always live"; second-revision copy, identical to the
// first revision's (call sites are outside this extract).
class PSAlwaysTrueClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop p) { return true; }
};
static PSAlwaysTrueClosure always_true;

// Adjust all strong-root pointers to the objects' new post-compaction
// locations.  NOTE(review): the extract ends mid-function; the remainder of
// the body is not visible here.
void PSParallelCompact::adjust_roots() {
  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("adjust roots", print_phases(), true, &_gc_timer, _gc_tracer.gc_id());

  // Need new claim bits when tracing through and adjusting pointers.
  ClassLoaderDataGraph::clear_claimed_marks();

  // General strong roots.
  Universe::oops_do(adjust_pointer_closure());