src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp

Print this page
rev 4300 : 8010289: PSParallelCompact::marking_phase should use instance GCTracer


2344   ObjectStartArray* const start_array = old_gen->start_array();
2345   for (HeapWord* p = unused_start; p < new_top; p += oop(p)->size()) {
2346     start_array->allocate_block(p);
2347   }
2348 
2349   // Could update the promoted average here, but it is not typically updated at
2350   // full GCs and the value to use is unclear.  Something like
2351   //
2352   // cur_promoted_avg + absorb_size / number_of_scavenges_since_last_full_gc.
2353 
2354   size_policy->set_bytes_absorbed_from_eden(absorb_size);
2355   return true;
2356 }
2357 
// Accessor for the process-wide GCTaskManager owned by ParallelScavengeHeap.
// Debug builds assert the manager was created before first use; the returned
// pointer is therefore never NULL for callers.
2358 GCTaskManager* const PSParallelCompact::gc_task_manager() {
2359   assert(ParallelScavengeHeap::gc_task_manager() != NULL,
2360     "shouldn't return NULL");
2361   return ParallelScavengeHeap::gc_task_manager();
2362 }
2363 
2364 void PSParallelCompact::marking_phase(ParCompactionManager* cm,
2365                                       bool maximum_heap_compaction,
2366                                       ParallelOldTracer *gc_tracer) {
2367   // Recursively traverse all live objects and mark them
2368   GCTraceTime tm("marking phase", print_phases(), true, &_gc_timer);
2369 
2370   ParallelScavengeHeap* heap = gc_heap();
2371   uint parallel_gc_threads = heap->gc_task_manager()->workers();
2372   uint active_gc_threads = heap->gc_task_manager()->active_workers();
2373   TaskQueueSetSuper* qset = ParCompactionManager::region_array();
2374   ParallelTaskTerminator terminator(active_gc_threads, qset);
2375 
2376   PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm);
2377   PSParallelCompact::FollowStackClosure follow_stack_closure(cm);
2378 
2379   {
2380     GCTraceTime tm_m("par mark", print_phases(), true, &_gc_timer);
2381 
2382     ParallelScavengeHeap::ParStrongRootsScope psrs;
2383 
2384     GCTaskQueue* q = GCTaskQueue::create();
2385 
2386     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::universe));


2402 
2403     gc_task_manager()->execute_and_wait(q);
2404   }
2405 
2406   // Process reference objects found during marking
2407   {
2408     GCTraceTime tm_r("reference processing", print_phases(), true, &_gc_timer);
2409 
2410     ReferenceProcessorStats stats;
2411     if (ref_processor()->processing_is_mt()) {
2412       RefProcTaskExecutor task_executor;
2413       stats = ref_processor()->process_discovered_references(
2414         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure,
2415         &task_executor, &_gc_timer);
2416     } else {
2417       stats = ref_processor()->process_discovered_references(
2418         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure, NULL,
2419         &_gc_timer);
2420     }
2421 
2422     gc_tracer->report_gc_reference_stats(stats);
2423   }
2424 
2425   GCTraceTime tm_c("class unloading", print_phases(), true, &_gc_timer);
2426 
2427   // Follow system dictionary roots and unload classes.
2428   bool purged_class = SystemDictionary::do_unloading(is_alive_closure());
2429 
2430   // Follow code cache roots.
2431   CodeCache::do_unloading(is_alive_closure(), &mark_and_push_closure,
2432                           purged_class);
2433   cm->follow_marking_stacks(); // Flush marking stack.
2434 
2435   // Update subklass/sibling/implementor links of live klasses
2436   // revisit_klass_stack is used in follow_weak_klass_links().
2437   follow_weak_klass_links();
2438 
2439   // Revisit memoized MDO's and clear any unmarked weak refs
2440   follow_mdo_weak_refs();
2441 
2442   // Visit interned string tables and delete unmarked oops




2344   ObjectStartArray* const start_array = old_gen->start_array();
2345   for (HeapWord* p = unused_start; p < new_top; p += oop(p)->size()) {
2346     start_array->allocate_block(p);
2347   }
2348 
2349   // Could update the promoted average here, but it is not typically updated at
2350   // full GCs and the value to use is unclear.  Something like
2351   //
2352   // cur_promoted_avg + absorb_size / number_of_scavenges_since_last_full_gc.
2353 
2354   size_policy->set_bytes_absorbed_from_eden(absorb_size);
2355   return true;
2356 }
2357 
// Accessor for the process-wide GCTaskManager owned by ParallelScavengeHeap.
// Debug builds assert the manager was created before first use; the returned
// pointer is therefore never NULL for callers.
2358 GCTaskManager* const PSParallelCompact::gc_task_manager() {
2359   assert(ParallelScavengeHeap::gc_task_manager() != NULL,
2360     "shouldn't return NULL");
2361   return ParallelScavengeHeap::gc_task_manager();
2362 }
2363 
2364 void PSParallelCompact::marking_phase(ParCompactionManager* cm, bool maximum_heap_compaction) {


2365   // Recursively traverse all live objects and mark them
2366   GCTraceTime tm("marking phase", print_phases(), true, &_gc_timer);
2367 
2368   ParallelScavengeHeap* heap = gc_heap();
2369   uint parallel_gc_threads = heap->gc_task_manager()->workers();
2370   uint active_gc_threads = heap->gc_task_manager()->active_workers();
2371   TaskQueueSetSuper* qset = ParCompactionManager::region_array();
2372   ParallelTaskTerminator terminator(active_gc_threads, qset);
2373 
2374   PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm);
2375   PSParallelCompact::FollowStackClosure follow_stack_closure(cm);
2376 
2377   {
2378     GCTraceTime tm_m("par mark", print_phases(), true, &_gc_timer);
2379 
2380     ParallelScavengeHeap::ParStrongRootsScope psrs;
2381 
2382     GCTaskQueue* q = GCTaskQueue::create();
2383 
2384     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::universe));


2400 
2401     gc_task_manager()->execute_and_wait(q);
2402   }
2403 
2404   // Process reference objects found during marking
2405   {
2406     GCTraceTime tm_r("reference processing", print_phases(), true, &_gc_timer);
2407 
2408     ReferenceProcessorStats stats;
2409     if (ref_processor()->processing_is_mt()) {
2410       RefProcTaskExecutor task_executor;
2411       stats = ref_processor()->process_discovered_references(
2412         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure,
2413         &task_executor, &_gc_timer);
2414     } else {
2415       stats = ref_processor()->process_discovered_references(
2416         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure, NULL,
2417         &_gc_timer);
2418     }
2419 
2420     _gc_tracer->report_gc_reference_stats(stats);
2421   }
2422 
2423   GCTraceTime tm_c("class unloading", print_phases(), true, &_gc_timer);
2424 
2425   // Follow system dictionary roots and unload classes.
2426   bool purged_class = SystemDictionary::do_unloading(is_alive_closure());
2427 
2428   // Follow code cache roots.
2429   CodeCache::do_unloading(is_alive_closure(), &mark_and_push_closure,
2430                           purged_class);
2431   cm->follow_marking_stacks(); // Flush marking stack.
2432 
2433   // Update subklass/sibling/implementor links of live klasses
2434   // revisit_klass_stack is used in follow_weak_klass_links().
2435   follow_weak_klass_links();
2436 
2437   // Revisit memoized MDO's and clear any unmarked weak refs
2438   follow_mdo_weak_refs();
2439 
2440   // Visit interned string tables and delete unmarked oops