
src/hotspot/share/gc/parallel/psMarkSweep.cpp


 170   bool young_gen_empty;
 171 
 172   {
 173     HandleMark hm;
 174 
 175     GCTraceCPUTime tcpu;
 176     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause, true);
 177 
 178     heap->pre_full_gc_dump(_gc_timer);
 179 
 180     TraceCollectorStats tcs(counters());
 181     TraceMemoryManagerStats tms(heap->old_gc_manager(), gc_cause);
 182 
 183     if (log_is_enabled(Debug, gc, heap, exit)) {
 184       accumulated_time()->start();
 185     }
 186 
 187     // Let the size policy know we're starting
 188     size_policy->major_collection_begin();
 189 
 190     CodeCache::gc_prologue();
 191     BiasedLocking::preserve_marks();
 192 
 193     // Capture metadata size before collection for sizing.
 194     size_t metadata_prev_used = MetaspaceUtils::used_bytes();
 195 
 196     size_t old_gen_prev_used = old_gen->used_in_bytes();
 197     size_t young_gen_prev_used = young_gen->used_in_bytes();
 198 
 199     allocate_stacks();
 200 
 201 #if COMPILER2_OR_JVMCI
 202     DerivedPointerTable::clear();
 203 #endif
 204 
 205     ref_processor()->enable_discovery();
 206     ref_processor()->setup_policy(clear_all_softrefs);
 207 
 208     mark_sweep_phase1(clear_all_softrefs);
 209 
 210     mark_sweep_phase2();


 238     // input to soft ref clearing policy at the next gc.
 239     Universe::update_heap_info_at_gc();
 240 
 241     survivors_empty = young_gen->from_space()->is_empty() &&
 242                       young_gen->to_space()->is_empty();
 243     young_gen_empty = eden_empty && survivors_empty;
 244 
 245     PSCardTable* card_table = heap->card_table();
 246     MemRegion old_mr = heap->old_gen()->reserved();
 247     if (young_gen_empty) {
 248       card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
 249     } else {
 250       card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 251     }
 252 
 253     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 254     ClassLoaderDataGraph::purge();
 255     MetaspaceUtils::verify_metrics();
 256 
 257     BiasedLocking::restore_marks();
 258     CodeCache::gc_epilogue();
 259     JvmtiExport::gc_epilogue();
 260 
 261 #if COMPILER2_OR_JVMCI
 262     DerivedPointerTable::update_pointers();
 263 #endif
 264 
 265     assert(!ref_processor()->discovery_enabled(), "Should have been disabled earlier");
 266 
 267     // Update time of last GC
 268     reset_millis_since_last_gc();
 269 
 270     // Let the size policy know we're done
 271     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 272 
 273     if (UseAdaptiveSizePolicy) {
 274 
 275      log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 276      log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 277                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 278 
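
The card-table handling above hinges on whether the young generation ended up completely empty: if eden and both survivor spaces are empty after the full collection, no old-to-young pointers can exist, so the cards covering the old generation can simply be cleared; otherwise they are all dirtied (invalidated) so that the next minor collection rescans the old generation. A minimal sketch of that decision in isolation, using hypothetical stand-in types rather than the real PSCardTable and MemRegion classes:

    #include <cstddef>

    // Hypothetical stand-ins for MemRegion and PSCardTable, for illustration only.
    struct CardRange { void* start; void* end; };

    struct CardTableLike {
      virtual void clear(CardRange r)      = 0;  // mark every card in r clean
      virtual void invalidate(CardRange r) = 0;  // mark every card in r dirty
      virtual ~CardTableLike() = default;
    };

    // If the young generation is empty there is nothing an old-gen card could
    // point at, so clearing is safe; otherwise dirty everything conservatively.
    void reset_old_gen_cards(CardTableLike& ct, CardRange old_gen, bool young_gen_empty) {
      if (young_gen_empty) {
        ct.clear(old_gen);
      } else {
        ct.invalidate(old_gen);
      }
    }
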


 508   GCTraceTime(Info, gc, phases) tm("Phase 1: Mark live objects", _gc_timer);
 509 
 510   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 511 
 512   // Need to clear claim bits before the tracing starts.
 513   ClassLoaderDataGraph::clear_claimed_marks();
 514 
 515   // General strong roots.
 516   {
 517     ParallelScavengeHeap::ParStrongRootsScope psrs;
 518     Universe::oops_do(mark_and_push_closure());
 519     JNIHandles::oops_do(mark_and_push_closure());   // Global (strong) JNI handles
 520     MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(), !CodeBlobToOopClosure::FixRelocations);
 521     Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
 522     ObjectSynchronizer::oops_do(mark_and_push_closure());
 523     Management::oops_do(mark_and_push_closure());
 524     JvmtiExport::oops_do(mark_and_push_closure());
 525     SystemDictionary::oops_do(mark_and_push_closure());
 526     ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
 527     // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
 528     //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
 529     AOTLoader::oops_do(mark_and_push_closure());
 530   }
 531 
 532   // Flush marking stack.
 533   follow_stack();
 534 
 535   // Process reference objects found during marking
 536   {
 537     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
 538 
 539     ref_processor()->setup_policy(clear_all_softrefs);
 540     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->max_num_queues());
 541     const ReferenceProcessorStats& stats =
 542       ref_processor()->process_discovered_references(
 543         is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
 544     gc_tracer()->report_gc_reference_stats(stats);
 545     pt.print_all_references();
 546   }
 547 
 548   // This is the point where the entire marking should have completed.




 170   bool young_gen_empty;
 171 
 172   {
 173     HandleMark hm;
 174 
 175     GCTraceCPUTime tcpu;
 176     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause, true);
 177 
 178     heap->pre_full_gc_dump(_gc_timer);
 179 
 180     TraceCollectorStats tcs(counters());
 181     TraceMemoryManagerStats tms(heap->old_gc_manager(), gc_cause);
 182 
 183     if (log_is_enabled(Debug, gc, heap, exit)) {
 184       accumulated_time()->start();
 185     }
 186 
 187     // Let the size policy know we're starting
 188     size_policy->major_collection_begin();
 189 

 190     BiasedLocking::preserve_marks();
 191 
 192     // Capture metadata size before collection for sizing.
 193     size_t metadata_prev_used = MetaspaceUtils::used_bytes();
 194 
 195     size_t old_gen_prev_used = old_gen->used_in_bytes();
 196     size_t young_gen_prev_used = young_gen->used_in_bytes();
 197 
 198     allocate_stacks();
 199 
 200 #if COMPILER2_OR_JVMCI
 201     DerivedPointerTable::clear();
 202 #endif
 203 
 204     ref_processor()->enable_discovery();
 205     ref_processor()->setup_policy(clear_all_softrefs);
 206 
 207     mark_sweep_phase1(clear_all_softrefs);
 208 
 209     mark_sweep_phase2();


 237     // input to soft ref clearing policy at the next gc.
 238     Universe::update_heap_info_at_gc();
 239 
 240     survivors_empty = young_gen->from_space()->is_empty() &&
 241                       young_gen->to_space()->is_empty();
 242     young_gen_empty = eden_empty && survivors_empty;
 243 
 244     PSCardTable* card_table = heap->card_table();
 245     MemRegion old_mr = heap->old_gen()->reserved();
 246     if (young_gen_empty) {
 247       card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
 248     } else {
 249       card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 250     }
 251 
 252     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 253     ClassLoaderDataGraph::purge();
 254     MetaspaceUtils::verify_metrics();
 255 
 256     BiasedLocking::restore_marks();
 257     heap->prune_nmethods();
 258     JvmtiExport::gc_epilogue();
 259 
 260 #if COMPILER2_OR_JVMCI
 261     DerivedPointerTable::update_pointers();
 262 #endif
 263 
 264     assert(!ref_processor()->discovery_enabled(), "Should have been disabled earlier");
 265 
 266     // Update time of last GC
 267     reset_millis_since_last_gc();
 268 
 269     // Let the size policy know we're done
 270     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 271 
 272     if (UseAdaptiveSizePolicy) {
 273 
 274      log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 275      log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 276                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 277 


 507   GCTraceTime(Info, gc, phases) tm("Phase 1: Mark live objects", _gc_timer);
 508 
 509   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 510 
 511   // Need to clear claim bits before the tracing starts.
 512   ClassLoaderDataGraph::clear_claimed_marks();
 513 
 514   // General strong roots.
 515   {
 516     ParallelScavengeHeap::ParStrongRootsScope psrs;
 517     Universe::oops_do(mark_and_push_closure());
 518     JNIHandles::oops_do(mark_and_push_closure());   // Global (strong) JNI handles
 519     MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(), !CodeBlobToOopClosure::FixRelocations);
 520     Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
 521     ObjectSynchronizer::oops_do(mark_and_push_closure());
 522     Management::oops_do(mark_and_push_closure());
 523     JvmtiExport::oops_do(mark_and_push_closure());
 524     SystemDictionary::oops_do(mark_and_push_closure());
 525     ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
 526     // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
 527     //ScavengableNMethods::scavengable_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
 528     AOTLoader::oops_do(mark_and_push_closure());
 529   }
 530 
 531   // Flush marking stack.
 532   follow_stack();
 533 
 534   // Process reference objects found during marking
 535   {
 536     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
 537 
 538     ref_processor()->setup_policy(clear_all_softrefs);
 539     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->max_num_queues());
 540     const ReferenceProcessorStats& stats =
 541       ref_processor()->process_discovered_references(
 542         is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
 543     gc_tracer()->report_gc_reference_stats(stats);
 544     pt.print_all_references();
 545   }
 546 
 547   // This is the point where the entire marking should have completed.
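
The functional difference between the two versions shown on this page is confined to nmethod bookkeeping: the old code brackets the collection with CodeCache::gc_prologue() and CodeCache::gc_epilogue(), while the new code drops the prologue call entirely and replaces the epilogue call with heap->prune_nmethods() after BiasedLocking::restore_marks(); the commented-out scavengable-nmethod walk in phase 1 is likewise renamed from CodeCache::scavenge_root_nmethods_do to ScavengableNMethods::scavengable_nmethods_do. Presumably prune_nmethods() moves the post-GC pruning of scavengable nmethod lists into the new ScavengableNMethods helper, but its body is not part of this page.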

