src/share/vm/gc_implementation/parallelScavenge/psMarkSweep.cpp
8004128_2 Sdiff

Old:

 163   // Filled in below to track the state of the young gen after the collection.
 164   bool eden_empty;
 165   bool survivors_empty;
 166   bool young_gen_empty;
 167 
 168   {
 169     HandleMark hm;
 170 
 171     gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps);
 172     TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
 173     GCTraceTime t1(GCCauseString("Full GC", gc_cause), PrintGC, !PrintGCDetails, NULL, _gc_tracer->gc_id());
 174     TraceCollectorStats tcs(counters());
 175     TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
 176 
 177     if (TraceOldGenTime) accumulated_time()->start();
 178 
 179     // Let the size policy know we're starting
 180     size_policy->major_collection_begin();
 181 
 182     CodeCache::gc_prologue();
 183     Threads::gc_prologue();
 184     BiasedLocking::preserve_marks();
 185 
 186     // Capture heap size before collection for printing.
 187     size_t prev_used = heap->used();
 188 
 189     // Capture metadata size before collection for sizing.
 190     size_t metadata_prev_used = MetaspaceAux::used_bytes();
 191 
 192     // For PrintGCDetails
 193     size_t old_gen_prev_used = old_gen->used_in_bytes();
 194     size_t young_gen_prev_used = young_gen->used_in_bytes();
 195 
 196     allocate_stacks();
 197 
 198     COMPILER2_PRESENT(DerivedPointerTable::clear());
 199 
 200     ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
 201     ref_processor()->setup_policy(clear_all_softrefs);
 202 
 203     mark_sweep_phase1(clear_all_softrefs);


 234     survivors_empty = young_gen->from_space()->is_empty() &&
 235                       young_gen->to_space()->is_empty();
 236     young_gen_empty = eden_empty && survivors_empty;
 237 
 238     BarrierSet* bs = heap->barrier_set();
 239     if (bs->is_a(BarrierSet::ModRef)) {
 240       ModRefBarrierSet* modBS = (ModRefBarrierSet*)bs;
 241       MemRegion old_mr = heap->old_gen()->reserved();
 242       if (young_gen_empty) {
 243         modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 244       } else {
 245         modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 246       }
 247     }
 248 
 249     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 250     ClassLoaderDataGraph::purge();
 251     MetaspaceAux::verify_metrics();
 252 
 253     BiasedLocking::restore_marks();
 254     Threads::gc_epilogue();
 255     CodeCache::gc_epilogue();
 256     JvmtiExport::gc_epilogue();
 257 
 258     COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
 259 
 260     ref_processor()->enqueue_discovered_references(NULL);
 261 
 262     // Update time of last GC
 263     reset_millis_since_last_gc();
 264 
 265     // Let the size policy know we're done
 266     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 267 
 268     if (UseAdaptiveSizePolicy) {
 269 
 270       if (PrintAdaptiveSizePolicy) {
 271         gclog_or_tty->print("AdaptiveSizeStart: ");
 272         gclog_or_tty->stamp();
 273         gclog_or_tty->print_cr(" collection: %d ",
 274                        heap->total_collections());

New:

 163   // Filled in below to track the state of the young gen after the collection.
 164   bool eden_empty;
 165   bool survivors_empty;
 166   bool young_gen_empty;
 167 
 168   {
 169     HandleMark hm;
 170 
 171     gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps);
 172     TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
 173     GCTraceTime t1(GCCauseString("Full GC", gc_cause), PrintGC, !PrintGCDetails, NULL, _gc_tracer->gc_id());
 174     TraceCollectorStats tcs(counters());
 175     TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
 176 
 177     if (TraceOldGenTime) accumulated_time()->start();
 178 
 179     // Let the size policy know we're starting
 180     size_policy->major_collection_begin();
 181 
 182     CodeCache::gc_prologue();

 183     BiasedLocking::preserve_marks();
 184 
 185     // Capture heap size before collection for printing.
 186     size_t prev_used = heap->used();
 187 
 188     // Capture metadata size before collection for sizing.
 189     size_t metadata_prev_used = MetaspaceAux::used_bytes();
 190 
 191     // For PrintGCDetails
 192     size_t old_gen_prev_used = old_gen->used_in_bytes();
 193     size_t young_gen_prev_used = young_gen->used_in_bytes();
 194 
 195     allocate_stacks();
 196 
 197     COMPILER2_PRESENT(DerivedPointerTable::clear());
 198 
 199     ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
 200     ref_processor()->setup_policy(clear_all_softrefs);
 201 
 202     mark_sweep_phase1(clear_all_softrefs);


 233     survivors_empty = young_gen->from_space()->is_empty() &&
 234                       young_gen->to_space()->is_empty();
 235     young_gen_empty = eden_empty && survivors_empty;
 236 
 237     BarrierSet* bs = heap->barrier_set();
 238     if (bs->is_a(BarrierSet::ModRef)) {
 239       ModRefBarrierSet* modBS = (ModRefBarrierSet*)bs;
 240       MemRegion old_mr = heap->old_gen()->reserved();
 241       if (young_gen_empty) {
 242         modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 243       } else {
 244         modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 245       }
 246     }
 247 
 248     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 249     ClassLoaderDataGraph::purge();
 250     MetaspaceAux::verify_metrics();
 251 
 252     BiasedLocking::restore_marks();

 253     CodeCache::gc_epilogue();
 254     JvmtiExport::gc_epilogue();
 255 
 256     COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
 257 
 258     ref_processor()->enqueue_discovered_references(NULL);
 259 
 260     // Update time of last GC
 261     reset_millis_since_last_gc();
 262 
 263     // Let the size policy know we're done
 264     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 265 
 266     if (UseAdaptiveSizePolicy) {
 267 
 268       if (PrintAdaptiveSizePolicy) {
 269         gclog_or_tty->print("AdaptiveSizeStart: ");
 270         gclog_or_tty->stamp();
 271         gclog_or_tty->print_cr(" collection: %d ",
 272                        heap->total_collections());
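
As an aside on the barrier-set block above: the choice between modBS->clear() and modBS->invalidate() over the old generation's reserved region depends only on whether the young generation came out of the full collection empty. The standalone sketch below mirrors that decision; CardTable, clean_card, dirty_card and main() are simplified stand-ins invented for illustration, not the real ModRefBarrierSet / MemRegion API.

// Minimal standalone sketch (not HotSpot code) of the clear-vs-invalidate
// decision made on the old-gen remembered set after a full collection.
#include <cstddef>
#include <cstdio>
#include <vector>

enum Card { clean_card, dirty_card };

struct CardTable {
  std::vector<Card> cards;
  explicit CardTable(std::size_t n) : cards(n, dirty_card) {}
  // Young gen is empty: no old->young pointers can exist, so every card
  // covering the old gen may be marked clean.
  void clear()      { cards.assign(cards.size(), clean_card); }
  // Young gen still holds objects: compaction has moved things around, so
  // the card state is stale; conservatively dirty everything and let the
  // next young collection rescan the old gen.
  void invalidate() { cards.assign(cards.size(), dirty_card); }
};

int main() {
  bool eden_empty      = true;   // e.g. everything in eden was promoted
  bool survivors_empty = false;  // from/to spaces still hold objects
  bool young_gen_empty = eden_empty && survivors_empty;

  CardTable old_gen_cards(8);
  if (young_gen_empty) {
    old_gen_cards.clear();       // corresponds to modBS->clear(old_mr)
  } else {
    old_gen_cards.invalidate();  // corresponds to modBS->invalidate(old_mr)
  }
  std::printf("young_gen_empty=%d, first old-gen card is %s\n",
              (int)young_gen_empty,
              old_gen_cards.cards[0] == clean_card ? "clean" : "dirty");
  return 0;
}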

