src/hotspot/share/gc/parallel/psMarkSweep.cpp

Old version:

 175     TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);
 176 
 177     if (log_is_enabled(Debug, gc, heap, exit)) {
 178       accumulated_time()->start();
 179     }
 180 
 181     // Let the size policy know we're starting
 182     size_policy->major_collection_begin();
 183 
 184     CodeCache::gc_prologue();
 185     BiasedLocking::preserve_marks();
 186 
 187     // Capture metadata size before collection for sizing.
 188     size_t metadata_prev_used = MetaspaceAux::used_bytes();
 189 
 190     size_t old_gen_prev_used = old_gen->used_in_bytes();
 191     size_t young_gen_prev_used = young_gen->used_in_bytes();
 192 
 193     allocate_stacks();
 194 
 195 #if defined(COMPILER2) || INCLUDE_JVMCI
 196     DerivedPointerTable::clear();
 197 #endif
 198 
 199     ref_processor()->enable_discovery();
 200     ref_processor()->setup_policy(clear_all_softrefs);
 201 
 202     mark_sweep_phase1(clear_all_softrefs);
 203 
 204     mark_sweep_phase2();
 205 
 206 #if defined(COMPILER2) || INCLUDE_JVMCI
 207     // Don't add any more derived pointers during phase3
 208     assert(DerivedPointerTable::is_active(), "Sanity");
 209     DerivedPointerTable::set_active(false);
 210 #endif
 211 
 212     mark_sweep_phase3();
 213 
 214     mark_sweep_phase4();
 215 
 216     restore_marks();
 217 
 218     deallocate_stacks();
 219 
 220     if (ZapUnusedHeapArea) {
 221       // Do a complete mangle (top to end) because the usage for
 222       // scratch does not maintain a top pointer.
 223       young_gen->to_space()->mangle_unused_area_complete();
 224     }
 225 
 226     eden_empty = young_gen->eden_space()->is_empty();
     [lines 227-234 not shown]
 235     survivors_empty = young_gen->from_space()->is_empty() &&
 236                       young_gen->to_space()->is_empty();
 237     young_gen_empty = eden_empty && survivors_empty;
 238 
 239     ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
 240     MemRegion old_mr = heap->old_gen()->reserved();
 241     if (young_gen_empty) {
 242       modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 243     } else {
 244       modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 245     }
 246 
 247     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 248     ClassLoaderDataGraph::purge();
 249     MetaspaceAux::verify_metrics();
 250 
 251     BiasedLocking::restore_marks();
 252     CodeCache::gc_epilogue();
 253     JvmtiExport::gc_epilogue();
 254 
 255 #if defined(COMPILER2) || INCLUDE_JVMCI
 256     DerivedPointerTable::update_pointers();
 257 #endif
 258 
 259     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
 260 
 261     ref_processor()->enqueue_discovered_references(NULL, &pt);
 262 
 263     pt.print_enqueue_phase();
 264 
 265     // Update time of last GC
 266     reset_millis_since_last_gc();
 267 
 268     // Let the size policy know we're done
 269     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 270 
 271     if (UseAdaptiveSizePolicy) {
 272 
 273       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 274       log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 275                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
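
The old guards mix a defined() test with a plain value test because of a HotSpot convention: COMPILER2 is #define'd only in builds that include the C2 compiler, so its presence must be tested, while the INCLUDE_* feature macros such as INCLUDE_JVMCI are always defined, to either 0 or 1, and are tested by value. A standalone toy sketch of that convention (hypothetical program, not HotSpot code; DERIVED_POINTERS_ENABLED is an invented name):

    #include <cstdio>

    #define INCLUDE_JVMCI 1        // feature macro: always defined, to 0 or 1
    // #define COMPILER2           // defined only when the C2 compiler is built

    // Same shape as the guards above: either feature pulls the
    // derived-pointer code into the build.
    #if defined(COMPILER2) || INCLUDE_JVMCI
    #define DERIVED_POINTERS_ENABLED 1
    #else
    #define DERIVED_POINTERS_ENABLED 0
    #endif

    int main() {
      std::printf("derived-pointer code compiled in: %d\n", DERIVED_POINTERS_ENABLED);
      return 0;
    }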

New version:

 175     TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);
 176 
 177     if (log_is_enabled(Debug, gc, heap, exit)) {
 178       accumulated_time()->start();
 179     }
 180 
 181     // Let the size policy know we're starting
 182     size_policy->major_collection_begin();
 183 
 184     CodeCache::gc_prologue();
 185     BiasedLocking::preserve_marks();
 186 
 187     // Capture metadata size before collection for sizing.
 188     size_t metadata_prev_used = MetaspaceAux::used_bytes();
 189 
 190     size_t old_gen_prev_used = old_gen->used_in_bytes();
 191     size_t young_gen_prev_used = young_gen->used_in_bytes();
 192 
 193     allocate_stacks();
 194 
 195 #if COMPILER2_OR_JVMCI
 196     DerivedPointerTable::clear();
 197 #endif
 198 
 199     ref_processor()->enable_discovery();
 200     ref_processor()->setup_policy(clear_all_softrefs);
 201 
 202     mark_sweep_phase1(clear_all_softrefs);
 203 
 204     mark_sweep_phase2();
 205 
 206 #if COMPILER2_OR_JVMCI
 207     // Don't add any more derived pointers during phase3
 208     assert(DerivedPointerTable::is_active(), "Sanity");
 209     DerivedPointerTable::set_active(false);
 210 #endif
 211 
 212     mark_sweep_phase3();
 213 
 214     mark_sweep_phase4();
 215 
 216     restore_marks();
 217 
 218     deallocate_stacks();
 219 
 220     if (ZapUnusedHeapArea) {
 221       // Do a complete mangle (top to end) because the usage for
 222       // scratch does not maintain a top pointer.
 223       young_gen->to_space()->mangle_unused_area_complete();
 224     }
 225 
 226     eden_empty = young_gen->eden_space()->is_empty();
     [lines 227-234 not shown]
 235     survivors_empty = young_gen->from_space()->is_empty() &&
 236                       young_gen->to_space()->is_empty();
 237     young_gen_empty = eden_empty && survivors_empty;
 238 
 239     ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
 240     MemRegion old_mr = heap->old_gen()->reserved();
 241     if (young_gen_empty) {
 242       modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
 243     } else {
 244       modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 245     }
 246 
 247     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 248     ClassLoaderDataGraph::purge();
 249     MetaspaceAux::verify_metrics();
 250 
 251     BiasedLocking::restore_marks();
 252     CodeCache::gc_epilogue();
 253     JvmtiExport::gc_epilogue();
 254 
 255 #if COMPILER2_OR_JVMCI
 256     DerivedPointerTable::update_pointers();
 257 #endif
 258 
 259     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
 260 
 261     ref_processor()->enqueue_discovered_references(NULL, &pt);
 262 
 263     pt.print_enqueue_phase();
 264 
 265     // Update time of last GC
 266     reset_millis_since_last_gc();
 267 
 268     // Let the size policy know we're done
 269     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 270 
 271     if (UseAdaptiveSizePolicy) {
 272 
 273       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 274       log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 275                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
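
The only difference from the old version is that the three compound guards now use the COMPILER2_OR_JVMCI convenience macro, so each call site tests a single always-defined value. The authoritative definition lives in src/hotspot/share/utilities/macros.hpp; a minimal sketch of the shape such a macro takes (the exact body here is an assumption, not a quote from that header):

    // Minimal sketch, assuming the usual 0/1-valued feature-macro style;
    // see src/hotspot/share/utilities/macros.hpp for the real definition.
    #if defined(COMPILER2) || INCLUDE_JVMCI
    #define COMPILER2_OR_JVMCI 1
    #else
    #define COMPILER2_OR_JVMCI 0
    #endif

Folding the test into one macro keeps the guards short and ensures all three sites in this file stay consistent if the condition ever changes.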