
src/hotspot/share/gc/parallel/psParallelCompact.cpp




Old version (before the change):
1015   Universe::update_heap_info_at_gc();
1016 
1017   bool young_gen_empty = eden_empty && from_space->is_empty() &&
1018     to_space->is_empty();
1019 
1020   ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
1021   MemRegion old_mr = heap->old_gen()->reserved();
1022   if (young_gen_empty) {
1023     modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
1024   } else {
1025     modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1026   }
1027 
1028   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1029   ClassLoaderDataGraph::purge();
1030   MetaspaceAux::verify_metrics();
1031 
1032   CodeCache::gc_epilogue();
1033   JvmtiExport::gc_epilogue();
1034 
1035 #if defined(COMPILER2) || INCLUDE_JVMCI
1036   DerivedPointerTable::update_pointers();
1037 #endif
1038 
1039   ReferenceProcessorPhaseTimes pt(&_gc_timer, ref_processor()->num_q());
1040 
1041   ref_processor()->enqueue_discovered_references(NULL, &pt);
1042 
1043   pt.print_enqueue_phase();
1044 
1045   if (ZapUnusedHeapArea) {
1046     heap->gen_mangle_unused_area();
1047   }
1048 
1049   // Update time of last GC
1050   reset_millis_since_last_gc();
1051 }
1052 
1053 HeapWord*
1054 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1055                                                     bool maximum_compaction)
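The clear-versus-invalidate choice at lines 1017-1026 hinges on whether the young generation is empty after the full collection: with no young objects left, no old-to-young references can remain, so the card table covering the old generation can be wiped clean; otherwise every card is dirtied so the next minor collection rescans the old generation, since compaction has moved objects without maintaining their card marks. A minimal standalone sketch of that distinction, purely illustrative and not the JDK implementation (the clean = 0xff / dirty = 0x00 card values are assumed here):

    #include <cstdint>
    #include <cstring>
    #include <cstddef>

    // One byte per card; values follow the assumed clean/dirty convention.
    enum : uint8_t { clean_card = 0xff, dirty_card = 0x00 };

    struct CardTableSketch {
      uint8_t* cards;
      size_t   ncards;

      // Young gen empty: no old->young pointers can remain, so mark every
      // card over the old generation clean (what modBS->clear() achieves).
      void clear_all()      { std::memset(cards, clean_card, ncards); }

      // Young gen not empty: card marks are stale after compaction, so
      // conservatively dirty everything and let the next minor GC rescan
      // the old generation (what modBS->invalidate() achieves).
      void invalidate_all() { std::memset(cards, dirty_card, ncards); }
    };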


1766     gc_task_manager()->set_active_gang();
1767     gc_task_manager()->task_idle_workers();
1768 
1769     GCTraceCPUTime tcpu;
1770     GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause, true);
1771 
1772     heap->pre_full_gc_dump(&_gc_timer);
1773 
1774     TraceCollectorStats tcs(counters());
1775     TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
1776 
1777     if (log_is_enabled(Debug, gc, heap, exit)) {
1778       accumulated_time()->start();
1779     }
1780 
1781     // Let the size policy know we're starting
1782     size_policy->major_collection_begin();
1783 
1784     CodeCache::gc_prologue();
1785 
1786 #if defined(COMPILER2) || INCLUDE_JVMCI
1787     DerivedPointerTable::clear();
1788 #endif
1789 
1790     ref_processor()->enable_discovery();
1791     ref_processor()->setup_policy(maximum_heap_compaction);
1792 
1793     bool marked_for_unloading = false;
1794 
1795     marking_start.update();
1796     marking_phase(vmthread_cm, maximum_heap_compaction, &_gc_tracer);
1797 
1798     bool max_on_system_gc = UseMaximumCompactionOnSystemGC
1799       && GCCause::is_user_requested_gc(gc_cause);
1800     summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);
1801 
1802 #if defined(COMPILER2) || INCLUDE_JVMCI
1803     assert(DerivedPointerTable::is_active(), "Sanity");
1804     DerivedPointerTable::set_active(false);
1805 #endif
1806 
1807     // adjust_roots() updates Universe::_intArrayKlassObj which is
1808     // needed by the compaction for filling holes in the dense prefix.
1809     adjust_roots(vmthread_cm);
1810 
1811     compaction_start.update();
1812     compact();
1813 
1814     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
1815     // done before resizing.
1816     post_compact();
1817 
1818     // Let the size policy know we're done
1819     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1820 
1821     if (UseAdaptiveSizePolicy) {
1822       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
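Both hunks touch the DerivedPointerTable bookkeeping that only exists when C2 or JVMCI is built in: the table is cleared before marking (line 1787), deactivated before roots are adjusted (line 1804), and its entries are re-derived in post_compact (line 1036) once objects have moved. A rough standalone sketch of the idea behind that table, illustrative only and not the JDK implementation: compiled frames can hold interior (derived) pointers, so each one is recorded as an offset from its base oop and recomputed after the base has been relocated.

    #include <cstdint>
    #include <vector>

    // Illustrative sketch, not JDK code.
    struct DerivedEntry {
      uintptr_t* base_loc;     // slot holding the base pointer
      uintptr_t* derived_loc;  // slot holding the interior (derived) pointer
      intptr_t   offset;       // derived - base, captured at record time
    };

    struct DerivedPointerTableSketch {
      std::vector<DerivedEntry> entries;
      bool active = false;

      void clear()            { entries.clear(); active = true; }  // before marking
      void set_active(bool a) { active = a; }                      // before adjust_roots

      // Called while GC scans compiled frames.
      void record(uintptr_t* base_loc, uintptr_t* derived_loc) {
        if (active)
          entries.push_back({base_loc, derived_loc,
                             (intptr_t)(*derived_loc - *base_loc)});
      }

      // Called in the epilogue: the base slots now hold the new addresses,
      // so each derived pointer is rebuilt from its saved offset.
      void update_pointers() {
        for (const DerivedEntry& e : entries)
          *e.derived_loc = (uintptr_t)(*e.base_loc + e.offset);
        entries.clear();
      }
    };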



New version (after the change):
1015   Universe::update_heap_info_at_gc();
1016 
1017   bool young_gen_empty = eden_empty && from_space->is_empty() &&
1018     to_space->is_empty();
1019 
1020   ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
1021   MemRegion old_mr = heap->old_gen()->reserved();
1022   if (young_gen_empty) {
1023     modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
1024   } else {
1025     modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1026   }
1027 
1028   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1029   ClassLoaderDataGraph::purge();
1030   MetaspaceAux::verify_metrics();
1031 
1032   CodeCache::gc_epilogue();
1033   JvmtiExport::gc_epilogue();
1034 
1035 #if COMPILER2_OR_JVMCI
1036   DerivedPointerTable::update_pointers();
1037 #endif
1038 
1039   ReferenceProcessorPhaseTimes pt(&_gc_timer, ref_processor()->num_q());
1040 
1041   ref_processor()->enqueue_discovered_references(NULL, &pt);
1042 
1043   pt.print_enqueue_phase();
1044 
1045   if (ZapUnusedHeapArea) {
1046     heap->gen_mangle_unused_area();
1047   }
1048 
1049   // Update time of last GC
1050   reset_millis_since_last_gc();
1051 }
1052 
1053 HeapWord*
1054 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1055                                                     bool maximum_compaction)


1766     gc_task_manager()->set_active_gang();
1767     gc_task_manager()->task_idle_workers();
1768 
1769     GCTraceCPUTime tcpu;
1770     GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause, true);
1771 
1772     heap->pre_full_gc_dump(&_gc_timer);
1773 
1774     TraceCollectorStats tcs(counters());
1775     TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
1776 
1777     if (log_is_enabled(Debug, gc, heap, exit)) {
1778       accumulated_time()->start();
1779     }
1780 
1781     // Let the size policy know we're starting
1782     size_policy->major_collection_begin();
1783 
1784     CodeCache::gc_prologue();
1785 
1786 #if COMPILER2_OR_JVMCI
1787     DerivedPointerTable::clear();
1788 #endif
1789 
1790     ref_processor()->enable_discovery();
1791     ref_processor()->setup_policy(maximum_heap_compaction);
1792 
1793     bool marked_for_unloading = false;
1794 
1795     marking_start.update();
1796     marking_phase(vmthread_cm, maximum_heap_compaction, &_gc_tracer);
1797 
1798     bool max_on_system_gc = UseMaximumCompactionOnSystemGC
1799       && GCCause::is_user_requested_gc(gc_cause);
1800     summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);
1801 
1802 #if COMPILER2_OR_JVMCI
1803     assert(DerivedPointerTable::is_active(), "Sanity");
1804     DerivedPointerTable::set_active(false);
1805 #endif
1806 
1807     // adjust_roots() updates Universe::_intArrayKlassObj which is
1808     // needed by the compaction for filling holes in the dense prefix.
1809     adjust_roots(vmthread_cm);
1810 
1811     compaction_start.update();
1812     compact();
1813 
1814     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
1815     // done before resizing.
1816     post_compact();
1817 
1818     // Let the size policy know we're done
1819     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1820 
1821     if (UseAdaptiveSizePolicy) {
1822       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
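The only change in this file is the preprocessor guard: the three occurrences of #if defined(COMPILER2) || INCLUDE_JVMCI (lines 1035, 1786 and 1802) become #if COMPILER2_OR_JVMCI. The convenience macro presumably comes from utilities/macros.hpp and expands to 1 or 0 under the same condition; the following is a sketch of that presumed definition (verify against the actual header), which also explains why the guard stays #if rather than #ifdef:

    // Presumed shape of the helper in utilities/macros.hpp (sketch only):
    #if defined(COMPILER2) || INCLUDE_JVMCI
    #define COMPILER2_OR_JVMCI 1
    #else
    #define COMPILER2_OR_JVMCI 0
    #endif

    // Because the macro is always defined, to either 1 or 0, its value must
    // be tested with #if; #ifdef COMPILER2_OR_JVMCI would always be true.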

