// NOTE(review): excerpt of a larger full-GC routine (the enclosing function
// begins before this listing). Each line carries an embedded original-file
// line number (2052+) from the listing it was copied from; this is a numbered
// listing, not directly compilable source.

// Label the collection "Full GC (System)" in detailed logs when it was
// triggered by an explicit System.gc() request; plain "Full GC" otherwise.
2052 const char* gc_cause_str = "Full GC";
2053 if (is_system_gc && PrintGCDetails) {
2054 gc_cause_str = "Full GC (System)";
2055 }
// Optional date stamp, then scoped tracers for CPU time, elapsed time,
// collector counters, and memory-manager stats. These are stack objects;
// presumably they emit their reports on destruction at end of scope
// (HotSpot TraceTime-style RAII) — confirm against the trace utilities.
2056 gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps);
2057 TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
2058 TraceTime t1(gc_cause_str, PrintGC, !PrintGCDetails, gclog_or_tty);
2059 TraceCollectorStats tcs(counters());
2060 TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
2061
// Start accumulating old-generation collection time when requested.
2062 if (TraceGen1Time) accumulated_time()->start();
2063
2064 // Let the size policy know we're starting
2065 size_policy->major_collection_begin();
2066
2067 // When collecting the permanent generation methodOops may be moving,
2068 // so we either have to flush all bcp data or convert it into bci.
2069 CodeCache::gc_prologue();
2070 Threads::gc_prologue();
2071
// Debug-build-only sanity check (NOT_PRODUCT is compiled out in product
// builds): no references may have been recorded before discovery is enabled.
2072 NOT_PRODUCT(ref_processor()->verify_no_references_recorded());
// Reset the derived-pointer table; present only when C2 is built in.
2073 COMPILER2_PRESENT(DerivedPointerTable::clear());
2074
// Enable reference discovery and install the soft-reference clearing policy
// BEFORE the marking phase below runs — ordering matters here.
2075 ref_processor()->enable_discovery();
2076 ref_processor()->setup_policy(maximum_heap_compaction);
2077
// Not read anywhere within this excerpt; presumably consumed later in the
// enclosing function — verify against the full routine.
2078 bool marked_for_unloading = false;
2079
// Record the marking start time, then run the (parallel old) marking phase.
2080 marking_start.update();
2081 marking_phase(vmthread_cm, maximum_heap_compaction);
2082
// Non-product diagnostics: CAS contention statistics from the mark bitmap.
2083 #ifndef PRODUCT
2084 if (TraceParallelOldGCMarkingPhase) {
2085 gclog_or_tty->print_cr("marking_phase: cas_tries %d cas_retries %d "
2086 "cas_by_another %d",
2087 mark_bitmap()->cas_tries(), mark_bitmap()->cas_retries(),
2088 mark_bitmap()->cas_by_another());
2089 }
2090 #endif // #ifndef PRODUCT
2091
// A System.gc() may force maximum compaction when the
// UseMaximumCompactionOnSystemGC flag is set; either that or an explicit
// request for maximum compaction is forwarded to the summary phase.
2092 bool max_on_system_gc = UseMaximumCompactionOnSystemGC && is_system_gc;
2093 summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);
2094
// Sanity: with C2 present, the derived-pointer table must still be active
// after marking and summary.
2095 COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
|
// NOTE(review): excerpt of a larger full-GC routine (the enclosing function
// begins before this listing). Each line carries an embedded original-file
// line number (2052+) from the listing it was copied from; this is a numbered
// listing, not directly compilable source.

// Label the collection "Full GC (System)" in detailed logs when it was
// triggered by an explicit System.gc() request; plain "Full GC" otherwise.
2052 const char* gc_cause_str = "Full GC";
2053 if (is_system_gc && PrintGCDetails) {
2054 gc_cause_str = "Full GC (System)";
2055 }
// Optional date stamp, then scoped tracers for CPU time, elapsed time,
// collector counters, and memory-manager stats. These are stack objects;
// presumably they emit their reports on destruction at end of scope
// (HotSpot TraceTime-style RAII) — confirm against the trace utilities.
2056 gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps);
2057 TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
2058 TraceTime t1(gc_cause_str, PrintGC, !PrintGCDetails, gclog_or_tty);
2059 TraceCollectorStats tcs(counters());
2060 TraceMemoryManagerStats tms(true /* Full GC */,gc_cause);
2061
// Start accumulating old-generation collection time when requested.
2062 if (TraceGen1Time) accumulated_time()->start();
2063
2064 // Let the size policy know we're starting
2065 size_policy->major_collection_begin();
2066
2067 // When collecting the permanent generation methodOops may be moving,
2068 // so we either have to flush all bcp data or convert it into bci.
2069 CodeCache::gc_prologue();
2070 Threads::gc_prologue();
2071
// Reset the derived-pointer table; present only when C2 is built in.
2072 COMPILER2_PRESENT(DerivedPointerTable::clear());
2073
// Enable reference discovery BEFORE the marking phase below runs. Per the
// inline argument comments, the two flags also ask enable_discovery() to
// verify that discovery was previously disabled and that no references had
// been recorded yet, so no separate verification call is needed here.
2074 ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
// Install the soft-reference clearing policy for this collection.
2075 ref_processor()->setup_policy(maximum_heap_compaction);
2076
// Not read anywhere within this excerpt; presumably consumed later in the
// enclosing function — verify against the full routine.
2077 bool marked_for_unloading = false;
2078
// Record the marking start time, then run the (parallel old) marking phase.
2079 marking_start.update();
2080 marking_phase(vmthread_cm, maximum_heap_compaction);
2081
// Non-product diagnostics: CAS contention statistics from the mark bitmap.
2082 #ifndef PRODUCT
2083 if (TraceParallelOldGCMarkingPhase) {
2084 gclog_or_tty->print_cr("marking_phase: cas_tries %d cas_retries %d "
2085 "cas_by_another %d",
2086 mark_bitmap()->cas_tries(), mark_bitmap()->cas_retries(),
2087 mark_bitmap()->cas_by_another());
2088 }
2089 #endif // #ifndef PRODUCT
2090
// A System.gc() may force maximum compaction when the
// UseMaximumCompactionOnSystemGC flag is set; either that or an explicit
// request for maximum compaction is forwarded to the summary phase.
2091 bool max_on_system_gc = UseMaximumCompactionOnSystemGC && is_system_gc;
2092 summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);
2093
// Sanity: with C2 present, the derived-pointer table must still be active
// after marking and summary.
2094 COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
|