
src/share/vm/gc/parallel/psParallelCompact.cpp

rev 13329 : [mq]: webrev.1
rev 13331 : imported patch webrev.3b
rev 13332 : [mq]: webrev.4


1024 
1025   ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
1026   MemRegion old_mr = heap->old_gen()->reserved();
1027   if (young_gen_empty) {
1028     modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
1029   } else {
1030     modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1031   }
1032 
1033   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1034   ClassLoaderDataGraph::purge();
1035   MetaspaceAux::verify_metrics();
1036 
1037   CodeCache::gc_epilogue();
1038   JvmtiExport::gc_epilogue();
1039 
1040 #if defined(COMPILER2) || INCLUDE_JVMCI
1041   DerivedPointerTable::update_pointers();
1042 #endif
1043 
1044   ref_processor()->enqueue_discovered_references(NULL);
1045 
1046   if (ZapUnusedHeapArea) {
1047     heap->gen_mangle_unused_area();
1048   }
1049 
1050   // Update time of last GC
1051   reset_millis_since_last_gc();
1052 }
1053 
1054 HeapWord*
1055 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1056                                                     bool maximum_compaction)
1057 {
1058   const size_t region_size = ParallelCompactData::RegionSize;
1059   const ParallelCompactData& sd = summary_data();
1060 
1061   const MutableSpace* const space = _space_info[id].space();
1062   HeapWord* const top_aligned_up = sd.region_align_up(space->top());
1063   const RegionData* const beg_cp = sd.addr_to_region_ptr(space->bottom());
1064   const RegionData* const end_cp = sd.addr_to_region_ptr(top_aligned_up);


2086     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::management));
2087     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::system_dictionary));
2088     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::class_loader_data));
2089     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::jvmti));
2090     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::code_cache));
2091 
2092     if (active_gc_threads > 1) {
2093       for (uint j = 0; j < active_gc_threads; j++) {
2094         q->enqueue(new StealMarkingTask(&terminator));
2095       }
2096     }
2097 
2098     gc_task_manager()->execute_and_wait(q);
2099   }
2100 
2101   // Process reference objects found during marking
2102   {
2103     GCTraceTime(Debug, gc, phases) tm("Reference Processing", &_gc_timer);
2104 
2105     ReferenceProcessorStats stats;
2106     if (ref_processor()->processing_is_mt()) {
2107       RefProcTaskExecutor task_executor;
2108       stats = ref_processor()->process_discovered_references(
2109         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure,
2110         &task_executor, &_gc_timer);
2111     } else {
2112       stats = ref_processor()->process_discovered_references(
2113         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure, NULL,
2114         &_gc_timer);
2115     }
2116 
2117     gc_tracer->report_gc_reference_stats(stats);
2118   }
2119 
2120   // This is the point where the entire marking should have completed.
2121   assert(cm->marking_stacks_empty(), "Marking should have completed");
2122 
2123   {
2124     GCTraceTime(Debug, gc, phases) tm_m("Class Unloading", &_gc_timer);
2125 
2126     // Follow system dictionary roots and unload classes.
2127     bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), &_gc_timer);
2128 
2129     // Unload nmethods.
2130     CodeCache::do_unloading(is_alive_closure(), purged_class);
2131 
2132     // Prune dead klasses from subklass/sibling/implementor lists.
2133     Klass::clean_weak_klass_links(is_alive_closure());
2134   }
2135 
2136   {
2137     GCTraceTime(Debug, gc, phases) t("Scrub String Table", &_gc_timer);

1024 
1025   ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
1026   MemRegion old_mr = heap->old_gen()->reserved();
1027   if (young_gen_empty) {
1028     modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
1029   } else {
1030     modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1031   }
1032 
1033   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1034   ClassLoaderDataGraph::purge();
1035   MetaspaceAux::verify_metrics();
1036 
1037   CodeCache::gc_epilogue();
1038   JvmtiExport::gc_epilogue();
1039 
1040 #if defined(COMPILER2) || INCLUDE_JVMCI
1041   DerivedPointerTable::update_pointers();
1042 #endif
1043 
1044   ReferenceProcessorPhaseTimes pt(&_gc_timer, ref_processor()->num_q());
1045 
1046   ref_processor()->enqueue_discovered_references(NULL, &pt);
1047 
1048   pt.print_enqueue_phase();
1049 
1050   if (ZapUnusedHeapArea) {
1051     heap->gen_mangle_unused_area();
1052   }
1053 
1054   // Update time of last GC
1055   reset_millis_since_last_gc();
1056 }
1057 
1058 HeapWord*
1059 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1060                                                     bool maximum_compaction)
1061 {
1062   const size_t region_size = ParallelCompactData::RegionSize;
1063   const ParallelCompactData& sd = summary_data();
1064 
1065   const MutableSpace* const space = _space_info[id].space();
1066   HeapWord* const top_aligned_up = sd.region_align_up(space->top());
1067   const RegionData* const beg_cp = sd.addr_to_region_ptr(space->bottom());
1068   const RegionData* const end_cp = sd.addr_to_region_ptr(top_aligned_up);


2090     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::management));
2091     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::system_dictionary));
2092     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::class_loader_data));
2093     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::jvmti));
2094     q->enqueue(new MarkFromRootsTask(MarkFromRootsTask::code_cache));
2095 
2096     if (active_gc_threads > 1) {
2097       for (uint j = 0; j < active_gc_threads; j++) {
2098         q->enqueue(new StealMarkingTask(&terminator));
2099       }
2100     }
2101 
2102     gc_task_manager()->execute_and_wait(q);
2103   }
2104 
2105   // Process reference objects found during marking
2106   {
2107     GCTraceTime(Debug, gc, phases) tm("Reference Processing", &_gc_timer);
2108 
2109     ReferenceProcessorStats stats;
2110     ReferenceProcessorPhaseTimes pt(&_gc_timer, ref_processor()->num_q());
2111     if (ref_processor()->processing_is_mt()) {
2112       RefProcTaskExecutor task_executor;
2113       stats = ref_processor()->process_discovered_references(
2114         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure,
2115         &task_executor, &pt);
2116     } else {
2117       stats = ref_processor()->process_discovered_references(
2118         is_alive_closure(), &mark_and_push_closure, &follow_stack_closure, NULL,
2119         &pt);
2120     }
2121 
2122     gc_tracer->report_gc_reference_stats(stats);
2123     pt.print_all_references();
2124   }
2125 
2126   // This is the point where the entire marking should have completed.
2127   assert(cm->marking_stacks_empty(), "Marking should have completed");
2128 
2129   {
2130     GCTraceTime(Debug, gc, phases) tm_m("Class Unloading", &_gc_timer);
2131 
2132     // Follow system dictionary roots and unload classes.
2133     bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), &_gc_timer);
2134 
2135     // Unload nmethods.
2136     CodeCache::do_unloading(is_alive_closure(), purged_class);
2137 
2138     // Prune dead klasses from subklass/sibling/implementor lists.
2139     Klass::clean_weak_klass_links(is_alive_closure());
2140   }
2141 
2142   {
2143     GCTraceTime(Debug, gc, phases) t("Scrub String Table", &_gc_timer);

