src/hotspot/share/gc/parallel/psParallelCompact.cpp

rev 60257 : [mq]: 8248401-unify-millis-since-last-gc


 826 {
 827   const size_t* const beg = (const size_t*)vspace->committed_low_addr();
 828   const size_t* const end = (const size_t*)vspace->committed_high_addr();
 829   for (const size_t* p = beg; p < end; ++p) {
 830     assert(*p == 0, "not zero");
 831   }
 832 }
 833 
 834 void ParallelCompactData::verify_clear()
 835 {
 836   verify_clear(_region_vspace);
 837   verify_clear(_block_vspace);
 838 }
 839 #endif  // #ifdef ASSERT
 840 
 841 STWGCTimer          PSParallelCompact::_gc_timer;
 842 ParallelOldTracer   PSParallelCompact::_gc_tracer;
 843 elapsedTimer        PSParallelCompact::_accumulated_time;
 844 unsigned int        PSParallelCompact::_total_invocations = 0;
 845 unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
 846 jlong               PSParallelCompact::_time_of_last_gc = 0;
 847 CollectorCounters*  PSParallelCompact::_counters = NULL;
 848 ParMarkBitMap       PSParallelCompact::_mark_bitmap;
 849 ParallelCompactData PSParallelCompact::_summary_data;
 850 
 851 PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
 852 
 853 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
 854 
 855 class PCReferenceProcessor: public ReferenceProcessor {
 856 public:
 857   PCReferenceProcessor(
 858     BoolObjectClosure* is_subject_to_discovery,
 859     BoolObjectClosure* is_alive_non_header) :
 860       ReferenceProcessor(is_subject_to_discovery,
 861       ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 862       ParallelGCThreads,   // mt processing degree
 863       true,                // mt discovery
 864       ParallelGCThreads,   // mt discovery degree
 865       true,                // atomic_discovery
 866       is_alive_non_header) {


1054   if (young_gen_empty) {
1055     ct->clear(MemRegion(old_mr.start(), old_mr.end()));
1056   } else {
1057     ct->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1058   }
1059 
1060   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1061   ClassLoaderDataGraph::purge();
1062   MetaspaceUtils::verify_metrics();
1063 
1064   heap->prune_scavengable_nmethods();
1065 
1066 #if COMPILER2_OR_JVMCI
1067   DerivedPointerTable::update_pointers();
1068 #endif
1069 
1070   if (ZapUnusedHeapArea) {
1071     heap->gen_mangle_unused_area();
1072   }
1073 
1074   // Update time of last GC
1075   reset_millis_since_last_gc();
1076 }
1077 
1078 HeapWord*
1079 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1080                                                     bool maximum_compaction)
1081 {
1082   const size_t region_size = ParallelCompactData::RegionSize;
1083   const ParallelCompactData& sd = summary_data();
1084 
1085   const MutableSpace* const space = _space_info[id].space();
1086   HeapWord* const top_aligned_up = sd.region_align_up(space->top());
1087   const RegionData* const beg_cp = sd.addr_to_region_ptr(space->bottom());
1088   const RegionData* const end_cp = sd.addr_to_region_ptr(top_aligned_up);
1089 
1090   // Skip full regions at the beginning of the space--they are necessarily part
1091   // of the dense prefix.
1092   size_t full_count = 0;
1093   const RegionData* cp;
1094   for (cp = beg_cp; cp < end_cp && cp->data_size() == region_size; ++cp) {
1095     ++full_count;


3181 
3182   size_t beg_bit = bitmap->words_to_bits(region_idx << Log2RegionSize);
3183   const size_t range_end = beg_bit + bitmap->words_to_bits(RegionSize);
3184   size_t live_bits = bitmap->words_to_bits(partial_obj_size);
3185   beg_bit = bitmap->find_obj_beg(beg_bit + live_bits, range_end);
3186   while (beg_bit < range_end) {
3187     const size_t new_block = beg_bit >> Log2BitsPerBlock;
3188     if (new_block != cur_block) {
3189       cur_block = new_block;
3190       sd.block(cur_block)->set_offset(bitmap->bits_to_words(live_bits));
3191     }
3192 
3193     const size_t end_bit = bitmap->find_obj_end(beg_bit, range_end);
3194     if (end_bit < range_end - 1) {
3195       live_bits += end_bit - beg_bit + 1;
3196       beg_bit = bitmap->find_obj_beg(end_bit + 1, range_end);
3197     } else {
3198       return;
3199     }
3200   }
3201 }
3202 
3203 jlong PSParallelCompact::millis_since_last_gc() {
3204   // We need a monotonically non-decreasing time in ms but
3205   // os::javaTimeMillis() does not guarantee monotonicity.
3206   jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3207   jlong ret_val = now - _time_of_last_gc;
3208   // XXX See note in genCollectedHeap::millis_since_last_gc().
3209   if (ret_val < 0) {
3210     NOT_PRODUCT(log_warning(gc)("time warp: " JLONG_FORMAT, ret_val);)
3211     return 0;
3212   }
3213   return ret_val;
3214 }
3215 
3216 void PSParallelCompact::reset_millis_since_last_gc() {
3217   // We need a monotonically non-decreasing time in ms but
3218   // os::javaTimeMillis() does not guarantee monotonicity.
3219   _time_of_last_gc = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3220 }
3221 
3222 ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
3223 {
3224   if (source() != copy_destination()) {
3225     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3226     Copy::aligned_conjoint_words(source(), copy_destination(), words_remaining());
3227   }
3228   update_state(words_remaining());
3229   assert(is_full(), "sanity");
3230   return ParMarkBitMap::full;
3231 }
3232 
3233 void MoveAndUpdateClosure::copy_partial_obj()
3234 {
3235   size_t words = words_remaining();
3236 
3237   HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
3238   HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
3239   if (end_addr < range_end) {




 826 {
 827   const size_t* const beg = (const size_t*)vspace->committed_low_addr();
 828   const size_t* const end = (const size_t*)vspace->committed_high_addr();
 829   for (const size_t* p = beg; p < end; ++p) {
 830     assert(*p == 0, "not zero");
 831   }
 832 }
 833 
 834 void ParallelCompactData::verify_clear()
 835 {
 836   verify_clear(_region_vspace);
 837   verify_clear(_block_vspace);
 838 }
 839 #endif  // #ifdef ASSERT
 840 
 841 STWGCTimer          PSParallelCompact::_gc_timer;
 842 ParallelOldTracer   PSParallelCompact::_gc_tracer;
 843 elapsedTimer        PSParallelCompact::_accumulated_time;
 844 unsigned int        PSParallelCompact::_total_invocations = 0;
 845 unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;

 846 CollectorCounters*  PSParallelCompact::_counters = NULL;
 847 ParMarkBitMap       PSParallelCompact::_mark_bitmap;
 848 ParallelCompactData PSParallelCompact::_summary_data;
 849 
 850 PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
 851 
 852 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
 853 
 854 class PCReferenceProcessor: public ReferenceProcessor {
 855 public:
 856   PCReferenceProcessor(
 857     BoolObjectClosure* is_subject_to_discovery,
 858     BoolObjectClosure* is_alive_non_header) :
 859       ReferenceProcessor(is_subject_to_discovery,
 860       ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 861       ParallelGCThreads,   // mt processing degree
 862       true,                // mt discovery
 863       ParallelGCThreads,   // mt discovery degree
 864       true,                // atomic_discovery
 865       is_alive_non_header) {


1053   if (young_gen_empty) {
1054     ct->clear(MemRegion(old_mr.start(), old_mr.end()));
1055   } else {
1056     ct->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1057   }
1058 
1059   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1060   ClassLoaderDataGraph::purge();
1061   MetaspaceUtils::verify_metrics();
1062 
1063   heap->prune_scavengable_nmethods();
1064 
1065 #if COMPILER2_OR_JVMCI
1066   DerivedPointerTable::update_pointers();
1067 #endif
1068 
1069   if (ZapUnusedHeapArea) {
1070     heap->gen_mangle_unused_area();
1071   }
1072 
1073   Universe::heap()->next_whole_heap_examined();

1074 }
1075 
1076 HeapWord*
1077 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1078                                                     bool maximum_compaction)
1079 {
1080   const size_t region_size = ParallelCompactData::RegionSize;
1081   const ParallelCompactData& sd = summary_data();
1082 
1083   const MutableSpace* const space = _space_info[id].space();
1084   HeapWord* const top_aligned_up = sd.region_align_up(space->top());
1085   const RegionData* const beg_cp = sd.addr_to_region_ptr(space->bottom());
1086   const RegionData* const end_cp = sd.addr_to_region_ptr(top_aligned_up);
1087 
1088   // Skip full regions at the beginning of the space--they are necessarily part
1089   // of the dense prefix.
1090   size_t full_count = 0;
1091   const RegionData* cp;
1092   for (cp = beg_cp; cp < end_cp && cp->data_size() == region_size; ++cp) {
1093     ++full_count;
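
Note on the change above: with 8248401, the per-collector bookkeeping (_time_of_last_gc, millis_since_last_gc(), reset_millis_since_last_gc()) is dropped and the collector instead notifies the shared heap via Universe::heap()->next_whole_heap_examined(). A minimal sketch of what the shared CollectedHeap side could look like follows; the field name and the query method name are assumptions for illustration, not taken from this webrev.

  // Hypothetical sketch (assumed names, not the actual patch): record a
  // monotonic timestamp when a collection has examined the whole heap,
  // and report elapsed millis on request. os::javaTimeNanos() is used
  // because os::javaTimeMillis() does not guarantee monotonicity.
  void CollectedHeap::next_whole_heap_examined() {
    _last_whole_heap_examined_time_ns = os::javaTimeNanos();    // assumed field
  }

  jlong CollectedHeap::millis_since_last_whole_heap_examined() {  // assumed name
    return (os::javaTimeNanos() - _last_whole_heap_examined_time_ns) /
           NANOSECS_PER_MILLISEC;
  }

Callers that previously asked PSParallelCompact::millis_since_last_gc() would then presumably query the heap through such an accessor instead.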


3179 
3180   size_t beg_bit = bitmap->words_to_bits(region_idx << Log2RegionSize);
3181   const size_t range_end = beg_bit + bitmap->words_to_bits(RegionSize);
3182   size_t live_bits = bitmap->words_to_bits(partial_obj_size);
3183   beg_bit = bitmap->find_obj_beg(beg_bit + live_bits, range_end);
3184   while (beg_bit < range_end) {
3185     const size_t new_block = beg_bit >> Log2BitsPerBlock;
3186     if (new_block != cur_block) {
3187       cur_block = new_block;
3188       sd.block(cur_block)->set_offset(bitmap->bits_to_words(live_bits));
3189     }
3190 
3191     const size_t end_bit = bitmap->find_obj_end(beg_bit, range_end);
3192     if (end_bit < range_end - 1) {
3193       live_bits += end_bit - beg_bit + 1;
3194       beg_bit = bitmap->find_obj_beg(end_bit + 1, range_end);
3195     } else {
3196       return;
3197     }
3198   }
3199 }
3200 
3201 ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
3202 {
3203   if (source() != copy_destination()) {
3204     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3205     Copy::aligned_conjoint_words(source(), copy_destination(), words_remaining());
3206   }
3207   update_state(words_remaining());
3208   assert(is_full(), "sanity");
3209   return ParMarkBitMap::full;
3210 }
3211 
3212 void MoveAndUpdateClosure::copy_partial_obj()
3213 {
3214   size_t words = words_remaining();
3215 
3216   HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
3217   HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
3218   if (end_addr < range_end) {

