
src/hotspot/share/gc/parallel/psParallelCompact.cpp

rev 60421 : [mq]: 8248401-stefank-review


 826 {
 827   const size_t* const beg = (const size_t*)vspace->committed_low_addr();
 828   const size_t* const end = (const size_t*)vspace->committed_high_addr();
 829   for (const size_t* p = beg; p < end; ++p) {
 830     assert(*p == 0, "not zero");
 831   }
 832 }
 833 
 834 void ParallelCompactData::verify_clear()
 835 {
 836   verify_clear(_region_vspace);
 837   verify_clear(_block_vspace);
 838 }
 839 #endif  // #ifdef ASSERT
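A note on the check above: verify_clear() walks every committed word of the region and block backing stores and asserts that each one is zero; it is compiled only under ASSERT. A minimal standalone sketch of the same pattern over an arbitrary [beg, end) range:

    #ifdef ASSERT
    // Debug-only: assert that a committed range of size_t words is zeroed.
    static void assert_range_is_zero(const size_t* beg, const size_t* end) {
      for (const size_t* p = beg; p < end; ++p) {
        assert(*p == 0, "backing store not zeroed");
      }
    }
    #endif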
 840 
 841 STWGCTimer          PSParallelCompact::_gc_timer;
 842 ParallelOldTracer   PSParallelCompact::_gc_tracer;
 843 elapsedTimer        PSParallelCompact::_accumulated_time;
 844 unsigned int        PSParallelCompact::_total_invocations = 0;
 845 unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
 846 jlong               PSParallelCompact::_time_of_last_gc = 0;
 847 CollectorCounters*  PSParallelCompact::_counters = NULL;
 848 ParMarkBitMap       PSParallelCompact::_mark_bitmap;
 849 ParallelCompactData PSParallelCompact::_summary_data;
 850 
 851 PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
 852 
 853 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
 854 
 855 class PCReferenceProcessor: public ReferenceProcessor {
 856 public:
 857   PCReferenceProcessor(
 858     BoolObjectClosure* is_subject_to_discovery,
 859     BoolObjectClosure* is_alive_non_header) :
 860       ReferenceProcessor(is_subject_to_discovery,
 861       ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 862       ParallelGCThreads,   // mt processing degree
 863       true,                // mt discovery
 864       ParallelGCThreads,   // mt discovery degree
 865       true,                // atomic_discovery
 866       is_alive_non_header) {
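A note on the constructor arguments: discovery is always multi-threaded here, while reference processing is multi-threaded only when ParallelRefProcEnabled is set and more than one GC worker is configured. The guard in isolation, as a sketch using the same globals:

    // MT processing is opt-in and pointless with a single worker;
    // MT discovery is unconditionally enabled for this collector.
    static bool use_mt_ref_processing() {
      return ParallelRefProcEnabled && (ParallelGCThreads > 1);
    }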


1028   ParCompactionManager::remove_all_shadow_regions();
1029 
1030   for (unsigned int id = old_space_id; id < last_space_id; ++id) {
1031     // Clear the marking bitmap, summary data and split info.
1032     clear_data_covering_space(SpaceId(id));
1033     // Update top().  Must be done after clearing the bitmap and summary data.
1034     _space_info[id].publish_new_top();
1035   }
1036 
1037   MutableSpace* const eden_space = _space_info[eden_space_id].space();
1038   MutableSpace* const from_space = _space_info[from_space_id].space();
1039   MutableSpace* const to_space   = _space_info[to_space_id].space();
1040 
1041   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
1042   bool eden_empty = eden_space->is_empty();
1043 
1044   // Update heap occupancy information which is used as input to the soft ref
1045   // clearing policy at the next gc.
1046   Universe::update_heap_info_at_gc();
1047 
1048   bool young_gen_empty = eden_empty && from_space->is_empty() &&
1049     to_space->is_empty();
1050 
1051   PSCardTable* ct = heap->card_table();
1052   MemRegion old_mr = heap->old_gen()->reserved();
1053   if (young_gen_empty) {
1054     ct->clear(MemRegion(old_mr.start(), old_mr.end()));
1055   } else {
1056     ct->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1057   }
1058 
1059   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1060   ClassLoaderDataGraph::purge();
1061   MetaspaceUtils::verify_metrics();
1062 
1063   heap->prune_scavengable_nmethods();
1064 
1065 #if COMPILER2_OR_JVMCI
1066   DerivedPointerTable::update_pointers();
1067 #endif
1068 
1069   if (ZapUnusedHeapArea) {
1070     heap->gen_mangle_unused_area();
1071   }
1072 
1073   // Update time of last GC
1074   reset_millis_since_last_gc();
1075 }
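A note on the card table handling above: a dirty card tells the next young collection to rescan that slice of the old generation for old-to-young pointers. When the entire young generation is empty, no such pointers can exist, so the table over the old generation can be wiped clean; otherwise every card must be treated as potentially dirty. A toy illustration with assumed byte encodings (the real values are defined by CardTable):

    #include <cstring>

    // Toy card table: one byte per fixed-size window of old-gen memory.
    // The two encodings below are assumptions for illustration only.
    enum : unsigned char { dirty_card = 0x00, clean_card = 0xff };

    static void clear_cards(unsigned char* cards, size_t n) {
      std::memset(cards, clean_card, n);   // young gen empty: nothing to rescan
    }
    static void invalidate_cards(unsigned char* cards, size_t n) {
      std::memset(cards, dirty_card, n);   // force a rescan at the next scavenge
    }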
1076 
1077 HeapWord*
1078 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1079                                                     bool maximum_compaction)
1080 {
1081   const size_t region_size = ParallelCompactData::RegionSize;
1082   const ParallelCompactData& sd = summary_data();
1083 
1084   const MutableSpace* const space = _space_info[id].space();
1085   HeapWord* const top_aligned_up = sd.region_align_up(space->top());
1086   const RegionData* const beg_cp = sd.addr_to_region_ptr(space->bottom());
1087   const RegionData* const end_cp = sd.addr_to_region_ptr(top_aligned_up);
1088 
1089   // Skip full regions at the beginning of the space--they are necessarily part
1090   // of the dense prefix.
1091   size_t full_count = 0;
1092   const RegionData* cp;
1093   for (cp = beg_cp; cp < end_cp && cp->data_size() == region_size; ++cp) {
1094     ++full_count;
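A note on the loop above: data_size() == RegionSize means the region is completely live, and a fully live region at the bottom of the space would be copied onto itself, so such regions are counted straight into the dense prefix. The rest of the function (not shown here) then extends the prefix while the live-data density stays high enough; a hedged sketch of that criterion, with a purely illustrative threshold:

    // Illustrative only: the real policy derives its threshold from the
    // dead-wood ratios, not from this constant.
    static bool dense_enough(size_t live_words, size_t prefix_words) {
      const double threshold = 0.95;  // assumed value, for illustration
      return prefix_words > 0 &&
             (double)live_words / (double)prefix_words >= threshold;
    }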


3178 
3179   size_t beg_bit = bitmap->words_to_bits(region_idx << Log2RegionSize);
3180   const size_t range_end = beg_bit + bitmap->words_to_bits(RegionSize);
3181   size_t live_bits = bitmap->words_to_bits(partial_obj_size);
3182   beg_bit = bitmap->find_obj_beg(beg_bit + live_bits, range_end);
3183   while (beg_bit < range_end) {
3184     const size_t new_block = beg_bit >> Log2BitsPerBlock;
3185     if (new_block != cur_block) {
3186       cur_block = new_block;
3187       sd.block(cur_block)->set_offset(bitmap->bits_to_words(live_bits));
3188     }
3189 
3190     const size_t end_bit = bitmap->find_obj_end(beg_bit, range_end);
3191     if (end_bit < range_end - 1) {
3192       live_bits += end_bit - beg_bit + 1;
3193       beg_bit = bitmap->find_obj_beg(end_bit + 1, range_end);
3194     } else {
3195       return;
3196     }
3197   }
3198 }
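A note on the scan above: it records, for each block in which some object starts, how many live words precede that block's first object within the region; object extents come from the mark bitmap's begin and end bits. The skeleton of the scan, with the bitmap calls abstracted into assumed free functions:

    // Assumed helpers: find_obj_beg/find_obj_end return range_end (or
    // beyond) when no further begin/end bit exists in the range.
    size_t live = initial_live_bits;               // live bits before the scan
    size_t cur_block = initial_block;
    size_t beg = find_obj_beg(range_beg + live, range_end);
    while (beg < range_end) {
      size_t block = beg >> Log2BitsPerBlock;      // block holding this start
      if (block != cur_block) {
        cur_block = block;
        record_offset(block, bits_to_words(live)); // live words seen so far
      }
      size_t end = find_obj_end(beg, range_end);   // last bit of this object
      if (end >= range_end - 1) break;             // object reaches range end
      live += end - beg + 1;                       // whole object is live
      beg = find_obj_beg(end + 1, range_end);      // next object, if any
    }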
3199 
3200 jlong PSParallelCompact::millis_since_last_gc() {
3201   // We need a monotonically non-decreasing time in ms but
3202   // os::javaTimeMillis() does not guarantee monotonicity.
3203   jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3204   jlong ret_val = now - _time_of_last_gc;
3205   // XXX See note in genCollectedHeap::millis_since_last_gc().
3206   if (ret_val < 0) {
3207     NOT_PRODUCT(log_warning(gc)("time warp: " JLONG_FORMAT, ret_val);)
3208     return 0;
3209   }
3210   return ret_val;
3211 }
3212 
3213 void PSParallelCompact::reset_millis_since_last_gc() {
3214   // We need a monotonically non-decreasing time in ms but
3215   // os::javaTimeMillis() does not guarantee monotonicity.
3216   _time_of_last_gc = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3217 }
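A note on the pair above: os::javaTimeMillis() follows the wall clock and can move backwards (for example after a time adjustment), so both functions derive milliseconds from the monotonic os::javaTimeNanos() instead, and millis_since_last_gc() additionally clamps a negative delta to zero. The pattern in isolation:

    // Monotonic elapsed-ms pattern; last_ms must be recorded from the
    // same clock (as reset_millis_since_last_gc() does above).
    static jlong elapsed_millis_since(jlong last_ms) {
      jlong now_ms = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
      jlong delta  = now_ms - last_ms;
      return delta < 0 ? 0 : delta;  // never report a negative interval
    }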
3218 
3219 ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
3220 {
3221   if (source() != copy_destination()) {
3222     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3223     Copy::aligned_conjoint_words(source(), copy_destination(), words_remaining());
3224   }
3225   update_state(words_remaining());
3226   assert(is_full(), "sanity");
3227   return ParMarkBitMap::full;
3228 }
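A note on the copy above: "conjoint" in HotSpot's Copy API means the source and destination ranges may overlap, which is exactly the situation during compaction as objects slide toward lower addresses. A rough standalone analogue (assumption: HeapWord is pointer-sized):

    #include <cstring>

    // Overlap-safe word copy, like Copy::aligned_conjoint_words(from, to, n):
    // memmove handles overlapping ranges; memcpy would not.
    inline void copy_words_overlap_safe(void* from, void* to, size_t count) {
      std::memmove(to, from, count * sizeof(void*));
    }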
3229 
3230 void MoveAndUpdateClosure::copy_partial_obj()
3231 {
3232   size_t words = words_remaining();
3233 
3234   HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
3235   HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
3236   if (end_addr < range_end) {
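A note on copy_partial_obj(): it handles an object that started in an earlier region and spills into this one, so the scan is clamped to whichever comes first, the words the destination can still hold or the end of the bitmap's covered range, and find_obj_end() then locates the object's last word within that limit. Restated as a sketch (names mirror the closure's accessors):

    // Never scan past the remaining destination capacity nor past the
    // bitmap's coverage; then find where the spilled-in object ends.
    HeapWord* const limit   = MIN2(source() + words_remaining(),
                                   bitmap()->region_end());
    HeapWord* const obj_end = bitmap()->find_obj_end(source(), limit);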




 826 {
 827   const size_t* const beg = (const size_t*)vspace->committed_low_addr();
 828   const size_t* const end = (const size_t*)vspace->committed_high_addr();
 829   for (const size_t* p = beg; p < end; ++p) {
 830     assert(*p == 0, "not zero");
 831   }
 832 }
 833 
 834 void ParallelCompactData::verify_clear()
 835 {
 836   verify_clear(_region_vspace);
 837   verify_clear(_block_vspace);
 838 }
 839 #endif  // #ifdef ASSERT
 840 
 841 STWGCTimer          PSParallelCompact::_gc_timer;
 842 ParallelOldTracer   PSParallelCompact::_gc_tracer;
 843 elapsedTimer        PSParallelCompact::_accumulated_time;
 844 unsigned int        PSParallelCompact::_total_invocations = 0;
 845 unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
 846 CollectorCounters*  PSParallelCompact::_counters = NULL;
 847 ParMarkBitMap       PSParallelCompact::_mark_bitmap;
 848 ParallelCompactData PSParallelCompact::_summary_data;
 849 
 850 PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
 851 
 852 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
 853 
 854 class PCReferenceProcessor: public ReferenceProcessor {
 855 public:
 856   PCReferenceProcessor(
 857     BoolObjectClosure* is_subject_to_discovery,
 858     BoolObjectClosure* is_alive_non_header) :
 859       ReferenceProcessor(is_subject_to_discovery,
 860       ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing
 861       ParallelGCThreads,   // mt processing degree
 862       true,                // mt discovery
 863       ParallelGCThreads,   // mt discovery degree
 864       true,                // atomic_discovery
 865       is_alive_non_header) {


1027   ParCompactionManager::remove_all_shadow_regions();
1028 
1029   for (unsigned int id = old_space_id; id < last_space_id; ++id) {
1030     // Clear the marking bitmap, summary data and split info.
1031     clear_data_covering_space(SpaceId(id));
1032     // Update top().  Must be done after clearing the bitmap and summary data.
1033     _space_info[id].publish_new_top();
1034   }
1035 
1036   MutableSpace* const eden_space = _space_info[eden_space_id].space();
1037   MutableSpace* const from_space = _space_info[from_space_id].space();
1038   MutableSpace* const to_space   = _space_info[to_space_id].space();
1039 
1040   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
1041   bool eden_empty = eden_space->is_empty();
1042 
1043   // Update heap occupancy information which is used as input to the soft ref
1044   // clearing policy at the next gc.
1045   Universe::update_heap_info_at_gc();
1046 
1047   Universe::heap()->next_whole_heap_examined();
1048 
1049   bool young_gen_empty = eden_empty && from_space->is_empty() &&
1050     to_space->is_empty();
1051 
1052   PSCardTable* ct = heap->card_table();
1053   MemRegion old_mr = heap->old_gen()->reserved();
1054   if (young_gen_empty) {
1055     ct->clear(MemRegion(old_mr.start(), old_mr.end()));
1056   } else {
1057     ct->invalidate(MemRegion(old_mr.start(), old_mr.end()));
1058   }
1059 
1060   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
1061   ClassLoaderDataGraph::purge();
1062   MetaspaceUtils::verify_metrics();
1063 
1064   heap->prune_scavengable_nmethods();
1065 
1066 #if COMPILER2_OR_JVMCI
1067   DerivedPointerTable::update_pointers();
1068 #endif
1069 
1070   if (ZapUnusedHeapArea) {
1071     heap->gen_mangle_unused_area();
1072   }
1073 }
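A note on the change in this function: the new call to Universe::heap()->next_whole_heap_examined() (line 1047 above) replaces the collector-local _time_of_last_gc bookkeeping that this patch deletes, so the shared heap code, rather than each collector, now records when a whole-heap collection last completed. A hedged sketch of what such a hook is assumed to store; the field name here is hypothetical:

    // Stamp a monotonic time that "millis since the last whole-heap
    // GC" queries can later be computed from.
    void CollectedHeap::next_whole_heap_examined() {
      _last_whole_heap_examined_time_ns = os::javaTimeNanos();
    }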
1074 
1075 HeapWord*
1076 PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
1077                                                     bool maximum_compaction)
1078 {
1079   const size_t region_size = ParallelCompactData::RegionSize;
1080   const ParallelCompactData& sd = summary_data();
1081 
1082   const MutableSpace* const space = _space_info[id].space();
1083   HeapWord* const top_aligned_up = sd.region_align_up(space->top());
1084   const RegionData* const beg_cp = sd.addr_to_region_ptr(space->bottom());
1085   const RegionData* const end_cp = sd.addr_to_region_ptr(top_aligned_up);
1086 
1087   // Skip full regions at the beginning of the space--they are necessarily part
1088   // of the dense prefix.
1089   size_t full_count = 0;
1090   const RegionData* cp;
1091   for (cp = beg_cp; cp < end_cp && cp->data_size() == region_size; ++cp) {
1092     ++full_count;


3176 
3177   size_t beg_bit = bitmap->words_to_bits(region_idx << Log2RegionSize);
3178   const size_t range_end = beg_bit + bitmap->words_to_bits(RegionSize);
3179   size_t live_bits = bitmap->words_to_bits(partial_obj_size);
3180   beg_bit = bitmap->find_obj_beg(beg_bit + live_bits, range_end);
3181   while (beg_bit < range_end) {
3182     const size_t new_block = beg_bit >> Log2BitsPerBlock;
3183     if (new_block != cur_block) {
3184       cur_block = new_block;
3185       sd.block(cur_block)->set_offset(bitmap->bits_to_words(live_bits));
3186     }
3187 
3188     const size_t end_bit = bitmap->find_obj_end(beg_bit, range_end);
3189     if (end_bit < range_end - 1) {
3190       live_bits += end_bit - beg_bit + 1;
3191       beg_bit = bitmap->find_obj_beg(end_bit + 1, range_end);
3192     } else {
3193       return;
3194     }
3195   }
3196 }
3197 
3198 ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
3199 {
3200   if (source() != copy_destination()) {
3201     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3202     Copy::aligned_conjoint_words(source(), copy_destination(), words_remaining());
3203   }
3204   update_state(words_remaining());
3205   assert(is_full(), "sanity");
3206   return ParMarkBitMap::full;
3207 }
3208 
3209 void MoveAndUpdateClosure::copy_partial_obj()
3210 {
3211   size_t words = words_remaining();
3212 
3213   HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
3214   HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
3215   if (end_addr < range_end) {

