< prev index next >

src/hotspot/share/gc/parallel/psParallelCompact.cpp

Print this page




3121       // region.
3122       assert(closure.source() < end_addr, "sanity");
3123       HeapWord* const obj_beg = closure.source();
3124       HeapWord* const range_end = MIN2(obj_beg + closure.words_remaining(),
3125                                        src_space_top);
3126       HeapWord* const obj_end = bitmap->find_obj_end(obj_beg, range_end);
3127       if (obj_end < range_end) {
3128         // The end was found; the entire object will fit.
3129         status = closure.do_addr(obj_beg, bitmap->obj_size(obj_beg, obj_end));
3130         assert(status != ParMarkBitMap::would_overflow, "sanity");
3131       } else {
3132         // The end was not found; the object will not fit.
3133         assert(range_end < src_space_top, "obj cannot cross space boundary");
3134         status = ParMarkBitMap::would_overflow;
3135       }
3136     }
3137 
3138     if (status == ParMarkBitMap::would_overflow) {
3139       // The last object did not fit.  Note that interior oop updates were
3140       // deferred, then copy enough of the object to fill the region.
3141       closure.set_deferred_obj_addr_for(region_ptr);

3142       status = closure.copy_until_full(); // copies from closure.source()
3143 
3144       decrement_destination_counts(cm, src_space_id, src_region_idx,
3145                                    closure.source());
3146       closure.complete_region(cm, dest_addr, region_ptr);
3147       return;
3148     }
3149 
3150     if (status == ParMarkBitMap::full) {
3151       decrement_destination_counts(cm, src_space_id, src_region_idx,
3152                                    closure.source());
3153       region_ptr->set_deferred_obj_addr(NULL);
3154       closure.complete_region(cm, dest_addr, region_ptr);
3155       return;
3156     }
3157 
3158     decrement_destination_counts(cm, src_space_id, src_region_idx, end_addr);
3159 
3160     // Move to the next source region, possibly switching spaces as well.  All
3161     // args except end_addr may be modified.


3163                                      end_addr);
3164   } while (true);
3165 }
3166 
// Fill the heap region at region_idx in place: move live objects to their new
// locations and update their interior oops via a MoveAndUpdateClosure.
void PSParallelCompact::fill_and_update_region(ParCompactionManager* cm, size_t region_idx) {
  MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
  fill_region(cm, cl, region_idx);
}
3171 
// Fill region_idx, preferring a shadow region as the temporary destination so
// the fill does not have to wait for the heap region itself to drain.
void PSParallelCompact::fill_shadow_region(ParCompactionManager* cm, size_t region_idx)
{
  // Acquire a shadow region at first
  ParallelCompactData& sd = summary_data();
  RegionData* const region_ptr = sd.region(region_idx);
  size_t shadow_region = cm->acquire_shadow_region(region_ptr);
  // The zero return value indicates the corresponding heap region is available,
  // so use MoveAndUpdateClosure to fill the normal region. Otherwise, use
  // ShadowClosure to fill the acquired shadow region.
  if (shadow_region == 0) {
    MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
    return fill_region(cm, cl, region_idx);
  } else {
    ShadowClosure cl(mark_bitmap(), cm, region_idx, shadow_region);
    return fill_region(cm, cl, region_idx);
  }
}
3189 
3190 void PSParallelCompact::copy_back(HeapWord *shadow_addr, HeapWord *region_addr) {
3191   Copy::aligned_conjoint_words(shadow_addr, region_addr, _summary_data.RegionSize);
3192 }
3193 
3194 bool PSParallelCompact::steal_shadow_region(ParCompactionManager* cm, size_t &region_idx) {
3195   size_t record = cm->shadow_record();
3196   ParallelCompactData& sd = _summary_data;
3197   size_t old_new_top = sd.addr_to_region_idx(_space_info[old_space_id].new_top());
3198   uint active_gc_threads = ParallelScavengeHeap::heap()->workers().active_workers();
3199 
3200   while (record < old_new_top) {
3201     if (sd.region(record)->try_steal()) {
3202       region_idx = record;


3288   // We need a monotonically non-decreasing time in ms but
3289   // os::javaTimeMillis() does not guarantee monotonicity.
3290   jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3291   jlong ret_val = now - _time_of_last_gc;
3292   // XXX See note in genCollectedHeap::millis_since_last_gc().
3293   if (ret_val < 0) {
3294     NOT_PRODUCT(log_warning(gc)("time warp: " JLONG_FORMAT, ret_val);)
3295     return 0;
3296   }
3297   return ret_val;
3298 }
3299 
// Record the current time as the time of the last GC, in milliseconds.
void PSParallelCompact::reset_millis_since_last_gc() {
  // We need a monotonically non-decreasing time in ms but
  // os::javaTimeMillis() does not guarantee monotonicity.
  // Derive milliseconds from the monotonic nanosecond clock instead.
  _time_of_last_gc = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
}
3305 
// Copy all remaining words starting at source() to destination(), exactly
// filling the destination region.  Always returns ParMarkBitMap::full.
ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
{
  // Skip the copy when the object is not moving.
  if (source() != destination()) {
    DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
    Copy::aligned_conjoint_words(source(), destination(), words_remaining());
  }
  update_state(words_remaining());
  assert(is_full(), "sanity");
  return ParMarkBitMap::full;
}
3316 
// Copy the partial object beginning at source(): at most words_remaining()
// words, or fewer if the object's end is found within that range.
void MoveAndUpdateClosure::copy_partial_obj()
{
  size_t words = words_remaining();

  // Bound the bitmap search by both the remaining capacity and the bitmap end.
  HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
  HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
  if (end_addr < range_end) {
    words = bitmap()->obj_size(source(), end_addr);
  }

  // This test is necessary; if omitted, the pointer updates to a partial object
  // that crosses the dense prefix boundary could be overwritten.
  if (source() != destination()) {
    DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
    Copy::aligned_conjoint_words(source(), destination(), words);
  }
  update_state(words);
}
3335 
// Mark the (normal, non-shadow) region as fully filled.  cm and dest_addr are
// unused here; they matter for the ShadowClosure override.
void MoveAndUpdateClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
                                           PSParallelCompact::RegionData *region_ptr) {
  assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::FINISH, "Region should be finished");
  region_ptr->set_completed();
}
3341 
// Record the current destination() in the start array so block starts can be
// located after compaction.
void MoveAndUpdateClosure::allocate_block() {
  assert(destination() != NULL, "sanity");
  assert(PSParallelCompact::summary_data().calc_new_pointer(source(), compaction_manager()) ==
         destination(), "wrong destination");

  // The start_array must be updated even if the object is not moving.
  if (_start_array != NULL) {
      _start_array->allocate_block(destination());
  }
}
3352 
// Remember destination() as the address of the object whose interior oop
// updates were deferred because it did not fit in this region.
void MoveAndUpdateClosure::set_deferred_obj_addr_for(PSParallelCompact::RegionData *region_ptr) {
  region_ptr->set_deferred_obj_addr(destination());
}
3356 
// Move the live object of `words` words at `addr` to destination() and update
// its interior oops.  Returns would_overflow if the object does not fit in the
// space remaining, full once the region is exactly filled, else incomplete.
ParMarkBitMapClosure::IterationStatus
MoveAndUpdateClosure::do_addr(HeapWord* addr, size_t words) {
  assert(bitmap()->obj_size(addr) == words, "bad size");

  _source = addr;

  // Bail out before any side effects if the object cannot fit.
  if (words > words_remaining()) {
    return ParMarkBitMap::would_overflow;
  }

  // Update the start array before the copy so block starts stay consistent.
  allocate_block();

  if (destination() != source()) {
    DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
    Copy::aligned_conjoint_words(source(), destination(), words);
  }

  // Update the moved object's interior oops at its new location.
  oop moved_oop = (oop) destination();
  compaction_manager()->update_contents(moved_oop);
  assert(oopDesc::is_oop_or_null(moved_oop), "Expected an oop or NULL at " PTR_FORMAT, p2i(moved_oop));

  update_state(words);
  assert(destination() == (HeapWord*)moved_oop + moved_oop->size(), "sanity");
  return is_full() ? ParMarkBitMap::full : ParMarkBitMap::incomplete;
}
3382 
// Mark the shadow region filled and, if the corresponding heap region can be
// claimed, copy the shadow contents back immediately and release the shadow.
// Otherwise the copy-back is left for whoever later makes the region available.
void ShadowClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
                                    PSParallelCompact::RegionData *region_ptr) {
  assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::FINISH, "Region should be finished");
  // Record the shadow region index
  region_ptr->set_shadow_region(_shadow);
  // Mark the shadow region filled
  region_ptr->mark_filled();
  // Try to copy the content of the shadow region back to its corresponding
  // heap region if available
  // try_copy() arbitrates with the thread draining the heap region; only the
  // winner performs the copy-back and releases the shadow region.
  if (((region_ptr->available() && region_ptr->claim()) || region_ptr->claimed()) && region_ptr->try_copy()) {
    region_ptr->set_completed();
    PSParallelCompact::copy_back(PSParallelCompact::summary_data().region_to_addr(_shadow), dest_addr);
    cm->release_shadow_region(_shadow);
  }
}
3398 
// Like MoveAndUpdateClosure::allocate_block(), but the closure writes into a
// shadow region, so subtract _offset to recover the object's real final
// address before recording it in the start array.
void ShadowClosure::allocate_block() {
  HeapWord *real_dest = destination() - _offset;
  assert(real_dest != NULL, "sanity");
  assert(PSParallelCompact::summary_data().calc_new_pointer(source(), compaction_manager()) ==
         real_dest, "wrong destination");

  // The start_array must be updated even if the object is not moving.
  if (_start_array != NULL) {
      _start_array->allocate_block(real_dest);
  }
}
3410 
// Record the deferred object's real heap address: destination() points into
// the shadow region, so subtract _offset to translate it back.
void ShadowClosure::set_deferred_obj_addr_for(PSParallelCompact::RegionData *region_ptr) {
  region_ptr->set_deferred_obj_addr(destination() - _offset);
}
3414 
// Closure that updates interior oops in place without moving objects; caches
// the start array for the given space.
UpdateOnlyClosure::UpdateOnlyClosure(ParMarkBitMap* mbm,
                                     ParCompactionManager* cm,
                                     PSParallelCompact::SpaceId space_id) :
  ParMarkBitMapClosure(mbm, cm),
  _space_id(space_id),
  _start_array(PSParallelCompact::start_array(space_id))
{
}
3423 
// Updates the references in the object to their new values.
// Delegates to the single-argument overload; `words` is unused.  Always
// returns incomplete so the bitmap iteration continues.
ParMarkBitMapClosure::IterationStatus
UpdateOnlyClosure::do_addr(HeapWord* addr, size_t words) {
  do_addr(addr);
  return ParMarkBitMap::incomplete;
}
3430 
3431 FillClosure::FillClosure(ParCompactionManager* cm, PSParallelCompact::SpaceId space_id) :
3432   ParMarkBitMapClosure(PSParallelCompact::mark_bitmap(), cm),


3121       // region.
3122       assert(closure.source() < end_addr, "sanity");
3123       HeapWord* const obj_beg = closure.source();
3124       HeapWord* const range_end = MIN2(obj_beg + closure.words_remaining(),
3125                                        src_space_top);
3126       HeapWord* const obj_end = bitmap->find_obj_end(obj_beg, range_end);
3127       if (obj_end < range_end) {
3128         // The end was found; the entire object will fit.
3129         status = closure.do_addr(obj_beg, bitmap->obj_size(obj_beg, obj_end));
3130         assert(status != ParMarkBitMap::would_overflow, "sanity");
3131       } else {
3132         // The end was not found; the object will not fit.
3133         assert(range_end < src_space_top, "obj cannot cross space boundary");
3134         status = ParMarkBitMap::would_overflow;
3135       }
3136     }
3137 
3138     if (status == ParMarkBitMap::would_overflow) {
3139       // The last object did not fit.  Note that interior oop updates were
3140       // deferred, then copy enough of the object to fill the region.
3141       region_ptr->set_deferred_obj_addr(closure.destination());
3142 
3143       status = closure.copy_until_full(); // copies from closure.source()
3144 
3145       decrement_destination_counts(cm, src_space_id, src_region_idx,
3146                                    closure.source());
3147       closure.complete_region(cm, dest_addr, region_ptr);
3148       return;
3149     }
3150 
3151     if (status == ParMarkBitMap::full) {
3152       decrement_destination_counts(cm, src_space_id, src_region_idx,
3153                                    closure.source());
3154       region_ptr->set_deferred_obj_addr(NULL);
3155       closure.complete_region(cm, dest_addr, region_ptr);
3156       return;
3157     }
3158 
3159     decrement_destination_counts(cm, src_space_id, src_region_idx, end_addr);
3160 
3161     // Move to the next source region, possibly switching spaces as well.  All
3162     // args except end_addr may be modified.


3164                                      end_addr);
3165   } while (true);
3166 }
3167 
// Fill the heap region at region_idx in place: move live objects to their new
// locations and update their interior oops via a MoveAndUpdateClosure.
void PSParallelCompact::fill_and_update_region(ParCompactionManager* cm, size_t region_idx) {
  MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
  fill_region(cm, cl, region_idx);
}
3172 
// Fill region_idx, preferring a shadow region as the temporary destination so
// the fill does not have to wait for the heap region itself to drain.
void PSParallelCompact::fill_shadow_region(ParCompactionManager* cm, size_t region_idx)
{
  // Acquire a shadow region at first
  ParallelCompactData& sd = summary_data();
  RegionData* const region_ptr = sd.region(region_idx);
  size_t shadow_region = cm->acquire_shadow_region(region_ptr);
  // The zero return value indicates the corresponding heap region is available,
  // so use MoveAndUpdateClosure to fill the normal region. Otherwise, use
  // ShadowClosure to fill the acquired shadow region.
  if (shadow_region == 0) {
    MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
    // No shadow needed: flag the region as filled directly in place.
    region_ptr->mark_normal();
    return fill_region(cm, cl, region_idx);
  } else {
    ShadowClosure cl(mark_bitmap(), cm, region_idx, shadow_region);
    return fill_region(cm, cl, region_idx);
  }
}
3191 
3192 void PSParallelCompact::copy_back(HeapWord *shadow_addr, HeapWord *region_addr) {
3193   Copy::aligned_conjoint_words(shadow_addr, region_addr, _summary_data.RegionSize);
3194 }
3195 
3196 bool PSParallelCompact::steal_shadow_region(ParCompactionManager* cm, size_t &region_idx) {
3197   size_t record = cm->shadow_record();
3198   ParallelCompactData& sd = _summary_data;
3199   size_t old_new_top = sd.addr_to_region_idx(_space_info[old_space_id].new_top());
3200   uint active_gc_threads = ParallelScavengeHeap::heap()->workers().active_workers();
3201 
3202   while (record < old_new_top) {
3203     if (sd.region(record)->try_steal()) {
3204       region_idx = record;


3290   // We need a monotonically non-decreasing time in ms but
3291   // os::javaTimeMillis() does not guarantee monotonicity.
3292   jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
3293   jlong ret_val = now - _time_of_last_gc;
3294   // XXX See note in genCollectedHeap::millis_since_last_gc().
3295   if (ret_val < 0) {
3296     NOT_PRODUCT(log_warning(gc)("time warp: " JLONG_FORMAT, ret_val);)
3297     return 0;
3298   }
3299   return ret_val;
3300 }
3301 
// Record the current time as the time of the last GC, in milliseconds.
void PSParallelCompact::reset_millis_since_last_gc() {
  // We need a monotonically non-decreasing time in ms but
  // os::javaTimeMillis() does not guarantee monotonicity.
  // Derive milliseconds from the monotonic nanosecond clock instead.
  _time_of_last_gc = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
}
3307 
// Copy all remaining words starting at source() to copy_destination() (which
// may point into a shadow region), exactly filling the destination region.
// Always returns ParMarkBitMap::full.
ParMarkBitMap::IterationStatus MoveAndUpdateClosure::copy_until_full()
{
  // Skip the copy when the object is not moving.
  if (source() != copy_destination()) {
    DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
    Copy::aligned_conjoint_words(source(), copy_destination(), words_remaining());
  }
  update_state(words_remaining());
  assert(is_full(), "sanity");
  return ParMarkBitMap::full;
}
3318 
// Copy the partial object beginning at source(): at most words_remaining()
// words, or fewer if the object's end is found within that range.
void MoveAndUpdateClosure::copy_partial_obj()
{
  size_t words = words_remaining();

  // Bound the bitmap search by both the remaining capacity and the bitmap end.
  HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
  HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
  if (end_addr < range_end) {
    words = bitmap()->obj_size(source(), end_addr);
  }

  // This test is necessary; if omitted, the pointer updates to a partial object
  // that crosses the dense prefix boundary could be overwritten.
  if (source() != copy_destination()) {
    DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
    Copy::aligned_conjoint_words(source(), copy_destination(), words);
  }
  update_state(words);
}
3337 
// Mark the (normal, non-shadow) region as fully filled.  cm and dest_addr are
// unused here; they matter for the ShadowClosure override.
void MoveAndUpdateClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
                                           PSParallelCompact::RegionData *region_ptr) {
  assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::FINISH, "Region should be finished");
  region_ptr->set_completed();
}
3343 















// Move the live object of `words` words at `addr` to copy_destination() (which
// may point into a shadow region) and update its interior oops.  Returns
// would_overflow if the object does not fit in the space remaining, full once
// the region is exactly filled, else incomplete.
ParMarkBitMapClosure::IterationStatus
MoveAndUpdateClosure::do_addr(HeapWord* addr, size_t words) {
  assert(destination() != NULL, "sanity");
  assert(bitmap()->obj_size(addr) == words, "bad size");

  _source = addr;
  assert(PSParallelCompact::summary_data().calc_new_pointer(source(), compaction_manager()) ==
         destination(), "wrong destination");

  // Bail out before any side effects if the object cannot fit.
  if (words > words_remaining()) {
    return ParMarkBitMap::would_overflow;
  }

  // The start_array must be updated even if the object is not moving.
  if (_start_array != NULL) {
    _start_array->allocate_block(destination());
  }

  if (copy_destination() != source()) {
    DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
    Copy::aligned_conjoint_words(source(), copy_destination(), words);
  }

  // Update the moved object's interior oops at the address it was copied to.
  oop moved_oop = (oop) copy_destination();
  compaction_manager()->update_contents(moved_oop);
  assert(oopDesc::is_oop_or_null(moved_oop), "Expected an oop or NULL at " PTR_FORMAT, p2i(moved_oop));

  update_state(words);
  assert(copy_destination() == (HeapWord*)moved_oop + moved_oop->size(), "sanity");
  return is_full() ? ParMarkBitMap::full : ParMarkBitMap::incomplete;
}
3375 
// Mark the shadow region filled and, if the corresponding heap region can be
// claimed, copy the shadow contents back immediately and release the shadow.
// Otherwise the copy-back is left for whoever later makes the region available.
void ShadowClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
                                    PSParallelCompact::RegionData *region_ptr) {
  assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::SHADOW, "Region should be shadow");
  // Record the shadow region index
  region_ptr->set_shadow_region(_shadow);
  // Mark the shadow region filled
  region_ptr->mark_filled();
  // Try to copy the content of the shadow region back to its corresponding
  // heap region if available
  // try_copy() arbitrates with the thread draining the heap region; only the
  // winner performs the copy-back and releases the shadow region.
  if (((region_ptr->available() && region_ptr->claim()) || region_ptr->claimed()) && region_ptr->try_copy()) {
    region_ptr->set_completed();
    PSParallelCompact::copy_back(PSParallelCompact::summary_data().region_to_addr(_shadow), dest_addr);
    cm->release_shadow_region(_shadow);
  }
}
3391 
// Closure that updates interior oops in place without moving objects; caches
// the start array for the given space.
UpdateOnlyClosure::UpdateOnlyClosure(ParMarkBitMap* mbm,
                                     ParCompactionManager* cm,
                                     PSParallelCompact::SpaceId space_id) :
  ParMarkBitMapClosure(mbm, cm),
  _space_id(space_id),
  _start_array(PSParallelCompact::start_array(space_id))
{
}
3400 
// Updates the references in the object to their new values.
// Delegates to the single-argument overload; `words` is unused.  Always
// returns incomplete so the bitmap iteration continues.
ParMarkBitMapClosure::IterationStatus
UpdateOnlyClosure::do_addr(HeapWord* addr, size_t words) {
  do_addr(addr);
  return ParMarkBitMap::incomplete;
}
3407 
3408 FillClosure::FillClosure(ParCompactionManager* cm, PSParallelCompact::SpaceId space_id) :
3409   ParMarkBitMapClosure(PSParallelCompact::mark_bitmap(), cm),
< prev index next >