
src/hotspot/share/gc/parallel/psParallelCompact.cpp

rev 57190 : [mq]: shadow-regions.5
rev 57191 : [mq]: shadow-regions.5_to_6
rev 57192 : imported patch shadow-regions-final-fixes


3117       assert(closure.source() < end_addr, "sanity");
3118       HeapWord* const obj_beg = closure.source();
3119       HeapWord* const range_end = MIN2(obj_beg + closure.words_remaining(),
3120                                        src_space_top);
3121       HeapWord* const obj_end = bitmap->find_obj_end(obj_beg, range_end);
3122       if (obj_end < range_end) {
3123         // The end was found; the entire object will fit.
3124         status = closure.do_addr(obj_beg, bitmap->obj_size(obj_beg, obj_end));
3125         assert(status != ParMarkBitMap::would_overflow, "sanity");
3126       } else {
3127         // The end was not found; the object will not fit.
3128         assert(range_end < src_space_top, "obj cannot cross space boundary");
3129         status = ParMarkBitMap::would_overflow;
3130       }
3131     }
3132 
3133     if (status == ParMarkBitMap::would_overflow) {
3134       // The last object did not fit.  Record its destination so the interior oop
3135       // updates can be deferred, then copy enough of the object to fill the region.
3136       region_ptr->set_deferred_obj_addr(closure.destination());
3137 
3138       status = closure.copy_until_full(); // copies from closure.source()
3139 
3140       decrement_destination_counts(cm, src_space_id, src_region_idx,
3141                                    closure.source());
3142       closure.complete_region(cm, dest_addr, region_ptr);
3143       return;
3144     }
3145 
3146     if (status == ParMarkBitMap::full) {
3147       decrement_destination_counts(cm, src_space_id, src_region_idx,
3148                                    closure.source());
3149       region_ptr->set_deferred_obj_addr(NULL);
3150       closure.complete_region(cm, dest_addr, region_ptr);
3151       return;
3152     }
3153 
3154     decrement_destination_counts(cm, src_space_id, src_region_idx, end_addr);
3155 
3156     // Move to the next source region, possibly switching spaces as well.  All
3157     // args except end_addr may be modified.
3158     src_region_idx = next_src_region(closure, src_space_id, src_space_top,
3159                                      end_addr);
3160   } while (true);
3161 }
3162 
3163 void PSParallelCompact::fill_and_update_region(ParCompactionManager* cm, size_t region_idx)
3164 {
3165   MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
3166   fill_region(cm, cl, region_idx);
3167 }
3168 
3169 void PSParallelCompact::fill_and_update_shadow_region(ParCompactionManager* cm, size_t region_idx)
3170 {
3171   // Get a shadow region first
3172   ParallelCompactData& sd = summary_data();
3173   RegionData* const region_ptr = sd.region(region_idx);
3174   size_t shadow_region = cm->pop_shadow_region_mt_safe(region_ptr);
3175   // The InvalidShadow return value indicates the corresponding heap region is available,
3176   // so use MoveAndUpdateClosure to fill the normal region. Otherwise, use
3177   // MoveAndUpdateShadowClosure to fill the acquired shadow region.
3178   if (shadow_region == cm->InvalidShadow) {
3179     MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
3180     region_ptr->shadow_to_normal();
3181     return fill_region(cm, cl, region_idx);
3182   } else {
3183     MoveAndUpdateShadowClosure cl(mark_bitmap(), cm, region_idx, shadow_region);
3184     return fill_region(cm, cl, region_idx);
3185   }
3186 }
3187 
3188 void PSParallelCompact::copy_back(HeapWord *shadow_addr, HeapWord *region_addr)
3189 {
3190   Copy::aligned_conjoint_words(shadow_addr, region_addr, _summary_data.RegionSize);
3191 }
3192 
3193 bool PSParallelCompact::steal_unavailable_region(ParCompactionManager* cm, size_t &region_idx)
3194 {


3335 {
3336   size_t words = words_remaining();
3337 
3338   HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
3339   HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
3340   if (end_addr < range_end) {
3341     words = bitmap()->obj_size(source(), end_addr);
3342   }
3343 
3344   // This test is necessary; if omitted, the pointer updates to a partial object
3345   // that crosses the dense prefix boundary could be overwritten.
3346   if (source() != copy_destination()) {
3347     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3348     Copy::aligned_conjoint_words(source(), copy_destination(), words);
3349   }
3350   update_state(words);
3351 }
3352 
3353 void MoveAndUpdateClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
3354                                            PSParallelCompact::RegionData *region_ptr) {
3355   assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::FinishedShadow, "Region should be finished");
3356   region_ptr->set_completed();
3357 }
3358 
3359 ParMarkBitMapClosure::IterationStatus
3360 MoveAndUpdateClosure::do_addr(HeapWord* addr, size_t words) {
3361   assert(destination() != NULL, "sanity");
3362   assert(bitmap()->obj_size(addr) == words, "bad size");
3363 
3364   _source = addr;
3365   assert(PSParallelCompact::summary_data().calc_new_pointer(source(), compaction_manager()) ==
3366          destination(), "wrong destination");
3367 
3368   if (words > words_remaining()) {
3369     return ParMarkBitMap::would_overflow;
3370   }
3371 
3372   // The start_array must be updated even if the object is not moving.
3373   if (_start_array != NULL) {
3374     _start_array->allocate_block(destination());
3375   }
3376 
3377   if (copy_destination() != source()) {
3378     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3379     Copy::aligned_conjoint_words(source(), copy_destination(), words);
3380   }
3381 
3382   oop moved_oop = (oop) copy_destination();
3383   compaction_manager()->update_contents(moved_oop);
3384   assert(oopDesc::is_oop_or_null(moved_oop), "Expected an oop or NULL at " PTR_FORMAT, p2i(moved_oop));
3385 
3386   update_state(words);
3387   assert(copy_destination() == (HeapWord*)moved_oop + moved_oop->size(), "sanity");
3388   return is_full() ? ParMarkBitMap::full : ParMarkBitMap::incomplete;
3389 }
3390 
3391 void MoveAndUpdateShadowClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
3392                                                  PSParallelCompact::RegionData *region_ptr) {
3393   assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::Shadow, "Region should be shadow");
3394   // Record the shadow region index
3395   region_ptr->set_shadow_region(_shadow);
3396   // Mark the shadow region as filled to indicate the data is ready to be
3397   // copied back
3398   region_ptr->mark_filled();
3399   // Try to copy the content of the shadow region back to its corresponding
3400   // heap region if available; the GC thread that decreases the destination
3401   // count to zero will do the copying otherwise (see
3402   // PSParallelCompact::decrement_destination_counts).
3403   if (((region_ptr->available() && region_ptr->claim()) || region_ptr->claimed()) && region_ptr->mark_copied()) {
3404     region_ptr->set_completed();
3405     PSParallelCompact::copy_back(PSParallelCompact::summary_data().region_to_addr(_shadow), dest_addr);
3406     cm->push_shadow_region_mt_safe(_shadow);
3407   }
3408 }
3409 
3410 UpdateOnlyClosure::UpdateOnlyClosure(ParMarkBitMap* mbm,
3411                                      ParCompactionManager* cm,
3412                                      PSParallelCompact::SpaceId space_id) :
3413   ParMarkBitMapClosure(mbm, cm),




3117       assert(closure.source() < end_addr, "sanity");
3118       HeapWord* const obj_beg = closure.source();
3119       HeapWord* const range_end = MIN2(obj_beg + closure.words_remaining(),
3120                                        src_space_top);
3121       HeapWord* const obj_end = bitmap->find_obj_end(obj_beg, range_end);
3122       if (obj_end < range_end) {
3123         // The end was found; the entire object will fit.
3124         status = closure.do_addr(obj_beg, bitmap->obj_size(obj_beg, obj_end));
3125         assert(status != ParMarkBitMap::would_overflow, "sanity");
3126       } else {
3127         // The end was not found; the object will not fit.
3128         assert(range_end < src_space_top, "obj cannot cross space boundary");
3129         status = ParMarkBitMap::would_overflow;
3130       }
3131     }
3132 
3133     if (status == ParMarkBitMap::would_overflow) {
3134       // The last object did not fit.  Record its destination so the interior oop
3135       // updates can be deferred, then copy enough of the object to fill the region.
3136       region_ptr->set_deferred_obj_addr(closure.destination());

3137       status = closure.copy_until_full(); // copies from closure.source()
3138 
3139       decrement_destination_counts(cm, src_space_id, src_region_idx,
3140                                    closure.source());
3141       closure.complete_region(cm, dest_addr, region_ptr);
3142       return;
3143     }
3144 
3145     if (status == ParMarkBitMap::full) {
3146       decrement_destination_counts(cm, src_space_id, src_region_idx,
3147                                    closure.source());
3148       region_ptr->set_deferred_obj_addr(NULL);
3149       closure.complete_region(cm, dest_addr, region_ptr);
3150       return;
3151     }
3152 
3153     decrement_destination_counts(cm, src_space_id, src_region_idx, end_addr);
3154 
3155     // Move to the next source region, possibly switching spaces as well.  All
3156     // args except end_addr may be modified.
3157     src_region_idx = next_src_region(closure, src_space_id, src_space_top,
3158                                      end_addr);
3159   } while (true);
3160 }
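
Reviewer note: when do_addr() reports would_overflow, the straddling object is split. copy_until_full() moves only as many words as still fit in the current destination region, set_deferred_obj_addr() records the object's new address so its interior oops can be updated once the whole object is in place, and the remaining words are moved when the following destination region is filled. A small arithmetic sketch of that split (the helper below is mine, purely illustrative):

    #include <cassert>
    #include <cstddef>
    #include <cstdio>

    // Illustrative helper, not part of the patch: given the free words left in
    // the destination region and the size of the object that triggered
    // would_overflow, compute how many words copy_until_full() moves now and
    // how many are left for the region that follows.
    struct OverflowSplit {
      size_t copied_now;
      size_t copied_later;
    };

    static OverflowSplit split_for_overflow(size_t words_remaining, size_t obj_words) {
      assert(obj_words > words_remaining && "would_overflow implies the object does not fit");
      return { words_remaining, obj_words - words_remaining };
    }

    int main() {
      // e.g. 8 words of space left in the destination region, a 20-word object next:
      OverflowSplit s = split_for_overflow(8, 20);
      std::printf("copy now: %zu words, defer: %zu words\n", s.copied_now, s.copied_later);
      return 0;
    }
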
3161 
3162 void PSParallelCompact::fill_and_update_region(ParCompactionManager* cm, size_t region_idx)
3163 {
3164   MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
3165   fill_region(cm, cl, region_idx);
3166 }
3167 
3168 void PSParallelCompact::fill_and_update_shadow_region(ParCompactionManager* cm, size_t region_idx)
3169 {
3170   // Get a shadow region first
3171   ParallelCompactData& sd = summary_data();
3172   RegionData* const region_ptr = sd.region(region_idx);
3173   size_t shadow_region = ParCompactionManager::pop_shadow_region_mt_safe(region_ptr);
3174   // The InvalidShadow return value indicates the corresponding heap region is available,
3175   // so use MoveAndUpdateClosure to fill the normal region. Otherwise, use
3176   // MoveAndUpdateShadowClosure to fill the acquired shadow region.
3177   if (shadow_region == cm->InvalidShadow) {
3178     MoveAndUpdateClosure cl(mark_bitmap(), cm, region_idx);
3179     region_ptr->shadow_to_normal();
3180     return fill_region(cm, cl, region_idx);
3181   } else {
3182     MoveAndUpdateShadowClosure cl(mark_bitmap(), cm, region_idx, shadow_region);
3183     return fill_region(cm, cl, region_idx);
3184   }
3185 }
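
Reviewer note: the practical difference between MoveAndUpdateClosure and MoveAndUpdateShadowClosure is where the copied words physically land, not what gets copied. A minimal sketch of that relationship, assuming the shadow variant simply offsets its writes by a constant number of words into the acquired shadow region (the type and field names below are illustrative, not the patch's):

    #include <cstddef>
    #include <cstdio>

    // Stand-ins for HotSpot types; everything here is illustrative.
    typedef unsigned long HeapWordImpl;
    typedef HeapWordImpl* HeapWord;

    // destination() is always the object's final address in the heap; a shadow
    // fill shifts its physical writes by a fixed word offset into a spare region.
    struct FillTarget {
      HeapWord  _destination;   // final address in the heap
      ptrdiff_t _offset;        // 0 for a normal fill; shadow_base - region_base for a shadow fill

      HeapWord destination() const      { return _destination; }
      HeapWord copy_destination() const { return _destination + _offset; }
    };

    int main() {
      // Model the heap as one flat array: words [0, 8) are the destination
      // region, words [8, 16) stand in for an acquired shadow region.
      HeapWordImpl heap[16] = {0};
      HeapWord region_base = heap;
      HeapWord shadow_base = heap + 8;

      FillTarget normal   = { region_base, 0 };
      FillTarget shadowed = { region_base, shadow_base - region_base };

      std::printf("normal fill writes at %p, shadow fill writes at %p, both report %p\n",
                  (void*)normal.copy_destination(),
                  (void*)shadowed.copy_destination(),
                  (void*)normal.destination());
      return 0;
    }

In the patch, copy_back() later moves a full RegionSize worth of words from the shadow address back to the heap region, which is why destination() must keep reporting the final heap address (it is what calc_new_pointer() is checked against in do_addr()).
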
3186 
3187 void PSParallelCompact::copy_back(HeapWord *shadow_addr, HeapWord *region_addr)
3188 {
3189   Copy::aligned_conjoint_words(shadow_addr, region_addr, _summary_data.RegionSize);
3190 }
3191 
3192 bool PSParallelCompact::steal_unavailable_region(ParCompactionManager* cm, size_t &region_idx)
3193 {


3334 {
3335   size_t words = words_remaining();
3336 
3337   HeapWord* const range_end = MIN2(source() + words, bitmap()->region_end());
3338   HeapWord* const end_addr = bitmap()->find_obj_end(source(), range_end);
3339   if (end_addr < range_end) {
3340     words = bitmap()->obj_size(source(), end_addr);
3341   }
3342 
3343   // This test is necessary; if omitted, the pointer updates to a partial object
3344   // that crosses the dense prefix boundary could be overwritten.
3345   if (source() != copy_destination()) {
3346     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3347     Copy::aligned_conjoint_words(source(), copy_destination(), words);
3348   }
3349   update_state(words);
3350 }
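
Reviewer note: copy_until_full() above (and do_addr() below) move words with Copy::aligned_conjoint_words. In HotSpot's Copy API, "conjoint" means the two ranges may overlap, which matters during compaction because an object's new location can overlap its old one. A sketch of the contract being relied on, in portable C++ (not HotSpot's implementation):

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    // Overlap-safe word copy, equivalent in contract to a "conjoint" copy.
    static void conjoint_words_sketch(const long* from, long* to, std::size_t count) {
      std::memmove(to, from, count * sizeof(long));   // memmove handles overlap
    }

    int main() {
      long words[8] = {1, 2, 3, 4, 5, 6, 7, 8};
      conjoint_words_sketch(words + 2, words, 6);     // slide six words down by two
      for (long w : words) std::printf("%ld ", w);    // prints: 3 4 5 6 7 8 7 8
      std::printf("\n");
      return 0;
    }
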
3351 
3352 void MoveAndUpdateClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
3353                                            PSParallelCompact::RegionData *region_ptr) {
3354   assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::NormalRegion, "Region should be finished");
3355   region_ptr->set_completed();
3356 }
3357 
3358 ParMarkBitMapClosure::IterationStatus
3359 MoveAndUpdateClosure::do_addr(HeapWord* addr, size_t words) {
3360   assert(destination() != NULL, "sanity");
3361   assert(bitmap()->obj_size(addr) == words, "bad size");
3362 
3363   _source = addr;
3364   assert(PSParallelCompact::summary_data().calc_new_pointer(source(), compaction_manager()) ==
3365          destination(), "wrong destination");
3366 
3367   if (words > words_remaining()) {
3368     return ParMarkBitMap::would_overflow;
3369   }
3370 
3371   // The start_array must be updated even if the object is not moving.
3372   if (_start_array != NULL) {
3373     _start_array->allocate_block(destination());
3374   }
3375 
3376   if (copy_destination() != source()) {
3377     DEBUG_ONLY(PSParallelCompact::check_new_location(source(), destination());)
3378     Copy::aligned_conjoint_words(source(), copy_destination(), words);
3379   }
3380 
3381   oop moved_oop = (oop) copy_destination();
3382   compaction_manager()->update_contents(moved_oop);
3383   assert(oopDesc::is_oop_or_null(moved_oop), "Expected an oop or NULL at " PTR_FORMAT, p2i(moved_oop));
3384 
3385   update_state(words);
3386   assert(copy_destination() == (HeapWord*)moved_oop + moved_oop->size(), "sanity");
3387   return is_full() ? ParMarkBitMap::full : ParMarkBitMap::incomplete;
3388 }
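
Reviewer note: the start_array update above matters even for objects that do not move, because later phases map an arbitrary heap address back to an object start block by block, so every object's final address has to be registered. A deliberately simplified model of that kind of lookup (my toy, not ObjectStartArray's actual encoding):

    #include <cassert>
    #include <cstddef>

    const std::size_t BlockWords = 64;           // illustrative block size

    struct ToyStartArray {
      const long* heap_base;
      const long* starts[64];                    // last recorded object start per block

      explicit ToyStartArray(const long* base) : heap_base(base) {
        for (const long*& s : starts) s = nullptr;
      }

      // Spiritual equivalent of _start_array->allocate_block(destination()).
      void allocate_block(const long* obj_start) {
        starts[(obj_start - heap_base) / BlockWords] = obj_start;
      }

      // Find a recorded object start at or before p by scanning blocks backwards.
      const long* object_start_before(const long* p) const {
        for (std::size_t b = (p - heap_base) / BlockWords + 1; b-- > 0; ) {
          if (starts[b] != nullptr && starts[b] <= p) return starts[b];
        }
        return nullptr;
      }
    };

    int main() {
      long heap[256] = {0};
      ToyStartArray sa(heap);
      sa.allocate_block(heap + 10);    // object placed (or left in place) at word 10
      sa.allocate_block(heap + 130);   // another object at word 130
      assert(sa.object_start_before(heap + 140) == heap + 130);
      assert(sa.object_start_before(heap + 70)  == heap + 10);
      return 0;
    }
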
3389 
3390 void MoveAndUpdateShadowClosure::complete_region(ParCompactionManager *cm, HeapWord *dest_addr,
3391                                                  PSParallelCompact::RegionData *region_ptr) {
3392   assert(region_ptr->shadow_state() == ParallelCompactData::RegionData::ShadowRegion, "Region should be shadow");
3393   // Record the shadow region index
3394   region_ptr->set_shadow_region(_shadow);
3395   // Mark the shadow region as filled to indicate the data is ready to be
3396   // copied back
3397   region_ptr->mark_filled();
3398   // Try to copy the content of the shadow region back to its corresponding
3399   // heap region if available; the GC thread that decreases the destination
3400   // count to zero will do the copying otherwise (see
3401   // PSParallelCompact::decrement_destination_counts).
3402   if (((region_ptr->available() && region_ptr->claim()) || region_ptr->claimed()) && region_ptr->mark_copied()) {
3403     region_ptr->set_completed();
3404     PSParallelCompact::copy_back(PSParallelCompact::summary_data().region_to_addr(_shadow), dest_addr);
3405     cm->push_shadow_region_mt_safe(_shadow);
3406   }
3407 }
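
Reviewer note: the guard in MoveAndUpdateShadowClosure::complete_region settles a race. The worker that fills the shadow region and the worker that makes the destination heap region available (by dropping its destination count to zero) can finish in either order, and exactly one of them must perform the copy-back. A stripped-down model of that handoff, using std::atomic flags in place of RegionData's real state bits (my sketch, not the patch's encoding):

    #include <atomic>
    #include <cstdio>
    #include <thread>

    // Both parties call try_copy_back() after setting their own flag; the
    // exchange on 'copied' lets only the first worker that sees both flags
    // perform the copy.
    struct ShadowHandoff {
      std::atomic<bool> filled{false};     // shadow region has been completely filled
      std::atomic<bool> available{false};  // destination heap region is free to receive it
      std::atomic<bool> copied{false};     // copy-back already performed

      void try_copy_back() {
        if (filled.load() && available.load() && !copied.exchange(true)) {
          std::printf("copying shadow region contents back to the heap region\n");
        }
      }

      void on_filled()    { filled.store(true);    try_copy_back(); }  // filling worker
      void on_available() { available.store(true); try_copy_back(); }  // worker that freed the destination
    };

    int main() {
      ShadowHandoff h;
      std::thread filler(&ShadowHandoff::on_filled, &h);
      std::thread releaser(&ShadowHandoff::on_available, &h);
      filler.join();
      releaser.join();
      return 0;
    }

With sequentially consistent atomics, at least one of the two workers is guaranteed to observe both flags set, and the exchange on 'copied' ensures the copy-back runs exactly once.
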
3408 
3409 UpdateOnlyClosure::UpdateOnlyClosure(ParMarkBitMap* mbm,
3410                                      ParCompactionManager* cm,
3411                                      PSParallelCompact::SpaceId space_id) :
3412   ParMarkBitMapClosure(mbm, cm),

