/*
 * Copyright (c) 2014, 2015, Red Hat, Inc. and/or its affiliates.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "code/codeCache.hpp"
#include "gc/shared/gcTraceTime.inline.hpp"
#include "gc/shared/isGCActiveMark.hpp"
#include "gc/shared/taskqueue.inline.hpp"
#include "gc/shared/workgroup.hpp"
#include "gc/shenandoah/brooksPointer.hpp"
#include "gc/shenandoah/shenandoahBarrierSet.hpp"
#include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
#include "gc/shenandoah/shenandoahConcurrentMark.hpp"
#include "gc/shenandoah/shenandoahHeap.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
#include "gc/shenandoah/shenandoahMarkCompact.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.hpp"
#include "gc/shenandoah/vm_operations_shenandoah.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/thread.hpp"
#include "utilities/copy.hpp"

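// Barrier set installed for the duration of the full GC: the read barrier is a
// plain identity operation, since mark-compact works on objects in place and
// reuses the Brooks pointers to record compaction target addresses (see
// phases 2-4 below), so they must not be followed as forwarding pointers here.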
class ShenandoahMarkCompactBarrierSet : public ShenandoahBarrierSet {
public:
  ShenandoahMarkCompactBarrierSet(ShenandoahHeap* heap) : ShenandoahBarrierSet(heap) {
  }
  oop read_barrier(oop src) {
    return src;
  }
#ifdef ASSERT
  bool is_safe(oop o) {
    if (o == NULL) return true;
    if (! oopDesc::unsafe_equals(o, read_barrier(o))) {
      return false;
    }
    return true;
  }
  bool is_safe(narrowOop o) {
    oop obj = oopDesc::decode_heap_oop(o);
    return is_safe(obj);
  }
#endif
};

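// Resets per-region state before marking: moves the next top-at-mark-start up
// to the current top, clears the recorded live data and resets the concurrent
// iteration safe limit, so the STW marking below starts from a clean slate.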
class ClearInCollectionSetHeapRegionClosure: public ShenandoahHeapRegionClosure {
private:
  ShenandoahHeap* _heap;
public:

  ClearInCollectionSetHeapRegionClosure() : _heap(ShenandoahHeap::heap()) {
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    _heap->set_next_top_at_mark_start(r->bottom(), r->top());
    r->clear_live_data();
    r->set_concurrent_iteration_safe_limit(r->top());
    return false;
  }
};

STWGCTimer* ShenandoahMarkCompact::_gc_timer = NULL;

void ShenandoahMarkCompact::initialize() {
  _gc_timer = new (ResourceObj::C_HEAP, mtGC) STWGCTimer();
}

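// Entry point of the stop-the-world full GC. Executes the classic four-phase
// mark-compact algorithm:
//   1. mark all live objects,
//   2. calculate the compaction target address of every live object,
//   3. adjust all root and heap references to those target addresses,
//   4. copy the objects to their targets and rebuild per-region metadata.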
void ShenandoahMarkCompact::do_mark_compact(GCCause::Cause gc_cause) {

  ShenandoahHeap* _heap = ShenandoahHeap::heap();
  ShenandoahCollectorPolicy* policy = _heap->shenandoahPolicy();

  _gc_timer->register_gc_start();

  _heap->set_full_gc_in_progress(true);

  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  IsGCActiveMark is_active;

  assert(Thread::current()->is_VM_thread(), "Do full GC only while world is stopped");
  assert(_heap->is_next_bitmap_clear(), "require cleared bitmap");
  assert(!_heap->concurrent_mark_in_progress(), "can't do full-GC while marking is in progress");
  assert(!_heap->is_evacuation_in_progress(), "can't do full-GC while evacuation is in progress");

  policy->record_phase_start(ShenandoahCollectorPolicy::full_gc);

  ClearInCollectionSetHeapRegionClosure cl;
  _heap->heap_region_iterate(&cl, false, false);

  /*
  if (ShenandoahVerify) {
    // Full GC should only be called between regular concurrent cycles, therefore
    // those verifications should be valid.
    _heap->verify_heap_after_evacuation();
    _heap->verify_heap_after_update_refs();
  }
  */

  BarrierSet* old_bs = oopDesc::bs();
  ShenandoahMarkCompactBarrierSet bs(_heap);
  oopDesc::set_bs(&bs);

  {
  GCTraceTime(Info, gc) time("Pause Full", _gc_timer, gc_cause, true);

  if (UseTLAB) {
    _heap->ensure_parsability(true);
  }

  CodeCache::gc_prologue();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  //BiasedLocking::preserve_marks();

  _heap->set_need_update_refs(true);

  OrderAccess::fence();

  policy->record_phase_start(ShenandoahCollectorPolicy::full_gc_mark);
  phase1_mark_heap();
  policy->record_phase_end(ShenandoahCollectorPolicy::full_gc_mark);

  OrderAccess::fence();

  policy->record_phase_start(ShenandoahCollectorPolicy::full_gc_calculate_addresses);
  ShenandoahHeapRegionSet* copy_queues[_heap->max_parallel_workers()];
  phase2_calculate_target_addresses(copy_queues);
  policy->record_phase_end(ShenandoahCollectorPolicy::full_gc_calculate_addresses);

  OrderAccess::fence();

  policy->record_phase_start(ShenandoahCollectorPolicy::full_gc_adjust_pointers);
  phase3_update_references();
  policy->record_phase_end(ShenandoahCollectorPolicy::full_gc_adjust_pointers);

  policy->record_phase_start(ShenandoahCollectorPolicy::full_gc_copy_objects);
  phase4_compact_objects(copy_queues);
  policy->record_phase_end(ShenandoahCollectorPolicy::full_gc_copy_objects);

  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  // rp.enqueue_discovered_references();

  if (ShenandoahVerify) {
    _heap->verify_heap_after_evacuation();
  }

  _heap->set_bytes_allocated_since_cm(0);

  _heap->set_need_update_refs(false);

  _heap->set_full_gc_in_progress(false);
  }

  _gc_timer->register_gc_end();

  policy->record_phase_end(ShenandoahCollectorPolicy::full_gc);

  oopDesc::set_bs(old_bs);
}

#ifdef ASSERT
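// Debug-only check, used after marking: every reachable reference must point to
// an object that is not forwarded and is marked in the complete bitmap.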
class VerifyNotForwardedPointersClosure : public MetadataAwareOopClosure {
private:
  template <class T>
  inline void do_oop_work(T* p) {
    T o = oopDesc::load_heap_oop(p);
    if (! oopDesc::is_null(o)) {
      oop obj = oopDesc::decode_heap_oop_not_null(o);
      assert(oopDesc::unsafe_equals(obj, ShenandoahBarrierSet::resolve_oop_static_not_null(obj)),
             "expect forwarded oop");
      ShenandoahHeap* heap = ShenandoahHeap::heap();
      if (! heap->is_marked_complete(obj)) {
        tty->print_cr("ref region humongous? %s", BOOL_TO_STR(heap->heap_region_containing(p)->is_humongous()));
      }
      assert(heap->is_marked_complete(obj), "must be marked");
      assert(! heap->allocated_after_complete_mark_start((HeapWord*) obj), "must be truly marked");
    }
  }
public:
  void do_oop(oop* p) {
    do_oop_work(p);
  }
  void do_oop(narrowOop* p) {
    do_oop_work(p);
  }
};

class ShenandoahMCVerifyAfterMarkingObjectClosure : public ObjectClosure {
public:
  void do_object(oop p) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    assert(oopDesc::unsafe_equals(p, ShenandoahBarrierSet::resolve_oop_static_not_null(p)),
           "expect forwarded oop");
    assert(heap->is_marked_complete(p), "must be marked");
    assert(! heap->allocated_after_complete_mark_start((HeapWord*) p), "must be truly marked");
    VerifyNotForwardedPointersClosure cl;
    p->oop_iterate(&cl);
  }
};

class ShenandoahMCVerifyAfterMarkingRegionClosure : public ShenandoahHeapRegionClosure {
  bool doHeapRegion(ShenandoahHeapRegion* r) {
    ShenandoahMCVerifyAfterMarkingObjectClosure cl;
    if (! r->is_humongous_continuation()) {
      ShenandoahHeap::heap()->marked_object_iterate(r, &cl);
    }
    return false;
  }
};

#endif

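// Phase 1: mark the whole heap from the roots at the safepoint, reusing the
// concurrent marking machinery with reference processing and class unloading
// enabled, then swap the bitmaps so the result becomes the complete bitmap.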
void ShenandoahMarkCompact::phase1_mark_heap() {
  GCTraceTime(Info, gc, phases) time("Phase 1: Mark live objects", _gc_timer);
  ShenandoahHeap* _heap = ShenandoahHeap::heap();

  ShenandoahConcurrentMark* cm = _heap->concurrentMark();

  cm->set_process_references(true);
  cm->set_unload_classes(true);

  ReferenceProcessor* rp = _heap->ref_processor();
  // enable ("weak") refs discovery
  rp->enable_discovery(true /*verify_no_refs*/);
  rp->setup_policy(true); // snapshot the soft ref policy to be used in this cycle
  rp->set_active_mt_degree(_heap->max_parallel_workers());

  COMPILER2_PRESENT(DerivedPointerTable::clear());
  cm->update_roots();
  COMPILER2_PRESENT(DerivedPointerTable::update_pointers());

  cm->mark_roots();
  cm->shared_finish_mark_from_roots(/* full_gc = */ true);

  _heap->swap_mark_bitmaps();

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    //    Universe::heap()->prepare_for_verify();
    _heap->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    //    Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    _heap->verify(VerifyOption_G1UseMarkWord);
  }

#ifdef ASSERT
  ShenandoahMCVerifyAfterMarkingRegionClosure cl;
  _heap->heap_region_iterate(&cl);
#endif
}

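// Reclaims humongous regions whose start object did not survive marking, before
// compaction targets are computed.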
class ShenandoahMCReclaimHumongousRegionClosure : public ShenandoahHeapRegionClosure {
private:
  ShenandoahHeap* _heap;
public:
  ShenandoahMCReclaimHumongousRegionClosure() : _heap(ShenandoahHeap::heap()) {
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous_start()) {
      oop humongous_obj = oop(r->bottom() + BrooksPointer::word_size());
      if (! _heap->is_marked_complete(humongous_obj)) {
        _heap->reclaim_humongous_region_at(r);
      }
    }
    return false;
  }
};

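// Computes the compaction target address for every live object of a from-region
// and stores it in the object's Brooks pointer. Targets are allocated linearly
// in the current to-region; when an object does not fit, the closure advances to
// the next to-region (falling back to the from-region itself if none is left).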
class ShenandoahPrepareForCompactionObjectClosure : public ObjectClosure {

private:

  ShenandoahHeap* _heap;
  ShenandoahHeapRegionSet* _to_regions;
  ShenandoahHeapRegion* _to_region;
  ShenandoahHeapRegion* _from_region;
  HeapWord* _compact_point;

public:

  ShenandoahPrepareForCompactionObjectClosure(ShenandoahHeapRegionSet* to_regions, ShenandoahHeapRegion* to_region) :
    _heap(ShenandoahHeap::heap()),
    _to_regions(to_regions),
    _to_region(to_region),
    _from_region(NULL),
    _compact_point(to_region->bottom()) {
  }

  void set_from_region(ShenandoahHeapRegion* from_region) {
    _from_region = from_region;
  }

  ShenandoahHeapRegion* to_region() const {
    return _to_region;
  }
  HeapWord* compact_point() const {
    return _compact_point;
  }
  void do_object(oop p) {
    assert(_from_region != NULL, "must set before work");
    assert(_heap->is_marked_complete(p), "must be marked");
    assert(! _heap->allocated_after_complete_mark_start((HeapWord*) p), "must be truly marked");
    size_t size = p->size();
    size_t obj_size = size + BrooksPointer::word_size();
    if (_compact_point + obj_size > _to_region->end()) {
      // Object doesn't fit. Pick next to-region and start compacting there.
      _to_region->set_new_top(_compact_point);
      ShenandoahHeapRegion* new_to_region = _to_regions->next();
      if (new_to_region == NULL) {
        new_to_region = _from_region;
      }
      assert(new_to_region != _to_region, "must not reuse same to-region");
      assert(new_to_region != NULL, "must not be NULL");
      _to_region = new_to_region;
      _compact_point = _to_region->bottom();
    }
    assert(_compact_point + obj_size <= _to_region->end(), "must fit");
    assert(oopDesc::unsafe_equals(p, ShenandoahBarrierSet::resolve_oop_static_not_null(p)),
           "expect forwarded oop");
    BrooksPointer::set_raw(p, _compact_point + BrooksPointer::word_size());
    _compact_point += obj_size;
  }
};

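// Parallel part of phase 2: each worker claims from-regions (skipping humongous
// and pinned ones), remembers them in its per-worker copy queue for phase 4, and
// runs ShenandoahPrepareForCompactionObjectClosure over their live objects.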
class ShenandoahPrepareForCompactionTask : public AbstractGangTask {
private:

  ShenandoahHeapRegionSet** _copy_queues;
  ShenandoahHeapRegionSet* _from_regions;

  ShenandoahHeapRegion* next_from_region(ShenandoahHeapRegionSet* copy_queue) {
    ShenandoahHeapRegion* from_region = _from_regions->claim_next();
    while (from_region != NULL && (from_region->is_humongous() || from_region->is_pinned())) {
      from_region = _from_regions->claim_next();
    }
    if (from_region != NULL) {
      assert(copy_queue != NULL, "sanity");
      assert(! from_region->is_humongous(), "must not get humongous regions here");
      assert(! from_region->is_pinned(), "no pinned region in mark-compact");
      copy_queue->add_region(from_region);
    }
    return from_region;
  }

public:
  ShenandoahPrepareForCompactionTask(ShenandoahHeapRegionSet* from_regions, ShenandoahHeapRegionSet** copy_queues) :
    AbstractGangTask("Shenandoah Prepare For Compaction Task"),
    _copy_queues(copy_queues), _from_regions(from_regions) {
  }

  void work(uint worker_id) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    ShenandoahHeapRegionSet* copy_queue = _copy_queues[worker_id];
    ShenandoahHeapRegion* from_region = next_from_region(copy_queue);
    if (from_region == NULL) return;
    ShenandoahHeapRegionSet* to_regions = new ShenandoahHeapRegionSet(ShenandoahHeap::heap()->max_regions());
    ShenandoahPrepareForCompactionObjectClosure cl(to_regions, from_region);
    while (from_region != NULL) {
      assert(from_region != NULL, "sanity");
      cl.set_from_region(from_region);
      heap->marked_object_iterate(from_region, &cl);
      if (from_region != cl.to_region()) {
        assert(from_region != NULL, "sanity");
        to_regions->add_region(from_region);
      }
      from_region = next_from_region(copy_queue);
    }
    assert(cl.to_region() != NULL, "should not happen");
    cl.to_region()->set_new_top(cl.compact_point());
    while (to_regions->count() > 0) {
      ShenandoahHeapRegion* r = to_regions->next();
      if (r == NULL) {
        to_regions->print();
      }
      assert(r != NULL, "should not happen");
      r->set_new_top(r->bottom());
    }
    delete to_regions;
  }
};

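// Phase 2: reclaim dead humongous regions, then compute a target address for
// every live object. The regions processed by each worker are recorded in
// per-worker copy queues, which phase 4 replays to do the actual copying.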
void ShenandoahMarkCompact::phase2_calculate_target_addresses(ShenandoahHeapRegionSet** copy_queues) {
  GCTraceTime(Info, gc, phases) time("Phase 2: Compute new object addresses", _gc_timer);
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  ShenandoahMCReclaimHumongousRegionClosure cl;
  heap->heap_region_iterate(&cl);

  // Initialize copy queues.
  for (uint i = 0; i < heap->max_parallel_workers(); i++) {
    copy_queues[i] = new ShenandoahHeapRegionSet(heap->max_regions());
  }

  ShenandoahHeapRegionSet* from_regions = heap->regions();
  from_regions->clear_current_index();
  ShenandoahPrepareForCompactionTask prepare_task(from_regions, copy_queues);
  heap->workers()->run_task(&prepare_task);
}

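// Rewrites a single reference to the target address stored in the referent's
// Brooks pointer during phase 2.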
class ShenandoahAdjustPointersClosure : public MetadataAwareOopClosure {
private:
  ShenandoahHeap* _heap;

public:

  ShenandoahAdjustPointersClosure() : _heap(ShenandoahHeap::heap()) {
  }

private:
  template <class T>
  inline void do_oop_work(T* p) {
    T o = oopDesc::load_heap_oop(p);
    if (! oopDesc::is_null(o)) {
      oop obj = oopDesc::decode_heap_oop_not_null(o);
      assert(_heap->is_marked_complete(obj), "must be marked");
      oop forw = oop(BrooksPointer::get_raw(obj));
      oopDesc::encode_store_heap_oop(p, forw);
    }
  }
public:
  void do_oop(oop* p) {
    do_oop_work(p);
  }
  void do_oop(narrowOop* p) {
    do_oop_work(p);
  }
};

class ShenandoahAdjustPointersObjectClosure : public ObjectClosure {
private:
  ShenandoahAdjustPointersClosure* _cl;
  ShenandoahHeap* _heap;
public:
  ShenandoahAdjustPointersObjectClosure(ShenandoahAdjustPointersClosure* cl) :
    _cl(cl), _heap(ShenandoahHeap::heap()) {
  }
  void do_object(oop p) {
    assert(_heap->is_marked_complete(p), "must be marked");
    p->oop_iterate(_cl);
  }
};

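// Parallel part of phase 3 for heap references: workers claim regions and update
// every reference held by their live objects.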
class ShenandoahAdjustPointersTask : public AbstractGangTask {
private:
  ShenandoahHeapRegionSet* _regions;
public:

  ShenandoahAdjustPointersTask(ShenandoahHeapRegionSet* regions) :
    AbstractGangTask("Shenandoah Adjust Pointers Task"),
    _regions(regions) {
  }

  void work(uint worker_id) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    ShenandoahHeapRegion* r = _regions->claim_next();
    ShenandoahAdjustPointersClosure cl;
    ShenandoahAdjustPointersObjectClosure obj_cl(&cl);
    while (r != NULL) {
      if (! r->is_humongous_continuation()) {
        heap->marked_object_iterate(r, &obj_cl);
      }
      r = _regions->claim_next();
    }
  }
};

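// Parallel part of phase 3 for the root set: updates references reachable from
// the roots (thread stacks, class loader data, code blobs) to the new locations.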
class ShenandoahAdjustRootPointersTask : public AbstractGangTask {
private:
  ShenandoahRootProcessor* _rp;

public:

  ShenandoahAdjustRootPointersTask(ShenandoahRootProcessor* rp) :
    AbstractGangTask("Shenandoah Adjust Root Pointers Task"),
    _rp(rp) {
  }

  void work(uint worker_id) {
    ShenandoahAdjustPointersClosure cl;
    CLDToOopClosure adjust_cld_closure(&cl, true);
    MarkingCodeBlobClosure adjust_code_closure(&cl,
                                               CodeBlobToOopClosure::FixRelocations);

    _rp->process_all_roots(&cl, &cl,
                           &adjust_cld_closure,
                           &adjust_code_closure, worker_id);
  }
};

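// Phase 3: adjust every reference, in the roots and in the heap, to point at the
// target address computed in phase 2.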
void ShenandoahMarkCompact::phase3_update_references() {
  GCTraceTime(Info, gc, phases) time("Phase 3: Adjust pointers", _gc_timer);
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  {
    COMPILER2_PRESENT(DerivedPointerTable::clear());
    ShenandoahRootProcessor rp(heap, heap->max_parallel_workers());
    ShenandoahAdjustRootPointersTask task(&rp);
    heap->workers()->run_task(&task);
    COMPILER2_PRESENT(DerivedPointerTable::update_pointers());
  }

  ShenandoahHeapRegionSet* regions = heap->regions();
  regions->clear_current_index();
  ShenandoahAdjustPointersTask adjust_pointers_task(regions);
  heap->workers()->run_task(&adjust_pointers_task);
}

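// Copies one live object to the target address recorded in its Brooks pointer
// and re-initializes the Brooks pointer at the new location.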
class ShenandoahCompactObjectsClosure : public ObjectClosure {
private:
  ShenandoahHeap* _heap;
public:
  ShenandoahCompactObjectsClosure() : _heap(ShenandoahHeap::heap()) {
  }
  void do_object(oop p) {
    assert(_heap->is_marked_complete(p), "must be marked");
    size_t size = p->size();
    HeapWord* compact_to = BrooksPointer::get_raw(p);
    HeapWord* compact_from = (HeapWord*) p;
    if (compact_from != compact_to) {
      Copy::aligned_conjoint_words(compact_from, compact_to, size);
    }
    oop new_obj = oop(compact_to);
    // new_obj->init_mark();
    BrooksPointer::initialize(new_obj);
  }
};

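// Parallel part of phase 4: each worker replays its copy queue from phase 2 and
// slides the live objects of those regions to their target addresses.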
class ShenandoahCompactObjectsTask : public AbstractGangTask {
  ShenandoahHeapRegionSet** _regions;
public:
  ShenandoahCompactObjectsTask(ShenandoahHeapRegionSet** regions) :
    AbstractGangTask("Shenandoah Compact Objects Task"),
    _regions(regions) {
  }
  void work(uint worker_id) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    ShenandoahHeapRegionSet* copy_queue = _regions[worker_id];
    copy_queue->clear_current_index();
    ShenandoahCompactObjectsClosure cl;
    ShenandoahHeapRegion* r = copy_queue->next();
    while (r != NULL) {
      assert(! r->is_humongous(), "must not get humongous regions here");
      heap->marked_object_iterate(r, &cl);
      r->set_top(r->new_top());
      r = copy_queue->next();
    }
  }
};

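// Post-compaction pass over all regions: resets collection-set membership and the
// complete top-at-mark-start, recycles regions that ended up empty, and sums up
// the live bytes that become the new heap usage.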
class ShenandoahPostCompactClosure : public ShenandoahHeapRegionClosure {
  size_t _live;
  ShenandoahHeap* _heap;
public:

  ShenandoahPostCompactClosure() : _live(0), _heap(ShenandoahHeap::heap()) {
    _heap->clear_free_regions();
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    // Need to reset the complete-top-at-mark-start pointer here because
    // the complete marking bitmap is no longer valid. This ensures
    // size-based iteration in marked_object_iterate().
    _heap->set_complete_top_at_mark_start(r->bottom(), r->bottom());
    r->set_in_collection_set(false);
    if (r->is_humongous()) {
      _live += ShenandoahHeapRegion::RegionSizeBytes;
    } else {
      size_t live = r->used();
      if (live == 0) {
        r->recycle();
        _heap->add_free_region(r);
      }
      r->set_live_data(live);
      _live += live;
    }
    return false;
  }

  size_t get_live() { return _live; }
};

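// Phase 4: move objects to their target addresses, then reset marking bitmaps,
// rebuild per-region state and recompute the overall heap usage.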
void ShenandoahMarkCompact::phase4_compact_objects(ShenandoahHeapRegionSet** copy_queues) {
  GCTraceTime(Info, gc, phases) time("Phase 4: Move objects", _gc_timer);
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  ShenandoahCompactObjectsTask compact_task(copy_queues);
  heap->workers()->run_task(&compact_task);

  heap->clear_cset_fast_test();

  // Reset complete bitmap. We're about to reset the complete-top-at-mark-start pointer
  // and must ensure the bitmap is in sync.
  heap->reset_complete_mark_bitmap(heap->workers());

  ShenandoahPostCompactClosure post_compact;
  heap->heap_region_iterate(&post_compact);

  heap->clear_cancelled_concgc();

  // Also clear the next bitmap in preparation for next marking.
  heap->reset_next_mark_bitmap(heap->workers());

  heap->set_used(post_compact.get_live());

  for (uint i = 0; i < heap->max_parallel_workers(); i++) {
    delete copy_queues[i];
  }
}