1 /* 2 * Copyright (c) 2014, 2015, Red Hat, Inc. and/or its affiliates. 3 * 4 * This code is free software; you can redistribute it and/or modify it 5 * under the terms of the GNU General Public License version 2 only, as 6 * published by the Free Software Foundation. 7 * 8 * This code is distributed in the hope that it will be useful, but WITHOUT 9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 11 * version 2 for more details (a copy is included in the LICENSE file that 12 * accompanied this code). 13 * 14 * You should have received a copy of the GNU General Public License version 15 * 2 along with this work; if not, write to the Free Software Foundation, 16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 17 * 18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 19 * or visit www.oracle.com if you need additional information or have any 20 * questions. 
 *
 */

#include "code/codeCache.hpp"
#include "gc/shared/isGCActiveMark.hpp"
#include "gc/shenandoah/brooksPointer.hpp"
#include "gc/shenandoah/shenandoahMarkCompact.hpp"
#include "gc/shenandoah/shenandoahBarrierSet.hpp"
#include "gc/shenandoah/shenandoahHeap.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.hpp"
#include "gc/shenandoah/vm_operations_shenandoah.hpp"
#include "gc/serial/markSweep.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/thread.hpp"
#include "utilities/copy.hpp"
#include "gc/shared/taskqueue.inline.hpp"
#include "gc/shared/workgroup.hpp"

// Reset MarkSweep's preserved-marks bookkeeping to an empty state.
// Despite the name, nothing is allocated here: the preserved-marks
// arrays are zeroed out so the shared MarkSweep code starts clean.
void ShenandoahMarkCompact::allocate_stacks() {
  MarkSweep::_preserved_count_max = 0;
  MarkSweep::_preserved_marks = NULL;
  MarkSweep::_preserved_count = 0;
}

// Entry point for a full stop-the-world mark-compact collection.
//
// Must run in the VM thread at a safepoint, with no concurrent
// marking/evacuation/update-refs cycle in progress (asserted below).
// The collection proceeds in four phases:
//   1. mark all live objects (phase1_mark_heap)
//   2. compute target addresses   (phase2_calculate_target_addresses)
//   3. adjust all references      (phase3_update_references)
//   4. slide objects into place   (phase4_compact_objects)
// For the duration of the GC the active barrier set is swapped for a
// ShenandoahMarkCompactBarrierSet and restored afterwards.
void ShenandoahMarkCompact::do_mark_compact() {

  COMPILER2_PRESENT(DerivedPointerTable::clear());

  ShenandoahHeap* _heap = ShenandoahHeap::heap();

  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  IsGCActiveMark is_active;

  assert(Thread::current()->is_VM_thread(), "Do full GC only while world is stopped");
  assert(_heap->is_bitmap_clear(), "require cleared bitmap");
  assert(!_heap->concurrent_mark_in_progress(), "can't do full-GC while marking is in progress");
  assert(!_heap->is_evacuation_in_progress(), "can't do full-GC while evacuation is in progress");
  assert(!_heap->is_update_references_in_progress(), "can't do full-GC while updating of references is in progress");

  // Swap in the full-GC barrier set; the previous one is restored at the end.
  BarrierSet* _old_barrier_set = oopDesc::bs();
  oopDesc::set_bs(new ShenandoahMarkCompactBarrierSet());

  _heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::full_gc);

  // We need to clear the is_in_collection_set flag in all regions.
  ShenandoahHeapRegion** regions = _heap->heap_regions();
  size_t num_regions = _heap->num_regions();
  for (size_t i = 0; i < num_regions; i++) {
    regions[i]->set_is_in_collection_set(false);
  }
  _heap->clear_cset_fast_test();

  /*
  if (ShenandoahVerify) {
    // Full GC should only be called between regular concurrent cycles, therefore
    // those verifications should be valid.
    _heap->verify_heap_after_evacuation();
    _heap->verify_heap_after_update_refs();
  }
  */

  if (ShenandoahTraceFullGC) {
    gclog_or_tty->print_cr("Shenandoah-full-gc: start with heap used: "SIZE_FORMAT" MB", _heap->used() / M);
    gclog_or_tty->print_cr("Shenandoah-full-gc: phase 1: marking the heap");
    // _heap->print_heap_regions();
  }

  // Retire TLABs so the heap is parsable for the sliding compaction.
  if (UseTLAB) {
    _heap->ensure_parsability(true);
  }

  _heap->cleanup_after_cancelconcgc();

  ReferenceProcessor* rp = _heap->ref_processor();

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(MarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == ShenandoahHeap::heap()->ref_processor(), "Precondition");
  // Full GC unconditionally clears soft references here.
  bool clear_all_softrefs = true; //fixme
  MarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  CodeCache::gc_prologue();
  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  phase1_mark_heap();

  if (ShenandoahTraceFullGC) {
    gclog_or_tty->print_cr("Shenandoah-full-gc: phase 2: calculating target addresses");
  }
  phase2_calculate_target_addresses();

  if (ShenandoahTraceFullGC) {
    gclog_or_tty->print_cr("Shenandoah-full-gc: phase 3: updating references");
  }

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  phase3_update_references();

  if (ShenandoahTraceFullGC) {
    gclog_or_tty->print_cr("Shenandoah-full-gc: phase 4: compacting objects");
  }

  phase4_compact_objects();

  // Restore mark words (identity hashes, biased-locking state) that were
  // preserved during marking, now that objects sit at their final addresses.
  MarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  // NOTE(review): stacks were set up via MarkSweep statics in allocate_stacks()
  // but torn down through GenMarkSweep here — presumably intentional reuse of
  // the serial collector's teardown; confirm the two stay in sync.
  GenMarkSweep::deallocate_stacks();

  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  MarkSweep::_ref_processor = NULL;

  if (ShenandoahVerify) {
    _heap->verify_heap_after_evacuation();
    _heap->verify_heap_after_update_refs();
  }

  if (ShenandoahTraceFullGC) {
    gclog_or_tty->print_cr("Shenandoah-full-gc: finish with heap used: "SIZE_FORMAT" MB", _heap->used() / M);
  }

  // Full GC resets the allocation counter used by the concurrent-GC heuristics.
  _heap->_bytesAllocSinceCM = 0;

  oopDesc::set_bs(_old_barrier_set);

  // After compaction every reference points directly at its object;
  // no update-refs pass is pending.
  _heap->set_need_update_refs(false);

  COMPILER2_PRESENT(DerivedPointerTable::update_pointers());

  _heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::full_gc);
}

// Updates a single reference slot to point to the to-space copy of its
// target, via the Shenandoah barrier set. Narrow (compressed) oops are
// not supported by this closure.
class UpdateRefsClosure: public ExtendedOopClosure {
public:
  virtual void do_oop(oop* p) {
    oop obj = oopDesc::load_heap_oop(p);
    if (! oopDesc::is_null(obj)) {
      ShenandoahBarrierSet::resolve_and_update_oop_static(p, obj);
    }
  }
  virtual void do_oop(narrowOop* p) {
    Unimplemented();
  }
};

// Phase 1: transitively mark all objects reachable from the GC roots,
// then process discovered references, unload dead classes/nmethods, and
// clean up string/symbol tables.
void ShenandoahMarkCompact::phase1_mark_heap() {
  ShenandoahHeap* _heap = ShenandoahHeap::heap();
  ReferenceProcessor* rp = _heap->ref_processor();

  MarkSweep::_ref_processor = rp;

  // First, update _all_ references in GC roots to point to to-space.
  // (Currently disabled; kept for reference.)
  {
    // Need cleared claim bits for the roots processing
    /*
    ClassLoaderDataGraph::clear_claimed_marks();
    UpdateRefsClosure uprefs;
    CLDToOopClosure cld_uprefs(&uprefs);
    CodeBlobToOopClosure code_uprefs(&uprefs, CodeBlobToOopClosure::FixRelocations);
    ShenandoahRootProcessor rp(_heap, 1);
    rp.process_all_roots(&uprefs,
                         &cld_uprefs,
                         &code_uprefs);
    */
  }

  {
    MarkingCodeBlobClosure follow_code_closure(&MarkSweep::follow_root_closure, CodeBlobToOopClosure::FixRelocations);
    // Need cleared claim bits for the roots processing
    ClassLoaderDataGraph::clear_claimed_marks();
    ShenandoahRootProcessor rp(_heap, 1);
    rp.process_strong_roots(&MarkSweep::follow_root_closure,
                            &MarkSweep::follow_cld_closure,
                            &follow_code_closure);

    // Also update (without marking) weak CLD refs, in case they're reachable.
    UpdateRefsClosure uprefs;
    CLDToOopClosure cld_uprefs(&uprefs);
    ClassLoaderDataGraph::roots_cld_do(NULL, &cld_uprefs);

    // Same for weak JNI handles.
    ShenandoahAlwaysTrueClosure always_true;
    JNIHandles::weak_oops_do(&always_true, &uprefs);
  }

  _heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::weakrefs);
  // NOTE(review): do_mark_compact() already called setup_policy(true) —
  // this second call with 'false' overrides it; confirm which is intended.
  bool clear_soft_refs = false; //fixme
  rp->setup_policy(clear_soft_refs);

  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&MarkSweep::is_alive,
                                      &MarkSweep::keep_alive,
                                      &MarkSweep::follow_stack_closure,
                                      NULL,
                                      NULL,
                                      _heap->tracer()->gc_id());

  // heap->tracer()->report_gc_reference_stats(stats);

  _heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::weakrefs);

  // Unload classes and purge the SystemDictionary.
  bool purged_class = SystemDictionary::do_unloading(&MarkSweep::is_alive);

  // Unload nmethods.
  CodeCache::do_unloading(&MarkSweep::is_alive, purged_class);

  // Prune dead klasses from subklass/sibling/implementor lists.
  Klass::clean_weak_klass_links(&MarkSweep::is_alive);

  // Delete entries for dead interned string and clean up unreferenced symbols in symbol table.
  _heap->unlink_string_and_symbol_table(&MarkSweep::is_alive);

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    // Universe::heap()->prepare_for_verify();
    _heap->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    if (!VerifySilently) {
      gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    }
    // Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    _heap->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    if (!VerifySilently) {
      gclog_or_tty->print_cr("]");
    }
  }
}

// Region closure for phase 2: computes, for every live object, the address
// it will slide to, using the serial-GC CompactPoint machinery.
//
// Humongous objects are never moved: a live humongous-start object is
// self-forwarded (forward_to(obj)). Dead humongous regions (start and all
// continuations) are reset and handed to the compact point for reuse.
// _dead_humongous carries the start-region's liveness verdict forward to
// its continuation regions, relying on regions being visited in order.
class ShenandoahPrepareForCompaction : public ShenandoahHeapRegionClosure {
  CompactPoint _cp;
  ShenandoahHeap* _heap;
  bool _dead_humongous;  // true while walking continuations of a dead humongous object

public:
  ShenandoahPrepareForCompaction() :
    _heap(ShenandoahHeap::heap()),
    _dead_humongous(false) {
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    // We need to save the contents
    if (!r->is_humongous()) {
      // Lazily start compaction at the first regular region we see.
      if (_cp.space == NULL) {
        _cp.space = r;
        _cp.threshold = r->initialize_threshold();
      }
      _dead_humongous = false;
      r->prepare_for_compaction(&_cp);
    } else {
      if (r->is_humongous_start()) {
        // The humongous object starts just past its Brooks pointer word.
        oop obj = oop(r->bottom() + BrooksPointer::BROOKS_POINTER_OBJ_SIZE);
        if (obj->is_gc_marked()) {
          // Live humongous objects stay in place.
          obj->forward_to(obj);
          _dead_humongous = false;
        } else {
          if (_cp.space == NULL) {
            _cp.space = r;
            _cp.threshold = r->initialize_threshold();
          }
          _dead_humongous = true;
          guarantee(r->region_number() >= ((ShenandoahHeapRegion*)_cp.space)->region_number(),
                    "only reset regions that are not yet used for compaction");
          r->reset();
          r->prepare_for_compaction(&_cp);
        }
      } else {
        assert(r->is_humongous_continuation(), "expect humongous continuation");
        if (_dead_humongous) {
          guarantee(r->region_number() > ((ShenandoahHeapRegion*)_cp.space)->region_number(),
                    "only reset regions that are not yet used for compaction");
          r->reset();
          r->prepare_for_compaction(&_cp);
        }
      }
    }
    return false;  // keep iterating
  }
};

// Phase 2: walk all regions in order and record each live object's
// post-compaction target address in its forwarding pointer.
void ShenandoahMarkCompact::phase2_calculate_target_addresses() {
  ShenandoahPrepareForCompaction prepare;
  ShenandoahHeap::heap()->heap_region_iterate(&prepare);
}

// Region closure for phase 3: rewrites the oop fields inside every live
// object to point at the targets' new (phase-2 computed) addresses.
// Live humongous objects are handled individually; regular regions
// delegate to the region's own adjust_pointers().
class ShenandoahMarkCompactAdjustPointersClosure : public ShenandoahHeapRegionClosure {
  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_humongous_start()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom() + BrooksPointer::BROOKS_POINTER_OBJ_SIZE);
        assert(obj->is_gc_marked(), "should be marked");
        // point all the oops to the new location
        MarkSweep::adjust_pointers(obj);
      }
    } else {
      r->adjust_pointers();
    }
    return false;  // keep iterating
  }
};

// Phase 3: adjust every reference — strong roots, weak roots, preserved
// marks, and all object fields — to the new object locations.
void ShenandoahMarkCompact::phase3_update_references() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  CodeBlobToOopClosure adjust_code_closure(&MarkSweep::adjust_pointer_closure,
                                           CodeBlobToOopClosure::FixRelocations);

  {
    ShenandoahRootProcessor rp(heap, 1);
    rp.process_all_roots(&MarkSweep::adjust_pointer_closure,
                         &MarkSweep::adjust_cld_closure,
                         &adjust_code_closure);
  }

  assert(MarkSweep::ref_processor() == heap->ref_processor(), "Sanity");

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  heap->weak_roots_iterate(&MarkSweep::adjust_pointer_closure);

  // if (G1StringDedup::is_enabled()) {
  //   G1StringDedup::oops_do(&MarkSweep::adjust_pointer_closure);
  // }

  // Adjust the saved mark-word entries recorded during marking.
  MarkSweep::adjust_marks();

  ShenandoahMarkCompactAdjustPointersClosure apc;
  heap->heap_region_iterate(&apc);
}

// Re-initializes the Brooks (forwarding) pointer of each object to point
// at the object itself, now that everything sits at its final address.
class ShenandoahCleanupObjectClosure : public ObjectClosure {
  void do_object(oop p) {
    ShenandoahHeap::heap()->initialize_brooks_ptr(p);
  }
};

// Region closure for phase 4: physically slides live objects to their
// target addresses. Humongous objects stay put — only their mark word is
// re-initialized; regular regions run the region compaction.
class CompactObjectsClosure : public ShenandoahHeapRegionClosure {

public:

  CompactObjectsClosure() {
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_humongous_start()) {
        oop obj = oop(r->bottom() + BrooksPointer::BROOKS_POINTER_OBJ_SIZE);
        assert(obj->is_gc_marked(), "expect marked humongous object");
        obj->init_mark();
      }
    } else {
      r->compact();
    }

    return false;  // keep iterating
  }

};

// Post-compaction accounting: rebuilds the free-region list, recycles
// empty regions, refreshes per-region live-data counters, and accumulates
// total live bytes (retrieved via getLive()).
class ShenandoahPostCompactClosure : public ShenandoahHeapRegionClosure {
  size_t _live;          // running total of live bytes across all regions
  ShenandoahHeap* _heap;
public:

  ShenandoahPostCompactClosure() : _live(0), _heap(ShenandoahHeap::heap()) {
    // Free list is rebuilt from scratch as regions are visited.
    _heap->clear_free_regions();
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous()) {
      // Humongous regions count as fully live.
      _live += ShenandoahHeapRegion::RegionSizeBytes;

    } else {
      size_t live = r->used();
      if (live == 0) {
        r->recycle();
        _heap->add_free_region(r);
      }
      r->setLiveData(live);
      _live += live;
    }

    return false;  // keep iterating
  }

  size_t getLive() { return _live;}

};

// Phase 4: move objects, fix up Brooks pointers, and rebuild region
// accounting; the heap's used-bytes counter is reset to the live total.
void ShenandoahMarkCompact::phase4_compact_objects() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  CompactObjectsClosure coc;
  heap->heap_region_iterate(&coc);

  ShenandoahCleanupObjectClosure cleanup;
  heap->object_iterate(&cleanup);

  ShenandoahPostCompactClosure post_compact;
  heap->heap_region_iterate(&post_compact);

  heap->set_used(post_compact.getLive());
}