1 /* 2 * Copyright (c) 2014, 2015, Red Hat, Inc. and/or its affiliates. 3 * 4 * This code is free software; you can redistribute it and/or modify it 5 * under the terms of the GNU General Public License version 2 only, as 6 * published by the Free Software Foundation. 7 * 8 * This code is distributed in the hope that it will be useful, but WITHOUT 9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 11 * version 2 for more details (a copy is included in the LICENSE file that 12 * accompanied this code). 13 * 14 * You should have received a copy of the GNU General Public License version 15 * 2 along with this work; if not, write to the Free Software Foundation, 16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 17 * 18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 19 * or visit www.oracle.com if you need additional information or have any 20 * questions. 
21 * 22 */ 23 24 #include "code/codeCache.hpp" 25 #include "gc/shared/isGCActiveMark.hpp" 26 #include "gc/shenandoah/brooksPointer.hpp" 27 #include "gc/shenandoah/shenandoahMarkCompact.hpp" 28 #include "gc/shenandoah/shenandoahBarrierSet.hpp" 29 #include "gc/shenandoah/shenandoahHeap.hpp" 30 #include "gc/shenandoah/shenandoahHeap.inline.hpp" 31 #include "gc/shenandoah/shenandoahRootProcessor.hpp" 32 #include "gc/shenandoah/vm_operations_shenandoah.hpp" 33 #include "gc/serial/markSweep.inline.hpp" 34 #include "oops/oop.inline.hpp" 35 #include "runtime/biasedLocking.hpp" 36 #include "runtime/thread.hpp" 37 #include "utilities/copy.hpp" 38 #include "gc/shared/taskqueue.inline.hpp" 39 #include "gc/shared/workgroup.hpp" 40 41 42 43 void ShenandoahMarkCompact::allocate_stacks() { 44 MarkSweep::_preserved_count_max = 0; 45 MarkSweep::_preserved_marks = NULL; 46 MarkSweep::_preserved_count = 0; 47 } 48 49 void ShenandoahMarkCompact::do_mark_compact() { 50 51 COMPILER2_PRESENT(DerivedPointerTable::clear()); 52 53 ShenandoahHeap* _heap = ShenandoahHeap::heap(); 54 55 assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint"); 56 IsGCActiveMark is_active; 57 58 assert(Thread::current()->is_VM_thread(), "Do full GC only while world is stopped"); 59 assert(_heap->is_bitmap_clear(), "require cleared bitmap"); 60 assert(!_heap->concurrent_mark_in_progress(), "can't do full-GC while marking is in progress"); 61 assert(!_heap->is_evacuation_in_progress(), "can't do full-GC while evacuation is in progress"); 62 assert(!_heap->is_update_references_in_progress(), "can't do full-GC while updating of references is in progress"); 63 64 _heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::full_gc); 65 66 // We need to clear the is_in_collection_set flag in all regions. 
67 ShenandoahHeapRegion** regions = _heap->heap_regions(); 68 size_t num_regions = _heap->num_regions(); 69 for (size_t i = 0; i < num_regions; i++) { 70 regions[i]->set_is_in_collection_set(false); 71 } 72 _heap->clear_cset_fast_test(); 73 74 /* 75 if (ShenandoahVerify) { 76 // Full GC should only be called between regular concurrent cycles, therefore 77 // those verifications should be valid. 78 _heap->verify_heap_after_evacuation(); 79 _heap->verify_heap_after_update_refs(); 80 } 81 */ 82 83 if (ShenandoahTraceFullGC) { 84 gclog_or_tty->print_cr("Shenandoah-full-gc: start with heap used: "SIZE_FORMAT" MB", _heap->used() / M); 85 gclog_or_tty->print_cr("Shenandoah-full-gc: phase 1: marking the heap"); 86 // _heap->print_heap_regions(); 87 } 88 89 if (UseTLAB) { 90 _heap->ensure_parsability(true); 91 } 92 93 _heap->cleanup_after_cancelconcgc(); 94 95 ReferenceProcessor* rp = _heap->ref_processor(); 96 97 // hook up weak ref data so it can be used during Mark-Sweep 98 assert(MarkSweep::ref_processor() == NULL, "no stomping"); 99 assert(rp != NULL, "should be non-NULL"); 100 assert(rp == ShenandoahHeap::heap()->ref_processor(), "Precondition"); 101 bool clear_all_softrefs = true; //fixme 102 MarkSweep::_ref_processor = rp; 103 rp->setup_policy(clear_all_softrefs); 104 105 CodeCache::gc_prologue(); 106 allocate_stacks(); 107 108 // We should save the marks of the currently locked biased monitors. 109 // The marking doesn't preserve the marks of biased objects. 
110 BiasedLocking::preserve_marks(); 111 112 phase1_mark_heap(); 113 114 if (ShenandoahTraceFullGC) { 115 gclog_or_tty->print_cr("Shenandoah-full-gc: phase 2: calculating target addresses"); 116 } 117 phase2_calculate_target_addresses(); 118 119 if (ShenandoahTraceFullGC) { 120 gclog_or_tty->print_cr("Shenandoah-full-gc: phase 3: updating references"); 121 } 122 123 // Don't add any more derived pointers during phase3 124 COMPILER2_PRESENT(DerivedPointerTable::set_active(false)); 125 126 phase3_update_references(); 127 128 if (ShenandoahTraceFullGC) { 129 gclog_or_tty->print_cr("Shenandoah-full-gc: phase 4: compacting objects"); 130 } 131 132 phase4_compact_objects(); 133 134 135 MarkSweep::restore_marks(); 136 BiasedLocking::restore_marks(); 137 GenMarkSweep::deallocate_stacks(); 138 139 CodeCache::gc_epilogue(); 140 JvmtiExport::gc_epilogue(); 141 142 // refs processing: clean slate 143 MarkSweep::_ref_processor = NULL; 144 145 146 if (ShenandoahVerify) { 147 _heap->verify_heap_after_evacuation(); 148 _heap->verify_heap_after_update_refs(); 149 } 150 151 if (ShenandoahTraceFullGC) { 152 gclog_or_tty->print_cr("Shenandoah-full-gc: finish with heap used: "SIZE_FORMAT" MB", _heap->used() / M); 153 } 154 155 _heap->_bytesAllocSinceCM = 0; 156 157 _heap->set_need_update_refs(false); 158 159 COMPILER2_PRESENT(DerivedPointerTable::update_pointers()); 160 161 _heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::full_gc); 162 } 163 164 class UpdateRefsClosure: public ExtendedOopClosure { 165 public: 166 virtual void do_oop(oop* p) { 167 oop obj = oopDesc::load_heap_oop(p); 168 if (! 
oopDesc::is_null(obj)) { 169 ShenandoahBarrierSet::resolve_and_update_oop_static(p, obj); 170 } 171 } 172 virtual void do_oop(narrowOop* p) { 173 Unimplemented(); 174 } 175 }; 176 177 void ShenandoahMarkCompact::phase1_mark_heap() { 178 ShenandoahHeap* _heap = ShenandoahHeap::heap(); 179 ReferenceProcessor* rp = _heap->ref_processor(); 180 181 MarkSweep::_ref_processor = rp; 182 183 // First, update _all_ references in GC roots to point to to-space. 184 { 185 // Need cleared claim bits for the roots processing 186 /* 187 ClassLoaderDataGraph::clear_claimed_marks(); 188 UpdateRefsClosure uprefs; 189 CLDToOopClosure cld_uprefs(&uprefs); 190 CodeBlobToOopClosure code_uprefs(&uprefs, CodeBlobToOopClosure::FixRelocations); 191 ShenandoahRootProcessor rp(_heap, 1); 192 rp.process_all_roots(&uprefs, 193 &cld_uprefs, 194 &code_uprefs); 195 */ 196 } 197 198 { 199 MarkingCodeBlobClosure follow_code_closure(&MarkSweep::follow_root_closure, CodeBlobToOopClosure::FixRelocations); 200 // Need cleared claim bits for the roots processing 201 ClassLoaderDataGraph::clear_claimed_marks(); 202 ShenandoahRootProcessor rp(_heap, 1); 203 rp.process_strong_roots(&MarkSweep::follow_root_closure, 204 &MarkSweep::follow_cld_closure, 205 &follow_code_closure); 206 207 // Also update (without marking) weak CLD refs, in case they're reachable. 208 UpdateRefsClosure uprefs; 209 CLDToOopClosure cld_uprefs(&uprefs); 210 ClassLoaderDataGraph::roots_cld_do(NULL, &cld_uprefs); 211 212 // Same for weak JNI handles. 
213 ShenandoahAlwaysTrueClosure always_true; 214 JNIHandles::weak_oops_do(&always_true, &uprefs); 215 } 216 217 _heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::weakrefs); 218 bool clear_soft_refs = false; //fixme 219 rp->setup_policy(clear_soft_refs); 220 221 const ReferenceProcessorStats& stats = 222 rp->process_discovered_references(&MarkSweep::is_alive, 223 &MarkSweep::keep_alive, 224 &MarkSweep::follow_stack_closure, 225 NULL, 226 NULL, 227 _heap->tracer()->gc_id()); 228 229 // heap->tracer()->report_gc_reference_stats(stats); 230 231 _heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::weakrefs); 232 233 // Unload classes and purge the SystemDictionary. 234 bool purged_class = SystemDictionary::do_unloading(&MarkSweep::is_alive); 235 236 // Unload nmethods. 237 CodeCache::do_unloading(&MarkSweep::is_alive, purged_class); 238 239 // Prune dead klasses from subklass/sibling/implementor lists. 240 Klass::clean_weak_klass_links(&MarkSweep::is_alive); 241 242 // Delete entries for dead interned string and clean up unreferenced symbols in symbol table. 243 _heap->unlink_string_and_symbol_table(&MarkSweep::is_alive); 244 245 if (VerifyDuringGC) { 246 HandleMark hm; // handle scope 247 COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact); 248 // Universe::heap()->prepare_for_verify(); 249 _heap->prepare_for_verify(); 250 // Note: we can verify only the heap here. When an object is 251 // marked, the previous value of the mark word (including 252 // identity hash values, ages, etc) is preserved, and the mark 253 // word is set to markOop::marked_value - effectively removing 254 // any hash values from the mark word. These hash values are 255 // used when verifying the dictionaries and so removing them 256 // from the mark word can make verification of the dictionaries 257 // fail. At the end of the GC, the original mark word values 258 // (including hash values) are restored to the appropriate 259 // objects. 
260 if (!VerifySilently) { 261 gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying "); 262 } 263 // Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord); 264 _heap->verify(VerifySilently, VerifyOption_G1UseMarkWord); 265 if (!VerifySilently) { 266 gclog_or_tty->print_cr("]"); 267 } 268 } 269 } 270 271 class ShenandoahPrepareForCompaction : public ShenandoahHeapRegionClosure { 272 CompactPoint _cp; 273 ShenandoahHeap* _heap; 274 bool _dead_humongous; 275 276 public: 277 ShenandoahPrepareForCompaction() : 278 _heap(ShenandoahHeap::heap()), 279 _dead_humongous(false) { 280 } 281 282 bool doHeapRegion(ShenandoahHeapRegion* r) { 283 // We need to save the contents 284 if (!r->is_humongous()) { 285 if (_cp.space == NULL) { 286 _cp.space = r; 287 _cp.threshold = r->initialize_threshold(); 288 } 289 _dead_humongous = false; 290 r->prepare_for_compaction(&_cp); 291 } else { 292 if (r->is_humongous_start()) { 293 oop obj = oop(r->bottom() + BrooksPointer::BROOKS_POINTER_OBJ_SIZE); 294 if (obj->is_gc_marked()) { 295 obj->forward_to(obj); 296 _dead_humongous = false; 297 } else { 298 if (_cp.space == NULL) { 299 _cp.space = r; 300 _cp.threshold = r->initialize_threshold(); 301 } 302 _dead_humongous = true; 303 guarantee(r->region_number() >= ((ShenandoahHeapRegion*)_cp.space)->region_number(), 304 "only reset regions that are not yet used for compaction"); 305 r->reset(); 306 r->prepare_for_compaction(&_cp); 307 } 308 } else { 309 assert(r->is_humongous_continuation(), "expect humongous continuation"); 310 if (_dead_humongous) { 311 guarantee(r->region_number() > ((ShenandoahHeapRegion*)_cp.space)->region_number(), 312 "only reset regions that are not yet used for compaction"); 313 r->reset(); 314 r->prepare_for_compaction(&_cp); 315 } 316 } 317 } 318 return false; 319 } 320 }; 321 322 void ShenandoahMarkCompact::phase2_calculate_target_addresses() { 323 ShenandoahPrepareForCompaction prepare; 324 ShenandoahHeap::heap()->heap_region_iterate(&prepare); 325 } 

// Region closure for phase 3: adjusts the oops stored inside heap objects
// to point at the forwarding addresses computed in phase 2. For a
// humongous region, only the start region carries an object, and its
// fields are adjusted directly with MarkSweep::adjust_pointers().
class ShenandoahMarkCompactAdjustPointersClosure : public ShenandoahHeapRegionClosure {
  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_humongous_start()) {
        // We must adjust the pointers on the single H object.
        // The object begins just past its Brooks pointer word.
        oop obj = oop(r->bottom() + BrooksPointer::BROOKS_POINTER_OBJ_SIZE);
        assert(obj->is_gc_marked(), "should be marked");
        // point all the oops to the new location
        MarkSweep::adjust_pointers(obj);
      }
    } else {
      r->adjust_pointers();
    }
    return false;   // keep iterating over all regions
  }
};

// Phase 3: rewrite every reference -- strong roots, weak roots, preserved
// marks, and object fields -- to the target addresses computed in phase 2.
void ShenandoahMarkCompact::phase3_update_references() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  CodeBlobToOopClosure adjust_code_closure(&MarkSweep::adjust_pointer_closure,
                                           CodeBlobToOopClosure::FixRelocations);

  {
    // Adjust all roots (strong and weak reachable via the root processor).
    ShenandoahRootProcessor rp(heap, 1);
    rp.process_all_roots(&MarkSweep::adjust_pointer_closure,
                         &MarkSweep::adjust_cld_closure,
                         &adjust_code_closure);
  }

  assert(MarkSweep::ref_processor() == heap->ref_processor(), "Sanity");

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  heap->weak_roots_iterate(&MarkSweep::adjust_pointer_closure);

  // if (G1StringDedup::is_enabled()) {
  //   G1StringDedup::oops_do(&MarkSweep::adjust_pointer_closure);
  // }

  // Fix up the locations recorded in the preserved-marks tables.
  MarkSweep::adjust_marks();

  // Finally adjust the object fields throughout the heap, region by region.
  ShenandoahMarkCompactAdjustPointersClosure apc;
  heap->heap_region_iterate(&apc);
}

// Post-compaction object closure: re-initializes each object's Brooks
// forwarding pointer (to point back at the object itself).
class ShenandoahCleanupObjectClosure : public ObjectClosure {
  void do_object(oop p) {
    ShenandoahHeap::heap()->initialize_brooks_ptr(p);
  }
};

// Region closure for phase 4: slides live objects in regular regions to
// their target addresses. Humongous objects are not moved; the start
// region's object just gets a fresh mark word.
class CompactObjectsClosure : public ShenandoahHeapRegionClosure {

public:

  CompactObjectsClosure() {
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_humongous_start()) {
        // The humongous object starts just past its Brooks pointer word.
        oop obj = oop(r->bottom() + BrooksPointer::BROOKS_POINTER_OBJ_SIZE);
        assert(obj->is_gc_marked(), "expect marked humongous object");
        obj->init_mark();
      }
    } else {
      r->compact();
    }

    return false;   // keep iterating over all regions
  }

};

// Region closure run after compaction: rebuilds the free-region list
// (empty regular regions are recycled), refreshes per-region live data,
// and accumulates the total live bytes so the heap's used() can be reset.
class ShenandoahPostCompactClosure : public ShenandoahHeapRegionClosure {
  size_t _live;           // running total of live bytes across all regions
  ShenandoahHeap* _heap;
public:

  ShenandoahPostCompactClosure() : _live(0), _heap(ShenandoahHeap::heap()) {
    // Start from an empty free list; regions are re-added below.
    _heap->clear_free_regions();
  }

  bool doHeapRegion(ShenandoahHeapRegion* r) {
    if (r->is_humongous()) {
      // Humongous regions are counted as fully live.
      _live += ShenandoahHeapRegion::RegionSizeBytes;

    } else {
      size_t live = r->used();
      if (live == 0) {
        r->recycle();
        _heap->add_free_region(r);
      }
      r->setLiveData(live);
      _live += live;
    }

    return false;   // keep iterating over all regions
  }

  // Total live bytes observed; becomes the heap's new used() value.
  size_t getLive() { return _live;}

};

// Phase 4: compact the regions, restore every object's Brooks pointer,
// and rebuild the free list / usage accounting.
void ShenandoahMarkCompact::phase4_compact_objects() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  CompactObjectsClosure coc;
  heap->heap_region_iterate(&coc);

  ShenandoahCleanupObjectClosure cleanup;
  heap->object_iterate(&cleanup);

  ShenandoahPostCompactClosure post_compact;
  heap->heap_region_iterate(&post_compact);

  heap->set_used(post_compact.getLive());
}