/*
 * Copyright (c) 2001, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/icBuffer.hpp"
#include "gc_implementation/g1/g1Log.hpp"
#include "gc_implementation/g1/g1MarkSweep.hpp"
#include "gc_implementation/g1/g1RootProcessor.hpp"
#include "gc_implementation/g1/g1StringDedup.hpp"
#include "gc_implementation/shared/gcHeapSummary.hpp"
#include "gc_implementation/shared/gcTimer.hpp"
#include "gc_implementation/shared/gcTrace.hpp"
#include "gc_implementation/shared/gcTraceTime.hpp"
#include "memory/gcLocker.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/modRefBarrierSet.hpp"
#include "memory/referencePolicy.hpp"
#include "memory/space.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/fprofiler.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/thread.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

class HeapRegion;

void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  SharedHeap* sh = SharedHeap::heap();
#ifdef ASSERT
  if (sh->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");

  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
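  // (A biased object's mark word holds the biasing thread ID rather than
  //  the normal header bits, so those marks must be saved now and are
  //  reinstated by BiasedLocking::restore_marks() after compaction.)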
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  // Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}


void G1MarkSweep::allocate_stacks() {
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
}

void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace(" 1");

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  {
    G1RootProcessor root_processor(g1h);
    if (ClassUnloading) {
      root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
                                          &GenMarkSweep::follow_cld_closure,
                                          &follow_code_closure);
    } else {
      root_processor.process_all_roots_no_string_table(
                                          &GenMarkSweep::follow_root_closure,
                                          &GenMarkSweep::follow_cld_closure,
                                          &follow_code_closure);
    }
  }

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  assert(rp == g1h->ref_processor_stw(), "Sanity");

  rp->setup_policy(clear_all_softrefs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&GenMarkSweep::is_alive,
                                      &GenMarkSweep::keep_alive,
                                      &GenMarkSweep::follow_stack_closure,
                                      NULL,
                                      gc_timer(),
                                      gc_tracer()->gc_id());
  gc_tracer()->report_gc_reference_stats(stats);


  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  if (ClassUnloading) {

    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

    // Unload nmethods.
    CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

    // Prune dead klasses from subklass/sibling/implementor lists.
    Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
  }
  // Delete entries for dead interned strings and clean up unreferenced
  // symbols in the symbol table.
  G1CollectedHeap::heap()->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    Universe::heap()->prepare_for_verify();
    // Note: we can verify only the heap here.
    // When an object is marked, the previous value of the mark word
    // (including identity hash values, ages, etc) is preserved, and the
    // mark word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are used
    // when verifying the dictionaries and so removing them from the mark
    // word can make verification of the dictionaries fail. At the end of
    // the GC, the original mark word values (including hash values) are
    // restored to the appropriate objects.
    if (!VerifySilently) {
      gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    }
    Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    if (!VerifySilently) {
      gclog_or_tty->print_cr("]");
    }
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}


void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("2");

  prepare_compaction();
}

class G1AdjustPointersClosure: public HeapRegionClosure {
 public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->isHumongous()) {
      if (r->startsHumongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        // point all the oops to the new location
        obj->adjust_pointers();
      }
    } else {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

class G1AlwaysTrueClosure: public BoolObjectClosure {
 public:
  bool do_object_b(oop p) { return true; }
};
static G1AlwaysTrueClosure always_true;

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("3");

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
  {
    G1RootProcessor root_processor(g1h);
    root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
                                     &GenMarkSweep::adjust_cld_closure,
                                     &adjust_code_closure);
  }

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
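  // (The always-true closure makes weak_oops_do treat every remaining JNI
  //  weak handle as live, so each one is adjusted rather than cleared.)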
  JNIHandles::weak_oops_do(&always_true, &GenMarkSweep::adjust_pointer_closure);

  if (G1StringDedup::is_enabled()) {
    G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
  }

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}

class G1SpaceCompactClosure: public HeapRegionClosure {
 public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
        hr->reset_during_compaction();
      }
    } else {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the
  // validate code to use a higher index (saved from phase2) when verifying
  // perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("4");

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  g1h->heap_region_iterate(blk);
  blk->update_sets();
}

void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
  HeapWord* end = hr->end();
  FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

  assert(hr->startsHumongous(),
         "Only the start of a humongous region should be freed.");

  hr->set_containing_set(NULL);
  _humongous_regions_removed.increment(1u, hr->capacity());

  _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
  prepare_for_compaction(hr, end);
  dummy_free_list.remove_all();
}

void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
  // If this is the first live region we have come across that we can
  // compact, initialize the CompactPoint.
  if (!is_cp_initialized()) {
    _cp.space = hr;
    _cp.threshold = hr->initialize_threshold();
  }
  prepare_for_compaction_work(&_cp, hr, end);
}

void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
                                                          HeapRegion* hr,
                                                          HeapWord* end) {
  hr->prepare_for_compaction(cp);
  // Also clear the part of the card table that will be unused after
  // compaction.
  _mrbs->clear(MemRegion(hr->compaction_top(), end));
}

void G1PrepareCompactClosure::update_sets() {
  // We'll recalculate total used bytes and recreate the free list
  // at the end of the GC, so no point in updating those values here.
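  // Only the humongous region removals need to be recorded here; an empty
  // count is passed for the old regions for the same reason.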
  HeapRegionSetCount empty_set;
  _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
}

bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
  if (hr->isHumongous()) {
    if (hr->startsHumongous()) {
      oop obj = oop(hr->bottom());
      if (obj->is_gc_marked()) {
        // Live humongous objects are never moved; forward them to themselves.
        obj->forward_to(obj);
      } else {
        free_humongous_region(hr);
      }
    } else {
      assert(hr->continuesHumongous(), "Invalid humongous.");
    }
  } else {
    prepare_for_compaction(hr, hr->end());
  }
  return false;
}