/*
 * Copyright (c) 2001, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/icBuffer.hpp"
#include "gc/g1/g1MarkSweep.hpp"
#include "gc/g1/g1RootProcessor.hpp"
#include "gc/g1/g1StringDedup.hpp"
#include "gc/serial/markSweep.inline.hpp"
#include "gc/shared/gcHeapSummary.hpp"
#include "gc/shared/gcLocker.hpp"
#include "gc/shared/gcTimer.hpp"
#include "gc/shared/gcTrace.hpp"
#include "gc/shared/gcTraceTime.inline.hpp"
#include "gc/shared/genCollectedHeap.hpp"
#include "gc/shared/modRefBarrierSet.hpp"
#include "gc/shared/referencePolicy.hpp"
#include "gc/shared/space.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/atomic.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/fprofiler.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/thread.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

class HeapRegion;

void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

#ifdef ASSERT
  if (G1CollectedHeap::heap()->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");

  GenMarkSweep::set_ref_processor(rp);
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  mark_sweep_phase2();

#if defined(COMPILER2) || INCLUDE_JVMCI
  // Don't add any more derived pointers during phase3
  DerivedPointerTable::set_active(false);
#endif

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::set_ref_processor(NULL);
}


void G1MarkSweep::allocate_stacks() {
  // G1 does not reserve a fixed buffer for preserved marks; zeroing
  // _preserved_count_max makes GenMarkSweep fall back to its growable
  // preserved-mark stacks.
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
}

void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime(Info, gc, phases) tm("Phase 1: Mark live objects", gc_timer());

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  {
    G1RootProcessor root_processor(g1h, 1);
    if (ClassUnloading) {
      root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
                                          &GenMarkSweep::follow_cld_closure,
                                          &follow_code_closure);
    } else {
      root_processor.process_all_roots_no_string_table(
                                          &GenMarkSweep::follow_root_closure,
                                          &GenMarkSweep::follow_cld_closure,
                                          &follow_code_closure);
    }
  }

  {
    GCTraceTime(Debug, gc, phases) trace("Reference Processing", gc_timer());

    // Process reference objects found during marking
    ReferenceProcessor* rp = GenMarkSweep::ref_processor();
    assert(rp == g1h->ref_processor_stw(), "Sanity");

    rp->setup_policy(clear_all_softrefs);
    const ReferenceProcessorStats& stats =
        rp->process_discovered_references(&GenMarkSweep::is_alive,
                                          &GenMarkSweep::keep_alive,
                                          &GenMarkSweep::follow_stack_closure,
                                          NULL,
                                          gc_timer());
    gc_tracer()->report_gc_reference_stats(stats);
  }

  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  if (ClassUnloading) {
    GCTraceTime(Debug, gc, phases) trace("Class Unloading", gc_timer());

    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

    // Unload nmethods.
    CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

    // Prune dead klasses from subklass/sibling/implementor lists.
    Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
  }

  {
    GCTraceTime(Debug, gc, phases) trace("Scrub String and Symbol Tables", gc_timer());
    // Delete entries for dead interned strings and clean up unreferenced symbols in the symbol table.
    g1h->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);
  }

  if (G1StringDedup::is_enabled()) {
    GCTraceTime(Debug, gc, phases) trace("String Deduplication Unlink", gc_timer());
    G1StringDedup::unlink(&GenMarkSweep::is_alive);
  }

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
#if defined(COMPILER2) || INCLUDE_JVMCI
    DerivedPointerTableDeactivate dpt_deact;
#endif
    g1h->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    GCTraceTime(Info, gc, verify)("During GC (full)");
    g1h->verify(VerifyOption_G1UseMarkWord);
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}


void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GCTraceTime(Info, gc, phases) tm("Phase 2: Compute new object addresses", gc_timer());

  prepare_compaction();
}

class G1AdjustPointersClosure: public HeapRegionClosure {
 public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_starts_humongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        // point all the oops to the new location
        MarkSweep::adjust_pointers(obj);
      }
    } else if (!r->is_pinned()) {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime(Info, gc, phases) tm("Phase 3: Adjust pointers", gc_timer());

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
  {
    G1RootProcessor root_processor(g1h, 1);
    root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
                                     &GenMarkSweep::adjust_cld_closure,
                                     &adjust_code_closure);
  }

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  JNIHandles::weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  if (G1StringDedup::is_enabled()) {
    G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
  }

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}

class G1SpaceCompactClosure: public HeapRegionClosure {
 public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->is_humongous()) {
      if (hr->is_starts_humongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          // The live humongous object does not move; reinitialize its mark word.
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
      }
      hr->reset_during_compaction();
    } else if (!hr->is_pinned()) {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime(Info, gc, phases) tm("Phase 4: Move objects", gc_timer());

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  g1h->heap_region_iterate(blk);
  blk->update_sets();
}

void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
  HeapWord* end = hr->end();
  FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

  hr->set_containing_set(NULL);
  _humongous_regions_removed++;

  _g1h->free_humongous_region(hr, &dummy_free_list, false /* skip_remset */);
  prepare_for_compaction(hr, end);
  dummy_free_list.remove_all();
}

void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
  // If this is the first live region that we came across which we can compact,
  // initialize the CompactPoint.
  if (!is_cp_initialized()) {
    _cp.space = hr;
    _cp.threshold = hr->initialize_threshold();
  }
  prepare_for_compaction_work(&_cp, hr, end);
}

void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
                                                          HeapRegion* hr,
                                                          HeapWord* end) {
  hr->prepare_for_compaction(cp);
  // Also clear the part of the card table that will be unused after
  // compaction.
  _mrbs->clear(MemRegion(hr->compaction_top(), end));
}

void G1PrepareCompactClosure::update_sets() {
  // We'll recalculate total used bytes and recreate the free list
  // at the end of the GC, so no point in updating those values here.
  _g1h->remove_from_old_sets(0, _humongous_regions_removed);
}

bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
  if (hr->is_humongous()) {
    oop obj = oop(hr->humongous_start_region()->bottom());
    if (hr->is_starts_humongous() && obj->is_gc_marked()) {
      // Live humongous objects are never moved; forwarding the object
      // to itself keeps it at its current address.
      obj->forward_to(obj);
    }
    if (!obj->is_gc_marked()) {
      free_humongous_region(hr);
    }
  } else if (!hr->is_pinned()) {
    prepare_for_compaction(hr, hr->end());
  }
  return false;
}