/*
 * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/icBuffer.hpp"
#include "gc_implementation/g1/g1Log.hpp"
#include "gc_implementation/g1/g1MarkSweep.hpp"
#include "gc_implementation/g1/g1StringDedup.hpp"
#include "gc_implementation/shared/gcHeapSummary.hpp"
#include "gc_implementation/shared/gcTimer.hpp"
#include "gc_implementation/shared/gcTrace.hpp"
#include "gc_implementation/shared/gcTraceTime.hpp"
#include "memory/gcLocker.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/modRefBarrierSet.hpp"
#include "memory/referencePolicy.hpp"
#include "memory/space.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/atomic.inline.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/fprofiler.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/thread.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

class HeapRegion;

void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  SharedHeap* sh = SharedHeap::heap();
#ifdef ASSERT
  if (sh->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");

  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation, Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();

  bool marked_for_unloading = false;

  allocate_stacks();
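
  // The collection itself is a serial mark-compact over the whole heap,
  // run as the four phases below: phase 1 marks all live objects from the
  // roots (and processes references and unloads dead metadata), phase 2
  // computes each live object's new address, phase 3 adjusts all pointers
  // to refer to the new addresses, and phase 4 actually moves the objects.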

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  // Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}


void G1MarkSweep::allocate_stacks() {
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
}

void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  sh->process_strong_roots(true,   // activate StrongRootsScope
                           SharedHeap::SO_None,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_cld_closure,
                           &follow_code_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");

  rp->setup_policy(clear_all_softrefs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&GenMarkSweep::is_alive,
                                      &GenMarkSweep::keep_alive,
                                      &GenMarkSweep::follow_stack_closure,
                                      NULL,
                                      gc_timer(),
                                      gc_tracer()->gc_id());
  gc_tracer()->report_gc_reference_stats(stats);

  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");
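
  // With marking complete, weak metadata is unloaded in dependency order:
  // dead classes are purged first (do_unloading() reports whether any were),
  // nmethods referencing them are unloaded next, and finally the klass
  // hierarchy links and the string/symbol tables are pruned.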

  // Unload classes and purge the SystemDictionary.
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

  // Unload nmethods.
  CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

  // Prune dead klasses from subklass/sibling/implementor lists.
  Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);

  // Delete entries for dead interned strings and clean up unreferenced symbols in the symbol table.
  G1CollectedHeap::heap()->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    Universe::heap()->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOopDesc::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    if (!VerifySilently) {
      gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    }
    Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    if (!VerifySilently) {
      gclog_or_tty->print_cr("]");
    }
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}


void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("2");

  prepare_compaction();
}

class G1AdjustPointersClosure: public HeapRegionClosure {
 public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_starts_humongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        // point all the oops to the new location
        obj->adjust_pointers();
      }
    } else {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};
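
// A humongous object occupies a sequence of regions: it begins in a
// "starts humongous" region and spills into zero or more "continues
// humongous" regions. Because the whole sequence contains exactly one
// object, the closure above adjusts pointers only at the start region.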

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
  sh->process_all_roots(true,  // activate StrongRootsScope
                        SharedHeap::SO_AllCodeCache,
                        &GenMarkSweep::adjust_pointer_closure,
                        &GenMarkSweep::adjust_cld_closure,
                        &adjust_code_closure);

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  sh->process_weak_roots(&GenMarkSweep::adjust_pointer_closure);

  if (G1StringDedup::is_enabled()) {
    G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
  }

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}

class G1SpaceCompactClosure: public HeapRegionClosure {
 public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->is_humongous()) {
      if (hr->is_starts_humongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
        hr->reset_during_compaction();
      }
    } else {
      hr->compact();
    }
    return false;
  }
};
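
// Note that live humongous objects are never moved by this collector:
// phase 2 forwards each marked humongous object to itself (see
// G1PrepareCompactClosure::doHeapRegion below), so "compacting" one only
// means reinstalling a clean mark word via init_mark() and resetting the
// region metadata.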

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the
  // validate code to use a higher index (saved from phase2) when verifying
  // perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("4");

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  g1h->heap_region_iterate(blk);
  blk->update_sets();
}

void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
  HeapWord* end = hr->end();
  FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

  assert(hr->is_starts_humongous(),
         "Only the start of a humongous region should be freed.");

  hr->set_containing_set(NULL);
  _humongous_regions_removed.increment(1u, hr->capacity());

  _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
  prepare_for_compaction(hr, end);
  dummy_free_list.remove_all();
}

void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
  // If this is the first live region that we came across which we can compact,
  // initialize the CompactPoint.
  if (!is_cp_initialized()) {
    _cp.space = hr;
    _cp.threshold = hr->initialize_threshold();
  }
  prepare_for_compaction_work(&_cp, hr, end);
}

void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
                                                          HeapRegion* hr,
                                                          HeapWord* end) {
  hr->prepare_for_compaction(cp);
  // Also clear the part of the card table that will be unused after
  // compaction.
  _mrbs->clear(MemRegion(hr->compaction_top(), end));
}

void G1PrepareCompactClosure::update_sets() {
  // We'll recalculate total used bytes and recreate the free list
  // at the end of the GC, so no point in updating those values here.
  HeapRegionSetCount empty_set;
  _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
}

bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
  if (hr->is_humongous()) {
    if (hr->is_starts_humongous()) {
      oop obj = oop(hr->bottom());
      if (obj->is_gc_marked()) {
        obj->forward_to(obj);
      } else {
        free_humongous_region(hr);
      }
    } else {
      assert(hr->is_continues_humongous(), "Invalid humongous.");
    }
  } else {
    prepare_for_compaction(hr, hr->end());
  }
  return false;
}
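
// Note: as with the closures above, doHeapRegion() returns false to tell
// heap_region_iterate() to continue with the next region; returning true
// would terminate the iteration early.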