/*
 * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "incls/_precompiled.incl"
#include "incls/_g1MarkSweep.cpp.incl"

class HeapRegion;

void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  SharedHeap* sh = SharedHeap::heap();
#ifdef ASSERT
  if (sh->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation methodOops may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count for the permanent generation, since it is
  // implicitly collected whenever we do a full mark sweep collection.
  sh->perm_gen()->stat_record()->invocations++;

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  if (VerifyDuringGC) {
    G1CollectedHeap* g1h = G1CollectedHeap::heap();
    g1h->checkConcurrentMark();
  }

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // We must invalidate the perm-gen rs, so that it gets rebuilt.
  GenRemSet* rs = sh->rem_set();
  rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  //  Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  //  Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}


void G1MarkSweep::allocate_stacks() {
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
}

void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           true,  // Collecting permanent generation.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_code_root_closure,
                           &GenMarkSweep::follow_root_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive,
                          &GenMarkSweep::keep_alive,
                          purged_class);
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  GenMarkSweep::follow_weak_klass_links();
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Visit memoized MDO's and clear any unmarked weak refs
  GenMarkSweep::follow_mdo_weak_refs();
  assert(GenMarkSweep::_marking_stack.is_empty(), "just drained");

  // Visit symbol and interned string tables and delete unmarked oops
  SymbolTable::unlink(&GenMarkSweep::is_alive);
  StringTable::unlink(&GenMarkSweep::is_alive);

  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");
}

// Prepares each region for compaction: objects in regular regions get
// forwarding addresses via prepare_for_compaction(), live humongous objects
// are forwarded to themselves (they do not move), and regions holding dead
// humongous objects are freed.
class G1PrepareCompactClosure: public HeapRegionClosure {
  ModRefBarrierSet* _mrbs;
  CompactPoint _cp;

  void free_humongous_region(HeapRegion* hr) {
    HeapWord* bot = hr->bottom();
    HeapWord* end = hr->end();
    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");
    G1CollectedHeap::heap()->free_region(hr);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
  }

public:
  G1PrepareCompactClosure(CompactibleSpace* cs) :
    _cp(NULL, cs, cs->initialize_threshold()),
    _mrbs(G1CollectedHeap::heap()->mr_bs())
  {}
  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->forward_to(obj);
        } else {
          free_humongous_region(hr);
        }
      } else {
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false;
  }
};

// Finds the first HeapRegion.
class FindFirstRegionClosure: public HeapRegionClosure {
  HeapRegion* _a_region;
public:
  FindFirstRegionClosure() : _a_region(NULL) {}
  bool doHeapRegion(HeapRegion* r) {
    _a_region = r;
    return true;
  }
  HeapRegion* result() { return _a_region; }
};

void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed, a range of dead objects may get overwritten by a dead int
  // array. If perm_gen is not traversed last a klassOop may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the klassOop!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("2 compute new addresses");
  TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("2");

  // Start compacting into the first region, unless it holds a live
  // humongous object (which does not move); in that case start at its
  // next compaction space.
  FindFirstRegionClosure cl;
  g1h->heap_region_iterate(&cl);
  HeapRegion* r = cl.result();
  CompactibleSpace* sp = r;
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);

  CompactPoint perm_cp(pg, NULL, NULL);
  pg->prepare_for_compaction(&perm_cp);
}

// Updates all oops in each region so that they refer to the forwarding
// addresses computed in phase 2.
class G1AdjustPointersClosure: public HeapRegionClosure {
public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->isHumongous()) {
      if (r->startsHumongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        debug_only(GenMarkSweep::track_interior_pointers(obj));
        // point all the oops to the new location
        obj->adjust_pointers();
        debug_only(GenMarkSweep::check_interior_pointers());
      }
    } else {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  // Adjust the pointers to reflect the new locations
  EventMark m("3 adjust pointers");
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           true,  // Collecting permanent generation.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           NULL,  // do not touch code cache here
                           &GenMarkSweep::adjust_pointer_closure);

  g1h->ref_processor()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
  pg->adjust_pointers();
}

// Moves objects within each region to their forwarded locations. Humongous
// objects do not move; their mark words are simply reinitialized.
class G1SpaceCompactClosure: public HeapRegionClosure {
public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
        hr->reset_during_compaction();
      }
    } else {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through its klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("4 compact heap");
  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  pg->compact();

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

// Local Variables: ***
// c-indentation-style: gnu ***
// End: ***