1 /* 2 * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "gc/serial/genMarkSweep.hpp"
#include "gc/shared/blockOffsetTable.inline.hpp"
#include "gc/shared/cardTableRS.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/gcLocker.inline.hpp"
#include "gc/shared/gcTimer.hpp"
#include "gc/shared/gcTrace.hpp"
#include "gc/shared/genCollectedHeap.hpp"
#include "gc/shared/genOopClosures.hpp"
#include "gc/shared/genOopClosures.inline.hpp"
#include "gc/shared/generation.hpp"
#include "gc/shared/space.inline.hpp"
#include "gc/shared/spaceDecorator.hpp"
#include "memory/allocation.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/java.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

// Construct a generation over the reserved space "rs", committing
// "initial_size" bytes up front.  If the initial commit fails, the VM
// exits during initialization.  The reference processor is created
// later, in ref_processor_init().
Generation::Generation(ReservedSpace rs, size_t initial_size) :
  _ref_processor(NULL) {
  if (!_virtual_space.initialize(rs, initial_size)) {
    vm_exit_during_initialization("Could not reserve enough space for "
                    "object heap");
  }
  // Mangle all of the initial generation.
  if (ZapUnusedHeapArea) {
    MemRegion mangle_region((HeapWord*)_virtual_space.low(),
      (HeapWord*)_virtual_space.high());
    SpaceMangler::mangle_region(mangle_region);
  }
  // The reserved region spans the whole virtual space, committed or not.
  _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
          (HeapWord*)_virtual_space.high_boundary());
}

// Return the GenerationSpec describing this generation: the young-gen
// spec if this is the heap's young generation, the old-gen spec otherwise.
GenerationSpec* Generation::spec() {
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  if (this == gch->young_gen()) {
    return gch->gen_policy()->young_gen_spec();
  }
  return gch->gen_policy()->old_gen_spec();
}

// Maximum capacity is the full reserved size, not just the committed part.
size_t Generation::max_capacity() const {
  return reserved().byte_size();
}

// Log the usage change across a collection as "prev->used(capacity)":
// in raw bytes when (PrintGCDetails && Verbose), otherwise in KB.
void Generation::print_heap_change(size_t prev_used) const {
  if (PrintGCDetails && Verbose) {
    gclog_or_tty->print(" " SIZE_FORMAT
                        "->" SIZE_FORMAT
                        "(" SIZE_FORMAT ")",
                        prev_used, used(), capacity());
  } else {
    gclog_or_tty->print(" " SIZE_FORMAT "K"
                        "->" SIZE_FORMAT "K"
                        "(" SIZE_FORMAT "K)",
                        prev_used / K, used() / K, capacity() / K);
  }
}

// By default we get a single threaded default reference processor;
// generations needing multi-threaded refs processing or discovery override this method.
89 void Generation::ref_processor_init() { 90 assert(_ref_processor == NULL, "a reference processor already exists"); 91 assert(!_reserved.is_empty(), "empty generation?"); 92 _ref_processor = new ReferenceProcessor(_reserved); // a vanilla reference processor 93 if (_ref_processor == NULL) { 94 vm_exit_during_initialization("Could not allocate ReferenceProcessor object"); 95 } 96 } 97 98 void Generation::print() const { print_on(tty); } 99 100 void Generation::print_on(outputStream* st) const { 101 st->print(" %-20s", name()); 102 st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K", 103 capacity()/K, used()/K); 104 st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")", 105 p2i(_virtual_space.low_boundary()), 106 p2i(_virtual_space.high()), 107 p2i(_virtual_space.high_boundary())); 108 } 109 110 void Generation::print_summary_info() { print_summary_info_on(tty); } 111 112 void Generation::print_summary_info_on(outputStream* st) { 113 StatRecord* sr = stat_record(); 114 double time = sr->accumulated_time.seconds(); 115 // I didn't want to change the logging when removing the level concept, 116 // but I guess this logging could say young/old or something instead of 0/1. 117 uint level; 118 if (this == GenCollectedHeap::heap()->young_gen()) { 119 level = 0; 120 } else { 121 level = 1; 122 } 123 st->print_cr("[Accumulated GC generation %d time %3.7f secs, " 124 "%u GC's, avg GC time %3.7f]", 125 level, time, sr->invocations, 126 sr->invocations > 0 ? 
time / sr->invocations : 0.0); 127 } 128 129 // Utility iterator classes 130 131 class GenerationIsInReservedClosure : public SpaceClosure { 132 public: 133 const void* _p; 134 Space* sp; 135 virtual void do_space(Space* s) { 136 if (sp == NULL) { 137 if (s->is_in_reserved(_p)) sp = s; 138 } 139 } 140 GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {} 141 }; 142 143 class GenerationIsInClosure : public SpaceClosure { 144 public: 145 const void* _p; 146 Space* sp; 147 virtual void do_space(Space* s) { 148 if (sp == NULL) { 149 if (s->is_in(_p)) sp = s; 150 } 151 } 152 GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {} 153 }; 154 155 bool Generation::is_in(const void* p) const { 156 GenerationIsInClosure blk(p); 157 ((Generation*)this)->space_iterate(&blk); 158 return blk.sp != NULL; 159 } 160 161 size_t Generation::max_contiguous_available() const { 162 // The largest number of contiguous free words in this or any higher generation. 163 size_t avail = contiguous_available(); 164 size_t old_avail = 0; 165 if (this == GenCollectedHeap::heap()->young_gen()) { 166 old_avail = GenCollectedHeap::heap()->old_gen()->contiguous_available(); 167 } 168 return MAX2(avail, old_avail); 169 } 170 171 bool Generation::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const { 172 size_t available = max_contiguous_available(); 173 bool res = (available >= max_promotion_in_bytes); 174 if (PrintGC && Verbose) { 175 gclog_or_tty->print_cr( 176 "Generation: promo attempt is%s safe: available("SIZE_FORMAT") %s max_promo("SIZE_FORMAT")", 177 res? "":" not", available, res? ">=":"<", 178 max_promotion_in_bytes); 179 } 180 return res; 181 } 182 183 // Ignores "ref" and calls allocate(). 
// Copy "obj" (of "obj_size" words) into space newly allocated in this
// generation.  On allocation failure, hand the object to the heap's
// handle_failed_promotion().  Returns the new copy, or NULL when the
// debug-only failure-injection hook fires.
oop Generation::promote(oop obj, size_t obj_size) {
  assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");

#ifndef PRODUCT
  // Test hook: deliberately fail some promotions to exercise failure paths.
  if (GenCollectedHeap::heap()->promotion_should_fail()) {
    return NULL;
  }
#endif  // #ifndef PRODUCT

  HeapWord* result = allocate(obj_size, false);
  if (result != NULL) {
    // Heap object copies are word-aligned and the regions cannot overlap.
    Copy::aligned_disjoint_words((HeapWord*)obj, result, obj_size);
    return oop(result);
  } else {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    return gch->handle_failed_promotion(this, obj, obj_size);
  }
}

// Parallel promotion is not supported by this generic generation;
// generations that support parallel scavenging must override this.
oop Generation::par_promote(int thread_num,
                            oop obj, markOop m, size_t word_sz) {
  // Could do a bad general impl here that gets a lock.  But no.
  ShouldNotCallThis();
  return NULL;
}

// Return the space of this generation whose reserved region contains "p",
// or NULL if there is no such space.
Space* Generation::space_containing(const void* p) const {
  GenerationIsInReservedClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  return blk.sp;
}

// Some of these are mediocre general implementations.  Should be
// overridden to get better performance.
219 220 class GenerationBlockStartClosure : public SpaceClosure { 221 public: 222 const void* _p; 223 HeapWord* _start; 224 virtual void do_space(Space* s) { 225 if (_start == NULL && s->is_in_reserved(_p)) { 226 _start = s->block_start(_p); 227 } 228 } 229 GenerationBlockStartClosure(const void* p) { _p = p; _start = NULL; } 230 }; 231 232 HeapWord* Generation::block_start(const void* p) const { 233 GenerationBlockStartClosure blk(p); 234 // Cast away const 235 ((Generation*)this)->space_iterate(&blk); 236 return blk._start; 237 } 238 239 class GenerationBlockSizeClosure : public SpaceClosure { 240 public: 241 const HeapWord* _p; 242 size_t size; 243 virtual void do_space(Space* s) { 244 if (size == 0 && s->is_in_reserved(_p)) { 245 size = s->block_size(_p); 246 } 247 } 248 GenerationBlockSizeClosure(const HeapWord* p) { _p = p; size = 0; } 249 }; 250 251 size_t Generation::block_size(const HeapWord* p) const { 252 GenerationBlockSizeClosure blk(p); 253 // Cast away const 254 ((Generation*)this)->space_iterate(&blk); 255 assert(blk.size > 0, "seems reasonable"); 256 return blk.size; 257 } 258 259 class GenerationBlockIsObjClosure : public SpaceClosure { 260 public: 261 const HeapWord* _p; 262 bool is_obj; 263 virtual void do_space(Space* s) { 264 if (!is_obj && s->is_in_reserved(_p)) { 265 is_obj |= s->block_is_obj(_p); 266 } 267 } 268 GenerationBlockIsObjClosure(const HeapWord* p) { _p = p; is_obj = false; } 269 }; 270 271 bool Generation::block_is_obj(const HeapWord* p) const { 272 GenerationBlockIsObjClosure blk(p); 273 // Cast away const 274 ((Generation*)this)->space_iterate(&blk); 275 return blk.is_obj; 276 } 277 278 class GenerationOopIterateClosure : public SpaceClosure { 279 public: 280 ExtendedOopClosure* _cl; 281 virtual void do_space(Space* s) { 282 s->oop_iterate(_cl); 283 } 284 GenerationOopIterateClosure(ExtendedOopClosure* cl) : 285 _cl(cl) {} 286 }; 287 288 void Generation::oop_iterate(ExtendedOopClosure* cl) { 289 GenerationOopIterateClosure 
blk(cl); 290 space_iterate(&blk); 291 } 292 293 void Generation::younger_refs_in_space_iterate(Space* sp, 294 OopsInGenClosure* cl, 295 uint n_threads) { 296 GenRemSet* rs = GenCollectedHeap::heap()->rem_set(); 297 rs->younger_refs_in_space_iterate(sp, cl, n_threads); 298 } 299 300 class GenerationObjIterateClosure : public SpaceClosure { 301 private: 302 ObjectClosure* _cl; 303 public: 304 virtual void do_space(Space* s) { 305 s->object_iterate(_cl); 306 } 307 GenerationObjIterateClosure(ObjectClosure* cl) : _cl(cl) {} 308 }; 309 310 void Generation::object_iterate(ObjectClosure* cl) { 311 GenerationObjIterateClosure blk(cl); 312 space_iterate(&blk); 313 } 314 315 class GenerationSafeObjIterateClosure : public SpaceClosure { 316 private: 317 ObjectClosure* _cl; 318 public: 319 virtual void do_space(Space* s) { 320 s->safe_object_iterate(_cl); 321 } 322 GenerationSafeObjIterateClosure(ObjectClosure* cl) : _cl(cl) {} 323 }; 324 325 void Generation::safe_object_iterate(ObjectClosure* cl) { 326 GenerationSafeObjIterateClosure blk(cl); 327 space_iterate(&blk); 328 } 329 330 void Generation::prepare_for_compaction(CompactPoint* cp) { 331 // Generic implementation, can be specialized 332 CompactibleSpace* space = first_compaction_space(); 333 while (space != NULL) { 334 space->prepare_for_compaction(cp); 335 space = space->next_compaction_space(); 336 } 337 } 338 339 class AdjustPointersClosure: public SpaceClosure { 340 public: 341 void do_space(Space* sp) { 342 sp->adjust_pointers(); 343 } 344 }; 345 346 void Generation::adjust_pointers() { 347 // Note that this is done over all spaces, not just the compactible 348 // ones. 349 AdjustPointersClosure blk; 350 space_iterate(&blk, true); 351 } 352 353 void Generation::compact() { 354 CompactibleSpace* sp = first_compaction_space(); 355 while (sp != NULL) { 356 sp->compact(); 357 sp = sp->next_compaction_space(); 358 } 359 }