1 /* 2 * Copyright (c) 2001, 2013, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "gc_implementation/shared/collectorCounters.hpp"
#include "gc_implementation/shared/parGCAllocBuffer.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/blockOffsetTable.inline.hpp"
#include "memory/generation.inline.hpp"
#include "memory/generationSpec.hpp"
#include "memory/space.hpp"
#include "memory/tenuredGeneration.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/java.hpp"
#include "utilities/macros.hpp"

// Construct the tenured (old) generation over the reserved space:
// create the TenuredSpace backed by the block-offset table (_bts),
// allocate GC statistics and the perf counters published for this
// generation, and — when ParNew is the young collector — one
// promotion buffer (PLAB with BOT updates) per parallel GC thread.
TenuredGeneration::TenuredGeneration(ReservedSpace rs,
                                     size_t initial_byte_size, int level,
                                     GenRemSet* remset) :
  OneContigSpaceCardGeneration(rs, initial_byte_size,
                               level, remset, NULL)
{
  // The committed part of _virtual_space bounds the single contiguous space.
  HeapWord* bottom = (HeapWord*) _virtual_space.low();
  HeapWord* end    = (HeapWord*) _virtual_space.high();
  _the_space  = new TenuredSpace(_bts, MemRegion(bottom, end));
  _the_space->reset_saved_mark();
  _shrink_factor = 0;          // no pending shrink yet
  _capacity_at_prologue = 0;   // set for real in gc_prologue()

  _gc_stats = new GCStats();

  // initialize performance counters

  const char* gen_name = "old";
  GenCollectorPolicy* gcp = (GenCollectorPolicy*) GenCollectedHeap::heap()->collector_policy();

  // Generation Counters -- generation 1, 1 subspace
  _gen_counters = new GenerationCounters(gen_name, 1, 1,
      gcp->min_old_size(), gcp->max_old_size(), &_virtual_space);

  // "MSC" = mark-sweep-compact, the full collector for this generation.
  _gc_counters = new CollectorCounters("MSC", 1);

  _space_counters = new CSpaceCounters(gen_name, 0,
                                       _virtual_space.reserved_size(),
                                       _the_space, _gen_counters);
#if INCLUDE_ALL_GCS
  if (UseParNewGC) {
    // One promotion PLAB per parallel GC thread; each buffer keeps the
    // block-offset table (_bts) up to date as objects are allocated in it.
    typedef ParGCAllocBufferWithBOT* ParGCAllocBufferWithBOTPtr;
    _alloc_buffers = NEW_C_HEAP_ARRAY(ParGCAllocBufferWithBOTPtr,
                                      ParallelGCThreads, mtGC);
    if (_alloc_buffers == NULL)
      vm_exit_during_initialization("Could not allocate alloc_buffers");
    for (uint i = 0; i < ParallelGCThreads; i++) {
      _alloc_buffers[i] =
        new ParGCAllocBufferWithBOT(OldPLABSize, _bts);
      if (_alloc_buffers[i] == NULL)
        vm_exit_during_initialization("Could not allocate alloc_buffers");
    }
  } else {
    _alloc_buffers = NULL;
  }
#endif // INCLUDE_ALL_GCS
}


const char* TenuredGeneration::name() const {
  return "tenured generation";
}

// Record capacity/used as of the start of this GC so that
// should_collect() and update_gc_stats() can observe growth
// (expansion / promotion) that happens during the collection.
// Note: does not chain to the superclass prologue — assumed
// intentional here; gc_epilogue() below does chain.
void TenuredGeneration::gc_prologue(bool full) {
  _capacity_at_prologue = capacity();
  _used_at_prologue = used();
  if (VerifyBeforeGC) {
    verify_alloc_buffers_clean();
  }
}

void TenuredGeneration::gc_epilogue(bool full) {
  if (VerifyAfterGC) {
    verify_alloc_buffers_clean();
  }
  OneContigSpaceCardGeneration::gc_epilogue(full);
}


// Decide whether this generation should be collected now.
// Returns true if the caller requested a full GC, if an allocation of
// `size` words (TLAB or not) warrants collecting, if free space is
// nearly exhausted, or if the generation had to expand during this GC
// to absorb promotions.
bool TenuredGeneration::should_collect(bool full,
                                       size_t size,
                                       bool is_tlab) {
  // This should be one big conditional or (||), but I want to be able to tell
  // why it returns what it returns (without re-evaluating the conditionals
  // in case they aren't idempotent), so I'm doing it this way.
  // DeMorgan says it's okay.
  bool result = false;
  if (!result && full) {
    result = true;
    if (PrintGC && Verbose) {
      gclog_or_tty->print_cr("TenuredGeneration::should_collect: because"
                    " full");
    }
  }
  if (!result && should_allocate(size, is_tlab)) {
    result = true;
    if (PrintGC && Verbose) {
      gclog_or_tty->print_cr("TenuredGeneration::should_collect: because"
                    " should_allocate(" SIZE_FORMAT ")",
                    size);
    }
  }
  // If we don't have very much free space.
  // XXX: 10000 should be a percentage of the capacity!!!
  if (!result && free() < 10000) {
    result = true;
    if (PrintGC && Verbose) {
      gclog_or_tty->print_cr("TenuredGeneration::should_collect: because"
                    " free(): " SIZE_FORMAT,
                    free());
    }
  }
  // If we had to expand to accommodate promotions from younger generations
  if (!result && _capacity_at_prologue < capacity()) {
    result = true;
    if (PrintGC && Verbose) {
      gclog_or_tty->print_cr("TenuredGeneration::should_collect: because"
                    "_capacity_at_prologue: " SIZE_FORMAT " < capacity(): " SIZE_FORMAT,
                    _capacity_at_prologue, capacity());
    }
  }
  return result;
}

// Collect this generation. Retire any retained per-thread promotion
// buffers first so the space is fully parsable, then delegate the
// actual collection to the superclass.
void TenuredGeneration::collect(bool           full,
                                bool           clear_all_soft_refs,
                                size_t         size,
                                bool           is_tlab) {
  retire_alloc_buffers_before_full_gc();
  OneContigSpaceCardGeneration::collect(full, clear_all_soft_refs,
                                        size, is_tlab);
}

// Resize the generation after a collection. All sizing policy lives in
// CardGeneration::compute_new_size(); the snapshot/assert here only
// checks that resizing did not change the amount of used space.
void TenuredGeneration::compute_new_size() {
  assert_locked_or_safepoint(Heap_lock);

  // Compute some numbers about the state of the heap.
  const size_t used_after_gc = used();
  const size_t capacity_after_gc = capacity();

  CardGeneration::compute_new_size();

  assert(used() == used_after_gc && used_after_gc <= capacity(),
         err_msg("used: " SIZE_FORMAT " used_after_gc: " SIZE_FORMAT
                 " capacity: " SIZE_FORMAT, used(), used_after_gc, capacity()));
}

// After a (non-full) collection of the next-younger level, sample the
// number of bytes promoted into this generation since gc_prologue()
// into the promotion-size average used by promotion_attempt_is_safe().
void TenuredGeneration::update_gc_stats(int current_level,
                                        bool full) {
  // If the next lower level(s) has been collected, gather any statistics
  // that are of interest at this point.
  if (!full && (current_level + 1) == level()) {
    // Calculate size of data promoted from the younger generations
    // before doing the collection.
    size_t used_before_gc = used();

    // If the younger gen collections were skipped, then the
    // number of promoted bytes will be 0 and adding it to the
    // average will incorrectly lessen the average.  It is, however,
    // also possible that no promotion was needed.
    if (used_before_gc >= _used_at_prologue) {
      size_t promoted_in_bytes = used_before_gc - _used_at_prologue;
      gc_stats()->avg_promoted()->sample(promoted_in_bytes);
    }
  }
}

// Refresh the perf counters published for jstat et al.
void TenuredGeneration::update_counters() {
  if (UsePerfData) {
    _space_counters->update_all();
    _gen_counters->update_all();
  }
}


#if INCLUDE_ALL_GCS
// Promote `old` (of `word_sz` words, with saved mark word `m`) into this
// generation on behalf of parallel GC thread `thread_num`.  Fast path
// allocates from the thread's PLAB; slow paths either replace the PLAB
// or allocate the object directly, expanding the generation if needed.
// Returns the new oop, or NULL if no space could be found (or if
// promotion_should_fail() fires in non-product builds).
oop TenuredGeneration::par_promote(int thread_num,
                                   oop old, markOop m, size_t word_sz) {

  ParGCAllocBufferWithBOT* buf = _alloc_buffers[thread_num];
  HeapWord* obj_ptr = buf->allocate(word_sz);
  bool is_lab = true;
  if (obj_ptr == NULL) {
#ifndef PRODUCT
    if (Universe::heap()->promotion_should_fail()) {
      return NULL;
    }
#endif  // #ifndef PRODUCT

    // Slow path:
    if (word_sz * 100 < ParallelGCBufferWastePct * buf->word_sz()) {
      // Is small enough; abandon this buffer and start a new one.
      // (Wasting the remainder of the old buffer stays under the
      // ParallelGCBufferWastePct threshold.)
      size_t buf_size = buf->word_sz();
      HeapWord* buf_space =
        TenuredGeneration::par_allocate(buf_size, false);
      if (buf_space == NULL) {
        buf_space = expand_and_allocate(buf_size, false, true /* parallel*/);
      }
      if (buf_space != NULL) {
        buf->retire(false, false);
        buf->set_buf(buf_space);
        obj_ptr = buf->allocate(word_sz);
        assert(obj_ptr != NULL, "Buffer was definitely big enough...");
      }
    };
    // Otherwise, buffer allocation failed; try allocating object
    // individually.
    if (obj_ptr == NULL) {
      obj_ptr = TenuredGeneration::par_allocate(word_sz, false);
      if (obj_ptr == NULL) {
        obj_ptr = expand_and_allocate(word_sz, false, true /* parallel */);
      }
    }
    if (obj_ptr == NULL) return NULL;
  }
  assert(obj_ptr != NULL, "program logic");
  Copy::aligned_disjoint_words((HeapWord*)old, obj_ptr, word_sz);
  oop obj = oop(obj_ptr);
  // Restore the mark word copied above.
  obj->set_mark(m);
  return obj;
}

// Undo a promotion allocation (e.g. when another thread won the race to
// forward the object).  If the space came from the thread's PLAB, give
// it back; otherwise fill it with a dummy object to keep the heap parsable.
void TenuredGeneration::par_promote_alloc_undo(int thread_num,
                                               HeapWord* obj,
                                               size_t word_sz) {
  ParGCAllocBufferWithBOT* buf = _alloc_buffers[thread_num];
  if (buf->contains(obj)) {
    guarantee(buf->contains(obj + word_sz - 1),
              "should contain whole object");
    buf->undo_allocation(obj, word_sz);
  } else {
    CollectedHeap::fill_with_object(obj, word_sz);
  }
}

// End-of-GC hook per thread: retire the thread's PLAB, optionally
// retaining its unused tail for the next GC (ParallelGCRetainPLAB).
void TenuredGeneration::par_promote_alloc_done(int thread_num) {
  ParGCAllocBufferWithBOT* buf = _alloc_buffers[thread_num];
  buf->retire(true, ParallelGCRetainPLAB);
}

// Before a full GC, retire (and do not retain) every thread's PLAB so
// the entire space is parsable for the mark-sweep-compact collector.
void TenuredGeneration::retire_alloc_buffers_before_full_gc() {
  if (UseParNewGC) {
    for (uint i = 0; i < ParallelGCThreads; i++) {
      _alloc_buffers[i]->retire(true /*end_of_gc*/, false /*retain*/);
    }
  }
}

// Verify that any retained parallel allocation buffers do not
// intersect with dirty cards.
void TenuredGeneration::verify_alloc_buffers_clean() {
  if (UseParNewGC) {
    for (uint i = 0; i < ParallelGCThreads; i++) {
      _rs->verify_aligned_region_empty(_alloc_buffers[i]->range());
    }
  }
}

#else  // INCLUDE_ALL_GCS
// Without the parallel collectors compiled in there are no PLABs,
// so these are no-ops.
void TenuredGeneration::retire_alloc_buffers_before_full_gc() {}
void TenuredGeneration::verify_alloc_buffers_clean() {}
#endif // INCLUDE_ALL_GCS

// Conservative check, used before a young GC: is it "safe" to assume
// all promotions into this generation will succeed?  True if the
// largest contiguous free chunk covers either the padded average of
// past promotions or the worst-case promotion size.
bool TenuredGeneration::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const {
  size_t available = max_contiguous_available();
  size_t av_promo  = (size_t)gc_stats()->avg_promoted()->padded_average();
  bool   res = (available >= av_promo) || (available >= max_promotion_in_bytes);
  if (PrintGC && Verbose) {
    gclog_or_tty->print_cr(
      "Tenured: promo attempt is%s safe: available("SIZE_FORMAT") %s av_promo("SIZE_FORMAT"),"
      "max_promo("SIZE_FORMAT")",
      res? "":" not", available, res? ">=":"<",
      av_promo, max_promotion_in_bytes);
  }
  return res;
}