src/share/vm/memory/generation.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/memory

src/share/vm/memory/generation.cpp

Print this page
rev 7211 : [mq]: remove_ngen
rev 7212 : [mq]: remove_get_gen
rev 7213 : imported patch move_genspecs
rev 7214 : imported patch remove_n_gen
rev 7215 : imported patch remove_levels


  28 #include "gc_implementation/shared/spaceDecorator.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/allocation.inline.hpp"
  31 #include "memory/blockOffsetTable.inline.hpp"
  32 #include "memory/cardTableRS.hpp"
  33 #include "memory/gcLocker.inline.hpp"
  34 #include "memory/genCollectedHeap.hpp"
  35 #include "memory/genMarkSweep.hpp"
  36 #include "memory/genOopClosures.hpp"
  37 #include "memory/genOopClosures.inline.hpp"
  38 #include "memory/generation.hpp"
  39 #include "memory/generation.inline.hpp"
  40 #include "memory/space.inline.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "runtime/java.hpp"
  43 #include "utilities/copy.hpp"
  44 #include "utilities/events.hpp"
  45 
  46 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  47 
  // Construct a generation over the given reserved space, committing
  // initial_size bytes up front; exits the VM if that commit fails.
  // The full reserved range is remembered in _reserved.
  48 Generation::Generation(ReservedSpace rs, size_t initial_size, int level) :
  49   _level(level),
  50   _ref_processor(NULL) {
  51   if (!_virtual_space.initialize(rs, initial_size)) {
  52     vm_exit_during_initialization("Could not reserve enough space for "
  53                     "object heap");
  54   }
  55   // Mangle all of the initial generation (debug-build aid for
  56   // catching reads of uninitialized heap memory).
  56   if (ZapUnusedHeapArea) {
  57     MemRegion mangle_region((HeapWord*)_virtual_space.low(),
  58       (HeapWord*)_virtual_space.high());
  59     SpaceMangler::mangle_region(mangle_region);
  60   }
  61   _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
  62           (HeapWord*)_virtual_space.high_boundary());
  63 }
  64 
  // Return this generation's GenerationSpec, looked up by level in the
  // heap's spec array.
  65 GenerationSpec* Generation::spec() {
  66   GenCollectedHeap* gch = GenCollectedHeap::heap();
  67   assert(0 <= level() && level() < gch->_n_gens, "Bad gen level");
  68   return gch->_gen_specs[level()];


  69 }
  70 
  // The generation can never grow beyond its reserved address range.
  71 size_t Generation::max_capacity() const {
  72   return reserved().byte_size();
  73 }
  74 
  // Log "prev->used(capacity)" for this generation: exact byte values
  // under PrintGCDetails && Verbose, otherwise rounded to kilobytes.
  75 void Generation::print_heap_change(size_t prev_used) const {
  76   if (PrintGCDetails && Verbose) {
  77     gclog_or_tty->print(" "  SIZE_FORMAT
  78                         "->" SIZE_FORMAT
  79                         "("  SIZE_FORMAT ")",
  80                         prev_used, used(), capacity());
  81   } else {
  82     gclog_or_tty->print(" "  SIZE_FORMAT "K"
  83                         "->" SIZE_FORMAT "K"
  84                         "("  SIZE_FORMAT "K)",
  85                         prev_used / K, used() / K, capacity() / K);
  86   }
  87 }
  88 


  97   }
  98 }
  99 
 // Convenience: print to the default tty stream.
 100 void Generation::print() const { print_on(tty); }
 101 
 // Print name, total/used sizes in K, and the virtual-space bounds as
 // [low_boundary, high, high_boundary).
 102 void Generation::print_on(outputStream* st)  const {
 103   st->print(" %-20s", name());
 104   st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
 105              capacity()/K, used()/K);
 106   st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")",
 107               _virtual_space.low_boundary(),
 108               _virtual_space.high(),
 109               _virtual_space.high_boundary());
 110 }
 111 
 // Convenience: print accumulated GC stats to tty.
 112 void Generation::print_summary_info() { print_summary_info_on(tty); }
 113 
 // Report this generation's accumulated GC time, invocation count and
 // average GC time; the generation is identified by its level.
 114 void Generation::print_summary_info_on(outputStream* st) {
 115   StatRecord* sr = stat_record();
 116   double time = sr->accumulated_time.seconds();








 117   st->print_cr("[Accumulated GC generation %d time %3.7f secs, "
 118                "%d GC's, avg GC time %3.7f]",
 119                level(), time, sr->invocations,
 120                sr->invocations > 0 ? time / sr->invocations : 0.0);
 121 }
 122 
 123 // Utility iterator classes
 124 
 // Space closure that records (in sp) the first space whose reserved
 // region contains _p; sp stays NULL if no space matches.
 125 class GenerationIsInReservedClosure : public SpaceClosure {
 126  public:
 127   const void* _p;
 128   Space* sp;
 129   virtual void do_space(Space* s) {
 130     if (sp == NULL) {
 131       if (s->is_in_reserved(_p)) sp = s;
 132     }
 133   }
 134   GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {}
 135 };
 136 
 137 class GenerationIsInClosure : public SpaceClosure {
 138  public:
 139   const void* _p;


 142     if (sp == NULL) {
 143       if (s->is_in(_p)) sp = s;
 144     }
 145   }
 146   GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {}
 147 };
 148 
 // True iff p lies in some space of this generation.  The cast drops
 // const because space_iterate() is a non-const member.
 149 bool Generation::is_in(const void* p) const {
 150   GenerationIsInClosure blk(p);
 151   ((Generation*)this)->space_iterate(&blk);
 152   return blk.sp != NULL;
 153 }
 154 
 // Checked downcast: only legal for the young-generation kinds
 // (DefNew or ParNew), as asserted below.
 155 DefNewGeneration* Generation::as_DefNewGeneration() {
 156   assert((kind() == Generation::DefNew) ||
 157          (kind() == Generation::ParNew),
 158     "Wrong youngest generation type");
 159   return (DefNewGeneration*) this;
 160 }
 161 
 // Return the next older generation (level + 1), or NULL when this is
 // already the oldest generation in the heap.
 162 Generation* Generation::next_gen() const {
 163   GenCollectedHeap* gch = GenCollectedHeap::heap();
 164   int next = level() + 1;
 165   if (next < gch->_n_gens) {
 166     return gch->_gens[next];
 167   } else {
 168     return NULL;
 169   }
 170 }
 171 
 // Walk this generation and all older ones via next_gen(), returning
 // the largest single contiguous_available() value seen.
 172 size_t Generation::max_contiguous_available() const {
 173   // The largest number of contiguous free words in this or any higher generation.
 174   size_t max = 0;
 175   for (const Generation* gen = this; gen != NULL; gen = gen->next_gen()) {
 176     size_t avail = gen->contiguous_available();
 177     if (avail > max) {
 178       max = avail;
 179     }
 180   }
 181   return max;
 182 }
 183 
 // A promotion of max_promotion_in_bytes is considered safe when some
 // generation has at least that much contiguous free space.
 184 bool Generation::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const {
 185   size_t available = max_contiguous_available();
 186   bool   res = (available >= max_promotion_in_bytes);
 187   if (PrintGC && Verbose) {
 188     gclog_or_tty->print_cr(
 189       "Generation: promo attempt is%s safe: available("SIZE_FORMAT") %s max_promo("SIZE_FORMAT")",
 190       res? "":" not", available, res? ">=":"<",
 191       max_promotion_in_bytes);
 192   }
 193   return res;
 194 }
 195 
 196 // Ignores "ref" and calls allocate().
 197 oop Generation::promote(oop obj, size_t obj_size) {
 198   assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");
 199 
 200 #ifndef PRODUCT
 201   if (Universe::heap()->promotion_should_fail()) {


 360     sp->adjust_pointers();
 361   }
 362 };
 363 
 // Mark-sweep pointer adjustment over every space of the generation.
 364 void Generation::adjust_pointers() {
 365   // Note that this is done over all spaces, not just the compactible
 366   // ones.
 367   AdjustPointersClosure blk;
 368   space_iterate(&blk, true);
 369 }
 370 
 // Compact each compactible space in turn, following the
 // next_compaction_space() chain.
 371 void Generation::compact() {
 372   CompactibleSpace* sp = first_compaction_space();
 373   while (sp != NULL) {
 374     sp->compact();
 375     sp = sp->next_compaction_space();
 376   }
 377 }
 378 
 379 CardGeneration::CardGeneration(ReservedSpace rs, size_t initial_byte_size,
 380                                int level,
 381                                GenRemSet* remset) :
 382   Generation(rs, initial_byte_size, level), _rs(remset),
 383   _shrink_factor(0), _min_heap_delta_bytes(), _capacity_at_prologue(),
 384   _used_at_prologue()
 385 {
 386   HeapWord* start = (HeapWord*)rs.base();
 387   size_t reserved_byte_size = rs.size();
 388   assert((uintptr_t(start) & 3) == 0, "bad alignment");
 389   assert((reserved_byte_size & 3) == 0, "bad alignment");
 390   MemRegion reserved_mr(start, heap_word_size(reserved_byte_size));
 391   _bts = new BlockOffsetSharedArray(reserved_mr,
 392                                     heap_word_size(initial_byte_size));
 393   MemRegion committed_mr(start, heap_word_size(initial_byte_size));
 394   _rs->resize_covered_region(committed_mr);
 395   if (_bts == NULL)
 396     vm_exit_during_initialization("Could not allocate a BlockOffsetArray");
 397 
 398   // Verify that the start and end of this generation is the start of a card.
 399 // If this wasn't true, a single card could span more than one generation,
 400   // which would cause problems when we commit/uncommit memory, and when we
 401   // clear and dirty cards.
 402   guarantee(_rs->is_aligned(reserved_mr.start()), "generation must be card aligned");


 618 
 619 
 // Full mark-sweep collection of this generation.  Reference discovery
 // is temporarily widened to the whole heap for the duration, and GC
 // timing/tracing events are reported around the safepoint operation.
 620 void OneContigSpaceCardGeneration::collect(bool   full,
 621                                            bool   clear_all_soft_refs,
 622                                            size_t size,
 623                                            bool   is_tlab) {
 624   GenCollectedHeap* gch = GenCollectedHeap::heap();
 625 
 626   SpecializationStats::clear();
 627   // Temporarily expand the span of our ref processor, so
 628   // refs discovery is over the entire heap, not just this generation
 629   ReferenceProcessorSpanMutator
 630     x(ref_processor(), gch->reserved_region());
 631 
 632   STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
 633   gc_timer->register_gc_start();
 634 
 635   SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
 636   gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
 637 
 638   GenMarkSweep::invoke_at_safepoint(_level, ref_processor(), clear_all_soft_refs);
 639 
 640   gc_timer->register_gc_end();
 641 
 642   gc_tracer->report_gc_end(gc_timer->gc_end(), gc_timer->time_partitions());
 643 
 644   SpecializationStats::print();
 645 }
 646 
 647 HeapWord*
 648 OneContigSpaceCardGeneration::expand_and_allocate(size_t word_size,
 649                                                   bool is_tlab,
 650                                                   bool parallel) {
 651   assert(!is_tlab, "OneContigSpaceCardGeneration does not support TLAB allocation");
 652   if (parallel) {
 653     MutexLocker x(ParGCRareEvent_lock);
 654     HeapWord* result = NULL;
 655     size_t byte_size = word_size * HeapWordSize;
 656     while (true) {
 657       expand(byte_size, _min_heap_delta_bytes);
 658       if (GCExpandToAllocateDelayMillis > 0) {




  28 #include "gc_implementation/shared/spaceDecorator.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/allocation.inline.hpp"
  31 #include "memory/blockOffsetTable.inline.hpp"
  32 #include "memory/cardTableRS.hpp"
  33 #include "memory/gcLocker.inline.hpp"
  34 #include "memory/genCollectedHeap.hpp"
  35 #include "memory/genMarkSweep.hpp"
  36 #include "memory/genOopClosures.hpp"
  37 #include "memory/genOopClosures.inline.hpp"
  38 #include "memory/generation.hpp"
  39 #include "memory/generation.inline.hpp"
  40 #include "memory/space.inline.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "runtime/java.hpp"
  43 #include "utilities/copy.hpp"
  44 #include "utilities/events.hpp"
  45 
  46 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  47 
  // Construct a generation over the given reserved space, committing
  // initial_size bytes up front; exits the VM if that commit fails.
  // The full reserved range is remembered in _reserved.
  48 Generation::Generation(ReservedSpace rs, size_t initial_size) :

  49   _ref_processor(NULL) {
  50   if (!_virtual_space.initialize(rs, initial_size)) {
  51     vm_exit_during_initialization("Could not reserve enough space for "
  52                     "object heap");
  53   }
  54   // Mangle all of the initial generation (debug-build aid for
  55   // catching reads of uninitialized heap memory).
  55   if (ZapUnusedHeapArea) {
  56     MemRegion mangle_region((HeapWord*)_virtual_space.low(),
  57       (HeapWord*)_virtual_space.high());
  58     SpaceMangler::mangle_region(mangle_region);
  59   }
  60   _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
  61           (HeapWord*)_virtual_space.high_boundary());
  62 }
  63 
  // Return this generation's GenerationSpec from the collector policy,
  // selecting the young spec when this is the young generation and the
  // old spec otherwise (the heap has exactly two generations).
  64 GenerationSpec* Generation::spec() {
  65   GenCollectedHeap* gch = GenCollectedHeap::heap();
  66   if (this == gch->young_gen()) {
  67     return gch->gen_policy()->young_gen_spec();
  68   }
  69   return gch->gen_policy()->old_gen_spec();
  70 }
  71 
  // The generation can never grow beyond its reserved address range.
  72 size_t Generation::max_capacity() const {
  73   return reserved().byte_size();
  74 }
  75 
  // Log "prev->used(capacity)" for this generation: exact byte values
  // under PrintGCDetails && Verbose, otherwise rounded to kilobytes.
  76 void Generation::print_heap_change(size_t prev_used) const {
  77   if (PrintGCDetails && Verbose) {
  78     gclog_or_tty->print(" "  SIZE_FORMAT
  79                         "->" SIZE_FORMAT
  80                         "("  SIZE_FORMAT ")",
  81                         prev_used, used(), capacity());
  82   } else {
  83     gclog_or_tty->print(" "  SIZE_FORMAT "K"
  84                         "->" SIZE_FORMAT "K"
  85                         "("  SIZE_FORMAT "K)",
  86                         prev_used / K, used() / K, capacity() / K);
  87   }
  88 }
  89 


  98   }
  99 }
 100 
 // Convenience: print to the default tty stream.
 101 void Generation::print() const { print_on(tty); }
 102 
 // Print name, total/used sizes in K, and the virtual-space bounds as
 // [low_boundary, high, high_boundary).
 103 void Generation::print_on(outputStream* st)  const {
 104   st->print(" %-20s", name());
 105   st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
 106              capacity()/K, used()/K);
 107   st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")",
 108               _virtual_space.low_boundary(),
 109               _virtual_space.high(),
 110               _virtual_space.high_boundary());
 111 }
 112 
 // Convenience: print accumulated GC stats to tty.
 113 void Generation::print_summary_info() { print_summary_info_on(tty); }
 114 
 // Report this generation's accumulated GC time, invocation count and
 // average GC time.  A pseudo-level (0 = young, 1 = old) is computed
 // locally to keep the log format unchanged after level removal.
 115 void Generation::print_summary_info_on(outputStream* st) {
 116   StatRecord* sr = stat_record();
 117   double time = sr->accumulated_time.seconds();
 118   // I didn't want to change the logging when removing the level concept,
 119   // but I guess this logging could say young/old or something instead of 0/1.
 120   int level;
 121   if (this == GenCollectedHeap::heap()->young_gen()) {
 122     level = 0;
 123   } else {
 124     level = 1;
 125   }
 126   st->print_cr("[Accumulated GC generation %d time %3.7f secs, "
 127                "%d GC's, avg GC time %3.7f]",
 128                level, time, sr->invocations,
 129                sr->invocations > 0 ? time / sr->invocations : 0.0);
 130 }
 131 
 132 // Utility iterator classes
 133 
 // Space closure that records (in sp) the first space whose reserved
 // region contains _p; sp stays NULL if no space matches.
 134 class GenerationIsInReservedClosure : public SpaceClosure {
 135  public:
 136   const void* _p;
 137   Space* sp;
 138   virtual void do_space(Space* s) {
 139     if (sp == NULL) {
 140       if (s->is_in_reserved(_p)) sp = s;
 141     }
 142   }
 143   GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {}
 144 };
 145 
 146 class GenerationIsInClosure : public SpaceClosure {
 147  public:
 148   const void* _p;


 151     if (sp == NULL) {
 152       if (s->is_in(_p)) sp = s;
 153     }
 154   }
 155   GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {}
 156 };
 157 
 // True iff p lies in some space of this generation.  The cast drops
 // const because space_iterate() is a non-const member.
 158 bool Generation::is_in(const void* p) const {
 159   GenerationIsInClosure blk(p);
 160   ((Generation*)this)->space_iterate(&blk);
 161   return blk.sp != NULL;
 162 }
 163 
 // Checked downcast: only legal for the young-generation kinds
 // (DefNew or ParNew), as asserted below.
 164 DefNewGeneration* Generation::as_DefNewGeneration() {
 165   assert((kind() == Generation::DefNew) ||
 166          (kind() == Generation::ParNew),
 167     "Wrong youngest generation type");
 168   return (DefNewGeneration*) this;
 169 }
 170 










 // With only two generations, compare this generation's contiguous free
 // space with the old generation's (when called on the young gen) and
 // return the larger of the two.
 171 size_t Generation::max_contiguous_available() const {
 172   // The largest number of contiguous free words in this or any higher generation.
 173   size_t avail = contiguous_available();
 174   size_t old_avail = 0;
 175   if (this == GenCollectedHeap::heap()->young_gen()) {
 176     old_avail = GenCollectedHeap::heap()->old_gen()->contiguous_available();


 177   }
 178   return MAX2(avail, old_avail);
 179 }
 180 
 // A promotion of max_promotion_in_bytes is considered safe when some
 // generation has at least that much contiguous free space.
 181 bool Generation::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const {
 182   size_t available = max_contiguous_available();
 183   bool   res = (available >= max_promotion_in_bytes);
 184   if (PrintGC && Verbose) {
 185     gclog_or_tty->print_cr(
 186       "Generation: promo attempt is%s safe: available("SIZE_FORMAT") %s max_promo("SIZE_FORMAT")",
 187       res? "":" not", available, res? ">=":"<",
 188       max_promotion_in_bytes);
 189   }
 190   return res;
 191 }
 192 
 193 // Ignores "ref" and calls allocate().
 194 oop Generation::promote(oop obj, size_t obj_size) {
 195   assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");
 196 
 197 #ifndef PRODUCT
 198   if (Universe::heap()->promotion_should_fail()) {


 357     sp->adjust_pointers();
 358   }
 359 };
 360 
 // Mark-sweep pointer adjustment over every space of the generation.
 361 void Generation::adjust_pointers() {
 362   // Note that this is done over all spaces, not just the compactible
 363   // ones.
 364   AdjustPointersClosure blk;
 365   space_iterate(&blk, true);
 366 }
 367 
 // Compact each compactible space in turn, following the
 // next_compaction_space() chain.
 368 void Generation::compact() {
 369   CompactibleSpace* sp = first_compaction_space();
 370   while (sp != NULL) {
 371     sp->compact();
 372     sp = sp->next_compaction_space();
 373   }
 374 }
 375 
 376 CardGeneration::CardGeneration(ReservedSpace rs, size_t initial_byte_size,

 377                                GenRemSet* remset) :
 378   Generation(rs, initial_byte_size), _rs(remset),
 379   _shrink_factor(0), _min_heap_delta_bytes(), _capacity_at_prologue(),
 380   _used_at_prologue()
 381 {
 382   HeapWord* start = (HeapWord*)rs.base();
 383   size_t reserved_byte_size = rs.size();
 384   assert((uintptr_t(start) & 3) == 0, "bad alignment");
 385   assert((reserved_byte_size & 3) == 0, "bad alignment");
 386   MemRegion reserved_mr(start, heap_word_size(reserved_byte_size));
 387   _bts = new BlockOffsetSharedArray(reserved_mr,
 388                                     heap_word_size(initial_byte_size));
 389   MemRegion committed_mr(start, heap_word_size(initial_byte_size));
 390   _rs->resize_covered_region(committed_mr);
 391   if (_bts == NULL)
 392     vm_exit_during_initialization("Could not allocate a BlockOffsetArray");
 393 
 394   // Verify that the start and end of this generation is the start of a card.
 395 // If this wasn't true, a single card could span more than one generation,
 396   // which would cause problems when we commit/uncommit memory, and when we
 397   // clear and dirty cards.
 398   guarantee(_rs->is_aligned(reserved_mr.start()), "generation must be card aligned");


 614 
 615 
 // Full mark-sweep collection of this generation.  Reference discovery
 // is temporarily widened to the whole heap for the duration, and GC
 // timing/tracing events are reported around the safepoint operation.
 616 void OneContigSpaceCardGeneration::collect(bool   full,
 617                                            bool   clear_all_soft_refs,
 618                                            size_t size,
 619                                            bool   is_tlab) {
 620   GenCollectedHeap* gch = GenCollectedHeap::heap();
 621 
 622   SpecializationStats::clear();
 623   // Temporarily expand the span of our ref processor, so
 624   // refs discovery is over the entire heap, not just this generation
 625   ReferenceProcessorSpanMutator
 626     x(ref_processor(), gch->reserved_region());
 627 
 628   STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
 629   gc_timer->register_gc_start();
 630 
 631   SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
 632   gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
 633 
 634   GenMarkSweep::invoke_at_safepoint(ref_processor(), clear_all_soft_refs);
 635 
 636   gc_timer->register_gc_end();
 637 
 638   gc_tracer->report_gc_end(gc_timer->gc_end(), gc_timer->time_partitions());
 639 
 640   SpecializationStats::print();
 641 }
 642 
 643 HeapWord*
 644 OneContigSpaceCardGeneration::expand_and_allocate(size_t word_size,
 645                                                   bool is_tlab,
 646                                                   bool parallel) {
 647   assert(!is_tlab, "OneContigSpaceCardGeneration does not support TLAB allocation");
 648   if (parallel) {
 649     MutexLocker x(ParGCRareEvent_lock);
 650     HeapWord* result = NULL;
 651     size_t byte_size = word_size * HeapWordSize;
 652     while (true) {
 653       expand(byte_size, _min_heap_delta_bytes);
 654       if (GCExpandToAllocateDelayMillis > 0) {


src/share/vm/memory/generation.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File