src/share/vm/memory/generation.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/memory

src/share/vm/memory/generation.cpp

Print this page
rev 7211 : [mq]: remove_ngen
rev 7212 : [mq]: remove_get_gen
rev 7213 : imported patch move_genspecs
rev 7214 : imported patch remove_n_gen
rev 7215 : imported patch remove_levels


  28 #include "gc_implementation/shared/spaceDecorator.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/allocation.inline.hpp"
  31 #include "memory/blockOffsetTable.inline.hpp"
  32 #include "memory/cardTableRS.hpp"
  33 #include "memory/gcLocker.inline.hpp"
  34 #include "memory/genCollectedHeap.hpp"
  35 #include "memory/genMarkSweep.hpp"
  36 #include "memory/genOopClosures.hpp"
  37 #include "memory/genOopClosures.inline.hpp"
  38 #include "memory/generation.hpp"
  39 #include "memory/generation.inline.hpp"
  40 #include "memory/space.inline.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "runtime/java.hpp"
  43 #include "utilities/copy.hpp"
  44 #include "utilities/events.hpp"
  45 
  46 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  47 
  48 Generation::Generation(ReservedSpace rs, size_t initial_size, int level) :
  49   _level(level),
  50   _ref_processor(NULL) {
  51   if (!_virtual_space.initialize(rs, initial_size)) {
  52     vm_exit_during_initialization("Could not reserve enough space for "
  53                     "object heap");
  54   }
  55   // Mangle all of the the initial generation.
  56   if (ZapUnusedHeapArea) {
  57     MemRegion mangle_region((HeapWord*)_virtual_space.low(),
  58       (HeapWord*)_virtual_space.high());
  59     SpaceMangler::mangle_region(mangle_region);
  60   }
  61   _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
  62           (HeapWord*)_virtual_space.high_boundary());
  63 }
  64 
  65 GenerationSpec* Generation::spec() {
  66   GenCollectorPolicy* gcp = GenCollectedHeap::heap()->gen_policy();
  67   assert(0 <= level() && level() < gcp->number_of_generations(), "Bad gen level");
  68   return level() == 0 ? gcp->young_gen_spec() : gcp->old_gen_spec();


  69 }
  70 
  71 size_t Generation::max_capacity() const {
  72   return reserved().byte_size();
  73 }
  74 
  75 void Generation::print_heap_change(size_t prev_used) const {
  76   if (PrintGCDetails && Verbose) {
  77     gclog_or_tty->print(" "  SIZE_FORMAT
  78                         "->" SIZE_FORMAT
  79                         "("  SIZE_FORMAT ")",
  80                         prev_used, used(), capacity());
  81   } else {
  82     gclog_or_tty->print(" "  SIZE_FORMAT "K"
  83                         "->" SIZE_FORMAT "K"
  84                         "("  SIZE_FORMAT "K)",
  85                         prev_used / K, used() / K, capacity() / K);
  86   }
  87 }
  88 


  97   }
  98 }
  99 
 100 void Generation::print() const { print_on(tty); }
 101 
     // Print a one-line summary: generation name, total/used sizes in KB,
     // and the [low_boundary, high, high_boundary) bounds of its virtual space.
 102 void Generation::print_on(outputStream* st)  const {
 103   st->print(" %-20s", name());
 104   st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
 105              capacity()/K, used()/K);
 106   st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")",
 107               _virtual_space.low_boundary(),
 108               _virtual_space.high(),
 109               _virtual_space.high_boundary());
 110 }
 111 
     // Convenience wrapper: print the accumulated-GC-time summary to tty.
 112 void Generation::print_summary_info() { print_summary_info_on(tty); }
 113 
     // Print accumulated GC statistics for this generation: total GC time,
     // number of invocations, and average time per invocation. The
     // generation is identified by its numeric level.
 114 void Generation::print_summary_info_on(outputStream* st) {
 115   StatRecord* sr = stat_record();
 116   double time = sr->accumulated_time.seconds();








     // Guard against division by zero when no GC has run yet.
 117   st->print_cr("[Accumulated GC generation %d time %3.7f secs, "
 118                "%d GC's, avg GC time %3.7f]",
 119                level(), time, sr->invocations,
 120                sr->invocations > 0 ? time / sr->invocations : 0.0);
 121 }
 122 
 123 // Utility iterator classes
 124 
 125 class GenerationIsInReservedClosure : public SpaceClosure {
 126  public:
 127   const void* _p;
 128   Space* sp;
 129   virtual void do_space(Space* s) {
 130     if (sp == NULL) {
 131       if (s->is_in_reserved(_p)) sp = s;
 132     }
 133   }
 134   GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {}
 135 };
 136 
 137 class GenerationIsInClosure : public SpaceClosure {
 138  public:
 139   const void* _p;


 142     if (sp == NULL) {
 143       if (s->is_in(_p)) sp = s;
 144     }
 145   }
 146   GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {}
 147 };
 148 
     // An address is "in" the generation iff it is in one of the
     // generation's spaces (committed area, not just reserved).
 149 bool Generation::is_in(const void* p) const {
 150   GenerationIsInClosure blk(p);
 151   ((Generation*)this)->space_iterate(&blk);
 152   return blk.sp != NULL;
 153 }
 154 
     // Checked downcast: only the young generation kinds (DefNew and its
     // parallel variant ParNew) may be viewed as a DefNewGeneration.
 155 DefNewGeneration* Generation::as_DefNewGeneration() {
 156   assert((kind() == Generation::DefNew) ||
 157          (kind() == Generation::ParNew),
 158     "Wrong youngest generation type");
 159   return (DefNewGeneration*) this;
 160 }
 161 
 162 Generation* Generation::next_gen() const {
 163   GenCollectedHeap* gch = GenCollectedHeap::heap();
 164   if (level() == 0) {
 165     return gch->old_gen();
 166   } else {
 167     return NULL;
 168   }
 169 }
 170 
 171 size_t Generation::max_contiguous_available() const {
 172   // The largest number of contiguous free words in this or any higher generation.
 173   size_t max = 0;
 174   for (const Generation* gen = this; gen != NULL; gen = gen->next_gen()) {
 175     size_t avail = gen->contiguous_available();
 176     if (avail > max) {
 177       max = avail;
 178     }
 179   }
 180   return max;
 181 }
 182 
 183 bool Generation::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const {
 184   size_t available = max_contiguous_available();
 185   bool   res = (available >= max_promotion_in_bytes);
 186   if (PrintGC && Verbose) {
 187     gclog_or_tty->print_cr(
 188       "Generation: promo attempt is%s safe: available("SIZE_FORMAT") %s max_promo("SIZE_FORMAT")",
 189       res? "":" not", available, res? ">=":"<",
 190       max_promotion_in_bytes);
 191   }
 192   return res;
 193 }
 194 
 195 // Ignores "ref" and calls allocate().
 196 oop Generation::promote(oop obj, size_t obj_size) {
 197   assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");
 198 
 199 #ifndef PRODUCT
 200   if (Universe::heap()->promotion_should_fail()) {


 359     sp->adjust_pointers();
 360   }
 361 };
 362 
 363 void Generation::adjust_pointers() {
 364   // Note that this is done over all spaces, not just the compactible
 365   // ones.
 366   AdjustPointersClosure blk;
 367   space_iterate(&blk, true);
 368 }
 369 
 370 void Generation::compact() {
 371   CompactibleSpace* sp = first_compaction_space();
 372   while (sp != NULL) {
 373     sp->compact();
 374     sp = sp->next_compaction_space();
 375   }
 376 }
 377 
 378 CardGeneration::CardGeneration(ReservedSpace rs, size_t initial_byte_size,
 379                                int level,
 380                                GenRemSet* remset) :
 381   Generation(rs, initial_byte_size, level), _rs(remset),
 382   _shrink_factor(0), _min_heap_delta_bytes(), _capacity_at_prologue(),
 383   _used_at_prologue()
 384 {
 385   HeapWord* start = (HeapWord*)rs.base();
 386   size_t reserved_byte_size = rs.size();
 387   assert((uintptr_t(start) & 3) == 0, "bad alignment");
 388   assert((reserved_byte_size & 3) == 0, "bad alignment");
 389   MemRegion reserved_mr(start, heap_word_size(reserved_byte_size));
 390   _bts = new BlockOffsetSharedArray(reserved_mr,
 391                                     heap_word_size(initial_byte_size));
 392   MemRegion committed_mr(start, heap_word_size(initial_byte_size));
 393   _rs->resize_covered_region(committed_mr);
 394   if (_bts == NULL)
 395     vm_exit_during_initialization("Could not allocate a BlockOffsetArray");
 396 
 397   // Verify that the start and end of this generation is the start of a card.
 398   // If this wasn't true, a single card could span more than one generation,
 399   // which would cause problems when we commit/uncommit memory, and when we
 400   // clear and dirty cards.
 401   guarantee(_rs->is_aligned(reserved_mr.start()), "generation must be card aligned");


 617 
 618 
     // Perform a full mark-sweep collection of this generation. The timer
     // and tracer bracket GenMarkSweep::invoke_at_safepoint, which does the
     // actual collection work.
 619 void OneContigSpaceCardGeneration::collect(bool   full,
 620                                            bool   clear_all_soft_refs,
 621                                            size_t size,
 622                                            bool   is_tlab) {
 623   GenCollectedHeap* gch = GenCollectedHeap::heap();
 624 
 625   SpecializationStats::clear();
 626   // Temporarily expand the span of our ref processor, so
 627   // refs discovery is over the entire heap, not just this generation
 628   ReferenceProcessorSpanMutator
 629     x(ref_processor(), gch->reserved_region());
 630 
 631   STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
 632   gc_timer->register_gc_start();
 633 
 634   SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
 635   gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
 636 
     // _level identifies which generation the mark-sweep starts from.
 637   GenMarkSweep::invoke_at_safepoint(_level, ref_processor(), clear_all_soft_refs);
 638 
 639   gc_timer->register_gc_end();
 640 
 641   gc_tracer->report_gc_end(gc_timer->gc_end(), gc_timer->time_partitions());
 642 
 643   SpecializationStats::print();
 644 }
 645 
 646 HeapWord*
 647 OneContigSpaceCardGeneration::expand_and_allocate(size_t word_size,
 648                                                   bool is_tlab,
 649                                                   bool parallel) {
 650   assert(!is_tlab, "OneContigSpaceCardGeneration does not support TLAB allocation");
 651   if (parallel) {
 652     MutexLocker x(ParGCRareEvent_lock);
 653     HeapWord* result = NULL;
 654     size_t byte_size = word_size * HeapWordSize;
 655     while (true) {
 656       expand(byte_size, _min_heap_delta_bytes);
 657       if (GCExpandToAllocateDelayMillis > 0) {




  28 #include "gc_implementation/shared/spaceDecorator.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/allocation.inline.hpp"
  31 #include "memory/blockOffsetTable.inline.hpp"
  32 #include "memory/cardTableRS.hpp"
  33 #include "memory/gcLocker.inline.hpp"
  34 #include "memory/genCollectedHeap.hpp"
  35 #include "memory/genMarkSweep.hpp"
  36 #include "memory/genOopClosures.hpp"
  37 #include "memory/genOopClosures.inline.hpp"
  38 #include "memory/generation.hpp"
  39 #include "memory/generation.inline.hpp"
  40 #include "memory/space.inline.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "runtime/java.hpp"
  43 #include "utilities/copy.hpp"
  44 #include "utilities/events.hpp"
  45 
  46 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  47 
  48 Generation::Generation(ReservedSpace rs, size_t initial_size) :

  49   _ref_processor(NULL) {
  50   if (!_virtual_space.initialize(rs, initial_size)) {
  51     vm_exit_during_initialization("Could not reserve enough space for "
  52                     "object heap");
  53   }
  54   // Mangle all of the the initial generation.
  55   if (ZapUnusedHeapArea) {
  56     MemRegion mangle_region((HeapWord*)_virtual_space.low(),
  57       (HeapWord*)_virtual_space.high());
  58     SpaceMangler::mangle_region(mangle_region);
  59   }
  60   _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
  61           (HeapWord*)_virtual_space.high_boundary());
  62 }
  63 
  64 GenerationSpec* Generation::spec() {
  65   GenCollectedHeap* gch = GenCollectedHeap::heap();
  66   if (this == gch->young_gen()) {
  67     return gch->gen_policy()->young_gen_spec();
  68   }
  69   return gch->gen_policy()->old_gen_spec();
  70 }
  71 
  72 size_t Generation::max_capacity() const {
  73   return reserved().byte_size();
  74 }
  75 
  76 void Generation::print_heap_change(size_t prev_used) const {
  77   if (PrintGCDetails && Verbose) {
  78     gclog_or_tty->print(" "  SIZE_FORMAT
  79                         "->" SIZE_FORMAT
  80                         "("  SIZE_FORMAT ")",
  81                         prev_used, used(), capacity());
  82   } else {
  83     gclog_or_tty->print(" "  SIZE_FORMAT "K"
  84                         "->" SIZE_FORMAT "K"
  85                         "("  SIZE_FORMAT "K)",
  86                         prev_used / K, used() / K, capacity() / K);
  87   }
  88 }
  89 


  98   }
  99 }
 100 
 101 void Generation::print() const { print_on(tty); }
 102 
     // Print a one-line summary: generation name, total/used sizes in KB,
     // and the [low_boundary, high, high_boundary) bounds of its virtual space.
 103 void Generation::print_on(outputStream* st)  const {
 104   st->print(" %-20s", name());
 105   st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
 106              capacity()/K, used()/K);
 107   st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")",
 108               _virtual_space.low_boundary(),
 109               _virtual_space.high(),
 110               _virtual_space.high_boundary());
 111 }
 112 
     // Convenience wrapper: print the accumulated-GC-time summary to tty.
 113 void Generation::print_summary_info() { print_summary_info_on(tty); }
 114 
 115 void Generation::print_summary_info_on(outputStream* st) {
 116   StatRecord* sr = stat_record();
 117   double time = sr->accumulated_time.seconds();
 118   // I didn't want to change the logging when removing the level concept,
 119   // but I guess this logging could say young/old or something instead of 0/1.
 120   int level;
 121   if (this == GenCollectedHeap::heap()->young_gen()) {
 122     level = 0;
 123   } else {
 124     level = 1;
 125   }
 126   st->print_cr("[Accumulated GC generation %d time %3.7f secs, "
 127                "%d GC's, avg GC time %3.7f]",
 128                level, time, sr->invocations,
 129                sr->invocations > 0 ? time / sr->invocations : 0.0);
 130 }
 131 
 132 // Utility iterator classes
 133 
 134 class GenerationIsInReservedClosure : public SpaceClosure {
 135  public:
 136   const void* _p;
 137   Space* sp;
 138   virtual void do_space(Space* s) {
 139     if (sp == NULL) {
 140       if (s->is_in_reserved(_p)) sp = s;
 141     }
 142   }
 143   GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {}
 144 };
 145 
 146 class GenerationIsInClosure : public SpaceClosure {
 147  public:
 148   const void* _p;


 151     if (sp == NULL) {
 152       if (s->is_in(_p)) sp = s;
 153     }
 154   }
 155   GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {}
 156 };
 157 
 158 bool Generation::is_in(const void* p) const {
 159   GenerationIsInClosure blk(p);
 160   ((Generation*)this)->space_iterate(&blk);
 161   return blk.sp != NULL;
 162 }
 163 
 164 DefNewGeneration* Generation::as_DefNewGeneration() {
 165   assert((kind() == Generation::DefNew) ||
 166          (kind() == Generation::ParNew),
 167     "Wrong youngest generation type");
 168   return (DefNewGeneration*) this;
 169 }
 170 









 171 size_t Generation::max_contiguous_available() const {
 172   // The largest number of contiguous free words in this or any higher generation.
 173   size_t avail = contiguous_available();
 174   size_t old_avail = 0;
 175   if (this == GenCollectedHeap::heap()->young_gen()) {
 176     old_avail = GenCollectedHeap::heap()->old_gen()->contiguous_available();


 177   }
 178   return MAX2(avail, old_avail);
 179 }
 180 
 181 bool Generation::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const {
 182   size_t available = max_contiguous_available();
 183   bool   res = (available >= max_promotion_in_bytes);
 184   if (PrintGC && Verbose) {
 185     gclog_or_tty->print_cr(
 186       "Generation: promo attempt is%s safe: available("SIZE_FORMAT") %s max_promo("SIZE_FORMAT")",
 187       res? "":" not", available, res? ">=":"<",
 188       max_promotion_in_bytes);
 189   }
 190   return res;
 191 }
 192 
 193 // Ignores "ref" and calls allocate().
 194 oop Generation::promote(oop obj, size_t obj_size) {
 195   assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");
 196 
 197 #ifndef PRODUCT
 198   if (Universe::heap()->promotion_should_fail()) {


 357     sp->adjust_pointers();
 358   }
 359 };
 360 
 361 void Generation::adjust_pointers() {
 362   // Note that this is done over all spaces, not just the compactible
 363   // ones.
 364   AdjustPointersClosure blk;
 365   space_iterate(&blk, true);
 366 }
 367 
 368 void Generation::compact() {
 369   CompactibleSpace* sp = first_compaction_space();
 370   while (sp != NULL) {
 371     sp->compact();
 372     sp = sp->next_compaction_space();
 373   }
 374 }
 375 
 376 CardGeneration::CardGeneration(ReservedSpace rs, size_t initial_byte_size,

 377                                GenRemSet* remset) :
 378   Generation(rs, initial_byte_size), _rs(remset),
 379   _shrink_factor(0), _min_heap_delta_bytes(), _capacity_at_prologue(),
 380   _used_at_prologue()
 381 {
 382   HeapWord* start = (HeapWord*)rs.base();
 383   size_t reserved_byte_size = rs.size();
 384   assert((uintptr_t(start) & 3) == 0, "bad alignment");
 385   assert((reserved_byte_size & 3) == 0, "bad alignment");
 386   MemRegion reserved_mr(start, heap_word_size(reserved_byte_size));
 387   _bts = new BlockOffsetSharedArray(reserved_mr,
 388                                     heap_word_size(initial_byte_size));
 389   MemRegion committed_mr(start, heap_word_size(initial_byte_size));
 390   _rs->resize_covered_region(committed_mr);
 391   if (_bts == NULL)
 392     vm_exit_during_initialization("Could not allocate a BlockOffsetArray");
 393 
 394   // Verify that the start and end of this generation is the start of a card.
 395   // If this wasn't true, a single card could span more than one generation,
 396   // which would cause problems when we commit/uncommit memory, and when we
 397   // clear and dirty cards.
 398   guarantee(_rs->is_aligned(reserved_mr.start()), "generation must be card aligned");


 614 
 615 
     // Perform a full mark-sweep collection of this generation. The timer
     // and tracer bracket GenMarkSweep::invoke_at_safepoint, which does the
     // actual collection work.
 616 void OneContigSpaceCardGeneration::collect(bool   full,
 617                                            bool   clear_all_soft_refs,
 618                                            size_t size,
 619                                            bool   is_tlab) {
 620   GenCollectedHeap* gch = GenCollectedHeap::heap();
 621 
 622   SpecializationStats::clear();
 623   // Temporarily expand the span of our ref processor, so
 624   // refs discovery is over the entire heap, not just this generation
 625   ReferenceProcessorSpanMutator
 626     x(ref_processor(), gch->reserved_region());
 627 
 628   STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
 629   gc_timer->register_gc_start();
 630 
 631   SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
 632   gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
 633 
 634   GenMarkSweep::invoke_at_safepoint(ref_processor(), clear_all_soft_refs);
 635 
 636   gc_timer->register_gc_end();
 637 
 638   gc_tracer->report_gc_end(gc_timer->gc_end(), gc_timer->time_partitions());
 639 
 640   SpecializationStats::print();
 641 }
 642 
 643 HeapWord*
 644 OneContigSpaceCardGeneration::expand_and_allocate(size_t word_size,
 645                                                   bool is_tlab,
 646                                                   bool parallel) {
 647   assert(!is_tlab, "OneContigSpaceCardGeneration does not support TLAB allocation");
 648   if (parallel) {
 649     MutexLocker x(ParGCRareEvent_lock);
 650     HeapWord* result = NULL;
 651     size_t byte_size = word_size * HeapWordSize;
 652     while (true) {
 653       expand(byte_size, _min_heap_delta_bytes);
 654       if (GCExpandToAllocateDelayMillis > 0) {


src/share/vm/memory/generation.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File