< prev index next >

src/share/vm/gc/g1/heapRegion.hpp

Print this page




  26 #define SHARE_VM_GC_G1_HEAPREGION_HPP
  27 
  28 #include "gc/g1/g1AllocationContext.hpp"
  29 #include "gc/g1/g1BlockOffsetTable.hpp"
  30 #include "gc/g1/heapRegionType.hpp"
  31 #include "gc/g1/survRateGroup.hpp"
  32 #include "gc/shared/ageTable.hpp"
  33 #include "gc/shared/spaceDecorator.hpp"
  34 #include "utilities/macros.hpp"
  35 
  36 // A HeapRegion is the smallest piece of a G1CollectedHeap that
  37 // can be collected independently.
  38 
  39 // NOTE: Although a HeapRegion is a Space, its
  40 // Space::initDirtyCardClosure method must not be called.
  41 // The problem is that the existence of this method breaks
  42 // the independence of barrier sets from remembered sets.
  43 // The solution is to remove this method from the definition
  44 // of a Space.
  45 









  46 class G1CollectedHeap;
  47 class HeapRegionRemSet;
  48 class HeapRegionRemSetIterator;
  49 class HeapRegion;
  50 class HeapRegionSetBase;
  51 class nmethod;
  52 
  53 #define HR_FORMAT "%u:(%s)[" PTR_FORMAT "," PTR_FORMAT "," PTR_FORMAT "]"
  54 #define HR_FORMAT_PARAMS(_hr_) \
  55                 (_hr_)->hrm_index(), \
  56                 (_hr_)->get_short_type_str(), \
  57                 p2i((_hr_)->bottom()), p2i((_hr_)->top()), p2i((_hr_)->end())
  58 
  59 // sentinel value for hrm_index
  60 #define G1_NO_HRM_INDEX ((uint) -1)
  61 
  62 // A dirty card to oop closure for heap regions. It
  63 // knows how to get the G1 heap and how to use the bitmap
  64 // in the concurrent marker used by G1 to filter remembered
  65 // sets.


 547   // Notify the region that it ceases being to-space during a GC and
 548   // we will not copy objects into it any more.
 549   inline void note_end_of_copying(bool during_initial_mark);
 550 
 551   // Notify the region that we are about to start processing
 552   // self-forwarded objects during evac failure handling.
 553   void note_self_forwarding_removal_start(bool during_initial_mark,
 554                                           bool during_conc_mark);
 555 
 556   // Notify the region that we have finished processing self-forwarded
 557   // objects during evac failure handling.
 558   void note_self_forwarding_removal_end(bool during_initial_mark,
 559                                         bool during_conc_mark,
 560                                         size_t marked_bytes);
 561 
 562   // Returns "false" iff no object in the region was allocated when the
 563   // last mark phase ended.
       // Implemented by checking whether the previous top-at-mark-start
       // (prev TAMS) has moved away from bottom().
 564   bool is_marked() { return _prev_top_at_mark_start != bottom(); }
 565 
 566   void reset_during_compaction() {



       // Clear the region's marking state: zero the marked-byte count and
       // re-initialize the top-at-mark-start pointers (presumably back to
       // bottom() -- confirm in init_top_at_mark_start()'s definition).
 567     zero_marked_bytes();
 568     init_top_at_mark_start();
 569   }
 570 
       // Recomputes the cached GC-efficiency value for this region.
       // NOTE(review): definition not visible here -- presumably fills
       // _gc_efficiency; confirm in heapRegion.cpp.
 571   void calc_gc_efficiency(void);
       // Returns the cached value; only meaningful after a prior call to
       // calc_gc_efficiency().
 572   double gc_efficiency() { return _gc_efficiency;}
 573 
       // Index of this region within the collection set; -1 appears to be
       // the "not in a young collection set" sentinel -- TODO confirm with
       // callers.
 574   int  young_index_in_cset() const { return _young_index_in_cset; }
 575   void set_young_index_in_cset(int index) {
         // -1 clears the index; any other value requires a young region.
 576     assert( (index == -1) || is_young(), "pre-condition" );
 577     _young_index_in_cset = index;
 578   }
 579 
       // Returns this region's age within its survivor-rate group.
       // Preconditions (asserted): the region belongs to a group and has a
       // valid age index.
 580   int age_in_surv_rate_group() {
 581     assert( _surv_rate_group != NULL, "pre-condition" );
 582     assert( _age_index > -1, "pre-condition" );
 583     return _surv_rate_group->age_in_group(_age_index);
 584   }
 585 
 586   void record_surv_words_in_group(size_t words_survived) {




  26 #define SHARE_VM_GC_G1_HEAPREGION_HPP
  27 
  28 #include "gc/g1/g1AllocationContext.hpp"
  29 #include "gc/g1/g1BlockOffsetTable.hpp"
  30 #include "gc/g1/heapRegionType.hpp"
  31 #include "gc/g1/survRateGroup.hpp"
  32 #include "gc/shared/ageTable.hpp"
  33 #include "gc/shared/spaceDecorator.hpp"
  34 #include "utilities/macros.hpp"
  35 
  36 // A HeapRegion is the smallest piece of a G1CollectedHeap that
  37 // can be collected independently.
  38 
  39 // NOTE: Although a HeapRegion is a Space, its
  40 // Space::initDirtyCardClosure method must not be called.
  41 // The problem is that the existence of this method breaks
  42 // the independence of barrier sets from remembered sets.
  43 // The solution is to remove this method from the definition
  44 // of a Space.
  45 
  46 // Each heap region is self contained. top() and end() can never
  47 // be set beyond the end of the region. For humongous objects,
  48 // the first region is a StartsHumongous region. If the humongous
  49 // object is larger than a heap region, the following regions will
  50 // be of type ContinuesHumongous. In this case the top() and end()
  51 // of the StartsHumongous region will point to the end of that region.
  52 // The same will be true for all ContinuesHumongous regions except
  53 // the last, which will have its top() at the object's top.
  54 
  55 class G1CollectedHeap;
  56 class HeapRegionRemSet;
  57 class HeapRegionRemSetIterator;
  58 class HeapRegion;
  59 class HeapRegionSetBase;
  60 class nmethod;
  61 
  62 #define HR_FORMAT "%u:(%s)[" PTR_FORMAT "," PTR_FORMAT "," PTR_FORMAT "]"
  63 #define HR_FORMAT_PARAMS(_hr_) \
  64                 (_hr_)->hrm_index(), \
  65                 (_hr_)->get_short_type_str(), \
  66                 p2i((_hr_)->bottom()), p2i((_hr_)->top()), p2i((_hr_)->end())
  67 
  68 // sentinel value for hrm_index
  69 #define G1_NO_HRM_INDEX ((uint) -1)
  70 
  71 // A dirty card to oop closure for heap regions. It
  72 // knows how to get the G1 heap and how to use the bitmap
  73 // in the concurrent marker used by G1 to filter remembered
  74 // sets.


 556   // Notify the region that it ceases being to-space during a GC and
 557   // we will not copy objects into it any more.
 558   inline void note_end_of_copying(bool during_initial_mark);
 559 
 560   // Notify the region that we are about to start processing
 561   // self-forwarded objects during evac failure handling.
 562   void note_self_forwarding_removal_start(bool during_initial_mark,
 563                                           bool during_conc_mark);
 564 
 565   // Notify the region that we have finished processing self-forwarded
 566   // objects during evac failure handling.
 567   void note_self_forwarding_removal_end(bool during_initial_mark,
 568                                         bool during_conc_mark,
 569                                         size_t marked_bytes);
 570 
 571   // Returns "false" iff no object in the region was allocated when the
 572   // last mark phase ended.
       // Implemented by checking whether the previous top-at-mark-start
       // (prev TAMS) has moved away from bottom().
 573   bool is_marked() { return _prev_top_at_mark_start != bottom(); }
 574 
 575   void reset_during_compaction() {
         // Full-GC compaction resets marking state only for humongous
         // regions here; other region types are handled elsewhere.
 576     assert(is_humongous(),
 577            "should only be called for humongous regions");
 578 
       // Clear the region's marking state: zero the marked-byte count and
       // re-initialize the top-at-mark-start pointers (presumably back to
       // bottom() -- confirm in init_top_at_mark_start()'s definition).
 579     zero_marked_bytes();
 580     init_top_at_mark_start();
 581   }
 582 
       // Recomputes the cached GC-efficiency value for this region.
       // NOTE(review): definition not visible here -- presumably fills
       // _gc_efficiency; confirm in heapRegion.cpp.
 583   void calc_gc_efficiency(void);
       // Returns the cached value; only meaningful after a prior call to
       // calc_gc_efficiency().
 584   double gc_efficiency() { return _gc_efficiency;}
 585 
       // Index of this region within the collection set; -1 appears to be
       // the "not in a young collection set" sentinel -- TODO confirm with
       // callers.
 586   int  young_index_in_cset() const { return _young_index_in_cset; }
 587   void set_young_index_in_cset(int index) {
         // -1 clears the index; any other value requires a young region.
 588     assert( (index == -1) || is_young(), "pre-condition" );
 589     _young_index_in_cset = index;
 590   }
 591 
       // Returns this region's age within its survivor-rate group.
       // Preconditions (asserted): the region belongs to a group and has a
       // valid age index.
 592   int age_in_surv_rate_group() {
 593     assert( _surv_rate_group != NULL, "pre-condition" );
 594     assert( _age_index > -1, "pre-condition" );
 595     return _surv_rate_group->age_in_group(_age_index);
 596   }
 597 
 598   void record_surv_words_in_group(size_t words_survived) {


< prev index next >