Print this page
rev 3463 : 7114678: G1: various small fixes, code cleanup, and refactoring
Summary: Various cleanups as a prelude to introducing iterators for HeapRegions.
Reviewed-by: johnc
Contributed-by: tonyp

Split Close
Expand all
Collapse all
          --- old/src/share/vm/gc_implementation/g1/heapRegion.hpp
          +++ new/src/share/vm/gc_implementation/g1/heapRegion.hpp
↓ open down ↓ 47 lines elided ↑ open up ↑
  48   48  class CompactibleSpace;
  49   49  class ContiguousSpace;
  50   50  class HeapRegionRemSet;
  51   51  class HeapRegionRemSetIterator;
  52   52  class HeapRegion;
  53   53  class HeapRegionSetBase;
  54   54  
  55   55  #define HR_FORMAT "%u:(%s)["PTR_FORMAT","PTR_FORMAT","PTR_FORMAT"]"
  56   56  #define HR_FORMAT_PARAMS(_hr_) \
  57   57                  (_hr_)->hrs_index(), \
  58      -                (_hr_)->is_survivor() ? "S" : (_hr_)->is_young() ? "E" : "-", \
       58 +                (_hr_)->is_survivor() ? "S" : (_hr_)->is_young() ? "E" : \
       59 +                (_hr_)->startsHumongous() ? "HS" : \
       60 +                (_hr_)->continuesHumongous() ? "HC" : \
       61 +                !(_hr_)->is_empty() ? "O" : "F", \
  59   62                  (_hr_)->bottom(), (_hr_)->top(), (_hr_)->end()
  60   63  
  61   64  // sentinel value for hrs_index
  62   65  #define G1_NULL_HRS_INDEX ((uint) -1)
  63   66  
  64   67  // A dirty card to oop closure for heap regions. It
  65   68  // knows how to get the G1 heap and how to use the bitmap
  66   69  // in the concurrent marker used by G1 to filter remembered
  67   70  // sets.
  68   71  
↓ open down ↓ 97 lines elided ↑ open up ↑
 166  169    // assumed to contain zeros.
 167  170    G1OffsetTableContigSpace(G1BlockOffsetSharedArray* sharedOffsetArray,
 168  171                             MemRegion mr, bool is_zeroed = false);
 169  172  
 170  173    void set_bottom(HeapWord* value);
 171  174    void set_end(HeapWord* value);
 172  175  
 173  176    virtual HeapWord* saved_mark_word() const;
 174  177    virtual void set_saved_mark();
 175  178    void reset_gc_time_stamp() { _gc_time_stamp = 0; }
      179 +  unsigned get_gc_time_stamp() { return _gc_time_stamp; }
 176  180  
 177  181    // See the comment above in the declaration of _pre_dummy_top for an
 178  182    // explanation of what it is.
 179  183    void set_pre_dummy_top(HeapWord* pre_dummy_top) {
 180  184      assert(is_in(pre_dummy_top) && pre_dummy_top <= top(), "pre-condition");
 181  185      _pre_dummy_top = pre_dummy_top;
 182  186    }
 183  187    HeapWord* pre_dummy_top() {
 184  188      return (_pre_dummy_top == NULL) ? top() : _pre_dummy_top;
 185  189    }
↓ open down ↓ 246 lines elided ↑ open up ↑
 432  436    }
 433  437  
 434  438    bool isHumongous() const { return _humongous_type != NotHumongous; }
 435  439    bool startsHumongous() const { return _humongous_type == StartsHumongous; }
 436  440    bool continuesHumongous() const { return _humongous_type == ContinuesHumongous; }
 437  441    // For a humongous region, region in which it starts.
 438  442    HeapRegion* humongous_start_region() const {
 439  443      return _humongous_start_region;
 440  444    }
 441  445  
      446 +  // Return the number of distinct regions that are covered by this region:
      447 +  // 1 if the region is not humongous, >= 1 if the region is humongous.
      448 +  uint region_num() const {
      449 +    if (!isHumongous()) {
      450 +      return 1U;
      451 +    } else {
      452 +      assert(startsHumongous(), "doesn't make sense on HC regions");
      453 +      assert(capacity() % HeapRegion::GrainBytes == 0, "sanity");
      454 +      return (uint) (capacity() >> HeapRegion::LogOfHRGrainBytes);
      455 +    }
      456 +  }
      457 +
       458 +  // Return the index + 1 of the last HC region that's associated
       459 +  // with this HS region.
      460 +  uint last_hc_index() const {
      461 +    assert(startsHumongous(), "don't call this otherwise");
      462 +    return hrs_index() + region_num();
      463 +  }
      464 +
 442  465    // Same as Space::is_in_reserved, but will use the original size of the region.
 443  466    // The original size is different only for start humongous regions. They get
 444  467    // their _end set up to be the end of the last continues region of the
 445  468    // corresponding humongous object.
 446  469    bool is_in_reserved_raw(const void* p) const {
 447  470      return _bottom <= p && p < _orig_end;
 448  471    }
 449  472  
 450  473    // Makes the current region be a "starts humongous" region, i.e.,
 451  474    // the first region in a series of one or more contiguous regions
↓ open down ↓ 163 lines elided ↑ open up ↑
 615  638    // objects during evac failure handling.
 616  639    void note_self_forwarding_removal_end(bool during_initial_mark,
 617  640                                          bool during_conc_mark,
 618  641                                          size_t marked_bytes);
 619  642  
 620  643    // Returns "false" iff no object in the region was allocated when the
 621  644    // last mark phase ended.
 622  645    bool is_marked() { return _prev_top_at_mark_start != bottom(); }
 623  646  
 624  647    void reset_during_compaction() {
 625      -    guarantee( isHumongous() && startsHumongous(),
 626      -               "should only be called for humongous regions");
      648 +    assert(isHumongous() && startsHumongous(),
      649 +           "should only be called for starts humongous regions");
 627  650  
 628  651      zero_marked_bytes();
 629  652      init_top_at_mark_start();
 630  653    }
 631  654  
 632  655    void calc_gc_efficiency(void);
 633  656    double gc_efficiency() { return _gc_efficiency;}
 634  657  
 635  658    bool is_young() const     { return _young_type != NotYoung; }
 636  659    bool is_survivor() const  { return _young_type == Survivor; }
↓ open down ↓ 130 lines elided ↑ open up ↑
 767  790    }
 768  791  
 769  792    void set_predicted_bytes_to_copy(size_t bytes) {
 770  793      _predicted_bytes_to_copy = bytes;
 771  794    }
 772  795  
 773  796  #define HeapRegion_OOP_SINCE_SAVE_MARKS_DECL(OopClosureType, nv_suffix)  \
 774  797    virtual void oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl);
 775  798    SPECIALIZED_SINCE_SAVE_MARKS_CLOSURES(HeapRegion_OOP_SINCE_SAVE_MARKS_DECL)
 776  799  
 777      -  CompactibleSpace* next_compaction_space() const;
      800 +  virtual CompactibleSpace* next_compaction_space() const;
 778  801  
 779  802    virtual void reset_after_compaction();
 780  803  
 781  804    void print() const;
 782  805    void print_on(outputStream* st) const;
 783  806  
 784  807    // vo == UsePrevMarking  -> use "prev" marking information,
 785  808    // vo == UseNextMarking -> use "next" marking information
 786  809    // vo == UseMarkWord    -> use the mark word in the object header
 787  810    //
↓ open down ↓ 38 lines elided ↑ open up ↑
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX