< prev index next >

src/hotspot/share/gc/g1/heapRegion.hpp

Print this page
rev 56449 : imported patch 8220310.mut.1


 274 
  // Reset both the "prev" and "next" top-at-mark-start (TAMS) pointers to
  // the region's bottom. Precondition: the marked-byte counters are zero,
  // i.e. zero_marked_bytes() has already been called (see assert below).
 275   void init_top_at_mark_start() {
 276     assert(_prev_marked_bytes == 0 &&
 277            _next_marked_bytes == 0,
 278            "Must be called after zero_marked_bytes.");
 279     HeapWord* bot = bottom();
 280     _prev_top_at_mark_start = bot;
 281     _next_top_at_mark_start = bot;
 282   }
 283 
 284   // Cached attributes used in the collection set policy information
 285 
 286   // The RSet length that was added to the total value
 287   // for the collection set.
 288   size_t _recorded_rs_length;
 289 
 290   // The predicted elapsed time that was added to total value
 291   // for the collection set.
 292   double _predicted_elapsed_time_ms;
 293 


 294   // Iterate over the references covered by the given MemRegion in a humongous
 295   // object and apply the given closure to them.
 296   // Humongous objects are allocated directly in the old-gen. So we need special
 297   // handling for concurrent processing encountering an in-progress allocation.
 298   // Returns the address after the last actually scanned or NULL if the area could
 299   // not be scanned (That should only happen when invoked concurrently with the
 300   // mutator).
 301   template <class Closure, bool is_gc_active>
 302   inline HeapWord* do_oops_on_memregion_in_humongous(MemRegion mr,
 303                                                      Closure* cl,
 304                                                      G1CollectedHeap* g1h);
 305 
 306   // Returns the block size of the given (dead, potentially having its class unloaded) object
 307   // starting at p extending to at most the prev TAMS using the given mark bitmap.
 308   inline size_t block_size_using_bitmap(const HeapWord* p, const G1CMBitMap* const prev_bitmap) const;
 309  public:
 310   HeapRegion(uint hrm_index,
 311              G1BlockOffsetTable* bot,
 312              MemRegion mr);
 313 


  // Read accessors for the cached collection set attributes declared above.
  667   size_t recorded_rs_length() const        { return _recorded_rs_length; }
  668   double predicted_elapsed_time_ms() const { return _predicted_elapsed_time_ms; }
  669 
  // Record the RSet length that was added to the collection set total.
  670   void set_recorded_rs_length(size_t rs_length) {
  671     _recorded_rs_length = rs_length;
  672   }
  673 
  // Record the predicted elapsed time (in ms) that was added to the
  // collection set total.
  674   void set_predicted_elapsed_time_ms(double ms) {
  675     _predicted_elapsed_time_ms = ms;
  676   }
 677 
  678   // Routines for managing a list of code roots (attached to
  679   // this region's RSet) that point into this heap region.
 680   void add_strong_code_root(nmethod* nm);
 681   void add_strong_code_root_locked(nmethod* nm);
 682   void remove_strong_code_root(nmethod* nm);
 683 
 684   // Applies blk->do_code_blob() to each of the entries in
 685   // the strong code roots list for this region
 686   void strong_code_roots_do(CodeBlobClosure* blk) const;



 687 
 688   // Verify that the entries on the strong code root list for this
 689   // region are live and include at least one pointer into this region.
 690   void verify_strong_code_roots(VerifyOption vo, bool* failures) const;
 691 
 692   void print() const;
 693   void print_on(outputStream* st) const;
 694 
 695   // vo == UsePrevMarking -> use "prev" marking information,
 696   // vo == UseNextMarking -> use "next" marking information
 697   // vo == UseFullMarking -> use "next" marking bitmap but no TAMS
 698   //
 699   // NOTE: Only the "prev" marking information is guaranteed to be
 700   // consistent most of the time, so most calls to this should use
 701   // vo == UsePrevMarking.
 702   // Currently, there is only one case where this is called with
 703   // vo == UseNextMarking, which is to verify the "next" marking
 704   // information at the end of remark.
 705   // Currently there is only one place where this is called with
 706   // vo == UseFullMarking, which is to verify the marking during a




 274 
  // Reset both the "prev" and "next" top-at-mark-start (TAMS) pointers to
  // the region's bottom. Precondition: the marked-byte counters are zero,
  // i.e. zero_marked_bytes() has already been called (see assert below).
 275   void init_top_at_mark_start() {
 276     assert(_prev_marked_bytes == 0 &&
 277            _next_marked_bytes == 0,
 278            "Must be called after zero_marked_bytes.");
 279     HeapWord* bot = bottom();
 280     _prev_top_at_mark_start = bot;
 281     _next_top_at_mark_start = bot;
 282   }
 283 
 284   // Cached attributes used in the collection set policy information
 285 
 286   // The RSet length that was added to the total value
 287   // for the collection set.
 288   size_t _recorded_rs_length;
 289 
 290   // The predicted elapsed time that was added to total value
 291   // for the collection set.
 292   double _predicted_elapsed_time_ms;
 293 
 294   uint _node_index;
 295 
 296   // Iterate over the references covered by the given MemRegion in a humongous
 297   // object and apply the given closure to them.
 298   // Humongous objects are allocated directly in the old-gen. So we need special
 299   // handling for concurrent processing encountering an in-progress allocation.
 300   // Returns the address after the last actually scanned or NULL if the area could
 301   // not be scanned (That should only happen when invoked concurrently with the
 302   // mutator).
 303   template <class Closure, bool is_gc_active>
 304   inline HeapWord* do_oops_on_memregion_in_humongous(MemRegion mr,
 305                                                      Closure* cl,
 306                                                      G1CollectedHeap* g1h);
 307 
 308   // Returns the block size of the given (dead, potentially having its class unloaded) object
 309   // starting at p extending to at most the prev TAMS using the given mark bitmap.
 310   inline size_t block_size_using_bitmap(const HeapWord* p, const G1CMBitMap* const prev_bitmap) const;
 311  public:
 312   HeapRegion(uint hrm_index,
 313              G1BlockOffsetTable* bot,
 314              MemRegion mr);
 315 


  // Read accessors for the cached collection set attributes declared above.
  669   size_t recorded_rs_length() const        { return _recorded_rs_length; }
  670   double predicted_elapsed_time_ms() const { return _predicted_elapsed_time_ms; }
  671 
  // Record the RSet length that was added to the collection set total.
  672   void set_recorded_rs_length(size_t rs_length) {
  673     _recorded_rs_length = rs_length;
  674   }
  675 
  // Record the predicted elapsed time (in ms) that was added to the
  // collection set total.
  676   void set_predicted_elapsed_time_ms(double ms) {
  677     _predicted_elapsed_time_ms = ms;
  678   }
 679 
  680   // Routines for managing a list of code roots (attached to
  681   // this region's RSet) that point into this heap region.
 682   void add_strong_code_root(nmethod* nm);
 683   void add_strong_code_root_locked(nmethod* nm);
 684   void remove_strong_code_root(nmethod* nm);
 685 
 686   // Applies blk->do_code_blob() to each of the entries in
 687   // the strong code roots list for this region
 688   void strong_code_roots_do(CodeBlobClosure* blk) const;
 689 
  // Accessors for _node_index. NOTE(review): presumably the index of the
  // memory (NUMA) node this region is associated with — the field carries
  // no comment here; confirm against the change that introduced it.
  690   uint node_index() const { return _node_index; }
  691   void set_node_index(uint node_index) { _node_index = node_index; }
 692 
 693   // Verify that the entries on the strong code root list for this
 694   // region are live and include at least one pointer into this region.
 695   void verify_strong_code_roots(VerifyOption vo, bool* failures) const;
 696 
 697   void print() const;
 698   void print_on(outputStream* st) const;
 699 
 700   // vo == UsePrevMarking -> use "prev" marking information,
 701   // vo == UseNextMarking -> use "next" marking information
 702   // vo == UseFullMarking -> use "next" marking bitmap but no TAMS
 703   //
 704   // NOTE: Only the "prev" marking information is guaranteed to be
 705   // consistent most of the time, so most calls to this should use
 706   // vo == UsePrevMarking.
 707   // Currently, there is only one case where this is called with
 708   // vo == UseNextMarking, which is to verify the "next" marking
 709   // information at the end of remark.
 710   // Currently there is only one place where this is called with
 711   // vo == UseFullMarking, which is to verify the marking during a


< prev index next >