21 * questions.
22 *
23 */
24
25 #ifndef SHARE_GC_G1_HEAPREGION_HPP
26 #define SHARE_GC_G1_HEAPREGION_HPP
27
28 #include "gc/g1/g1BlockOffsetTable.hpp"
29 #include "gc/g1/g1HeapRegionTraceType.hpp"
30 #include "gc/g1/heapRegionTracer.hpp"
31 #include "gc/g1/heapRegionType.hpp"
32 #include "gc/g1/survRateGroup.hpp"
33 #include "gc/shared/ageTable.hpp"
34 #include "gc/shared/spaceDecorator.hpp"
35 #include "gc/shared/verifyOption.hpp"
36 #include "runtime/mutex.hpp"
37 #include "utilities/macros.hpp"
38
39 class G1CollectedHeap;
40 class G1CMBitMap;
41 class HeapRegionRemSet;
42 class HeapRegion;
43 class HeapRegionSetBase;
44 class nmethod;
45
46 #define HR_FORMAT "%u:(%s)[" PTR_FORMAT "," PTR_FORMAT "," PTR_FORMAT "]"
47 #define HR_FORMAT_PARAMS(_hr_) \
48 (_hr_)->hrm_index(), \
49 (_hr_)->get_short_type_str(), \
50 p2i((_hr_)->bottom()), p2i((_hr_)->top()), p2i((_hr_)->end())
51
52 // sentinel value for hrm_index
53 #define G1_NO_HRM_INDEX ((uint) -1)
54
55 // A HeapRegion is the smallest piece of a G1CollectedHeap that
56 // can be collected independently.
57
58 // Each heap region is self-contained. top() and end() can never
59 // be set beyond the end of the region. For humongous objects,
60 // the first region is a StartsHumongous region. If the humongous
// Index of this region in the optional collection set, or InvalidCSetIndex
// if the region is not part of the optional collection set.
uint index_in_opt_cset() const {
  assert(has_index_in_opt_cset(), "Opt cset index not set.");
  return _index_in_opt_cset;
}
// True iff an optional collection set index has been assigned to this region.
bool has_index_in_opt_cset() const { return _index_in_opt_cset != InvalidCSetIndex; }
void set_index_in_opt_cset(uint index) { _index_in_opt_cset = index; }
// Reset the index back to the InvalidCSetIndex sentinel.
void clear_index_in_opt_cset() { _index_in_opt_cset = InvalidCSetIndex; }
534
// Compute and cache this region's GC efficiency; the cached value is
// returned by gc_efficiency() below.
void calc_gc_efficiency(void);
double gc_efficiency() const { return _gc_efficiency;}

// Index of this region within the young part of the collection set.
// 0 is the "not set" sentinel (see the asserts in the setter below).
uint young_index_in_cset() const { return _young_index_in_cset; }
void clear_young_index_in_cset() { _young_index_in_cset = 0; }
void set_young_index_in_cset(uint index) {
  assert(index != UINT_MAX, "just checking");
  assert(index != 0, "just checking");  // 0 is reserved for the cleared state
  assert(is_young(), "pre-condition");
  _young_index_in_cset = index;
}
546
// Age of this region within its survivor rate group. Requires that a
// group has been installed and a valid (non-negative) age index assigned.
int age_in_surv_rate_group() {
  assert(_surv_rate_group != NULL, "pre-condition");
  assert(_age_index > -1, "pre-condition");
  return _surv_rate_group->age_in_group(_age_index);
}
552
553 void record_surv_words_in_group(size_t words_survived) {
554 assert(_surv_rate_group != NULL, "pre-condition");
555 assert(_age_index > -1, "pre-condition");
556 int age_in_group = age_in_surv_rate_group();
557 _surv_rate_group->record_surviving_words(age_in_group, words_survived);
558 }
559
560 int age_in_surv_rate_group_cond() {
561 if (_surv_rate_group != NULL)
562 return age_in_surv_rate_group();
563 else
564 return -1;
565 }
566
// The survivor rate group this region belongs to, or NULL if none installed.
SurvRateGroup* surv_rate_group() {
  return _surv_rate_group;
}
570
// Associate this (young) region with the given survivor rate group and
// obtain its age index within that group. The region must not already
// have a group installed.
void install_surv_rate_group(SurvRateGroup* surv_rate_group) {
  assert(surv_rate_group != NULL, "pre-condition");
  assert(_surv_rate_group == NULL, "pre-condition");
  assert(is_young(), "pre-condition");

  _surv_rate_group = surv_rate_group;
  _age_index = surv_rate_group->next_age_index();
}
579
580 void uninstall_surv_rate_group() {
581 if (_surv_rate_group != NULL) {
582 assert(_age_index > -1, "pre-condition");
583 assert(is_young(), "pre-condition");
584
585 _surv_rate_group = NULL;
586 _age_index = -1;
587 } else {
588 assert(_age_index == -1, "pre-condition");
589 }
590 }
591
// Determine if an object has been allocated since the last
// mark performed by the collector. This returns true iff the object
// is within the unmarked area of the region, i.e. at or above the
// previous top-at-mark-start.
bool obj_allocated_since_prev_marking(oop obj) const {
  return (HeapWord *) obj >= prev_top_at_mark_start();
}
// Same test, but relative to the in-progress (next) marking's
// top-at-mark-start.
bool obj_allocated_since_next_marking(oop obj) const {
  return (HeapWord *) obj >= next_top_at_mark_start();
}
601
602 // Iterate over the objects overlapping the given memory region, applying cl
603 // to all references in the region. This is a helper for
604 // G1RemSet::refine_card*, and is tightly coupled with them.
605 // mr must not be empty. Must be trimmed to the allocated/parseable space in this region.
606 // This region must be old or humongous.
607 // Returns the next unscanned address if the designated objects were successfully
608 // processed, NULL if an unparseable part of the heap was encountered (That should
609 // only happen when invoked concurrently with the mutator).
610 template <bool is_gc_active, class Closure>
|
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_GC_G1_HEAPREGION_HPP
26 #define SHARE_GC_G1_HEAPREGION_HPP
27
28 #include "gc/g1/g1BlockOffsetTable.hpp"
29 #include "gc/g1/g1HeapRegionTraceType.hpp"
30 #include "gc/g1/heapRegionTracer.hpp"
31 #include "gc/g1/heapRegionType.hpp"
32 #include "gc/g1/survRateGroup.hpp"
33 #include "gc/shared/ageTable.hpp"
34 #include "gc/shared/spaceDecorator.hpp"
35 #include "gc/shared/verifyOption.hpp"
36 #include "runtime/mutex.hpp"
37 #include "utilities/macros.hpp"
38
39 class G1CollectedHeap;
40 class G1CMBitMap;
41 class G1Predictions;
42 class HeapRegionRemSet;
43 class HeapRegion;
44 class HeapRegionSetBase;
45 class nmethod;
46
47 #define HR_FORMAT "%u:(%s)[" PTR_FORMAT "," PTR_FORMAT "," PTR_FORMAT "]"
48 #define HR_FORMAT_PARAMS(_hr_) \
49 (_hr_)->hrm_index(), \
50 (_hr_)->get_short_type_str(), \
51 p2i((_hr_)->bottom()), p2i((_hr_)->top()), p2i((_hr_)->end())
52
53 // sentinel value for hrm_index
54 #define G1_NO_HRM_INDEX ((uint) -1)
55
56 // A HeapRegion is the smallest piece of a G1CollectedHeap that
57 // can be collected independently.
58
59 // Each heap region is self-contained. top() and end() can never
60 // be set beyond the end of the region. For humongous objects,
61 // the first region is a StartsHumongous region. If the humongous
// Index of this region in the optional collection set, or InvalidCSetIndex
// if the region is not part of the optional collection set.
uint index_in_opt_cset() const {
  assert(has_index_in_opt_cset(), "Opt cset index not set.");
  return _index_in_opt_cset;
}
// True iff an optional collection set index has been assigned to this region.
bool has_index_in_opt_cset() const { return _index_in_opt_cset != InvalidCSetIndex; }
void set_index_in_opt_cset(uint index) { _index_in_opt_cset = index; }
// Reset the index back to the InvalidCSetIndex sentinel.
void clear_index_in_opt_cset() { _index_in_opt_cset = InvalidCSetIndex; }
535
// Compute and cache this region's GC efficiency; the cached value is
// returned by gc_efficiency() below.
void calc_gc_efficiency(void);
double gc_efficiency() const { return _gc_efficiency;}

// Index of this region within the young part of the collection set.
// 0 is the "not set" sentinel (see the asserts in the setter below).
uint young_index_in_cset() const { return _young_index_in_cset; }
void clear_young_index_in_cset() { _young_index_in_cset = 0; }
void set_young_index_in_cset(uint index) {
  assert(index != UINT_MAX, "just checking");
  assert(index != 0, "just checking");  // 0 is reserved for the cleared state
  assert(is_young(), "pre-condition");
  _young_index_in_cset = index;
}
547
// Age of this region within its installed survivor rate group.
int age_in_surv_rate_group() const;
// True iff a valid (non-negative) age index has been assigned.
bool has_valid_age_in_surv_rate() const;

// True iff a survivor rate group is installed for this region.
bool has_surv_rate_group() const;

// Predicted survival rate for this region per the given predictor —
// presumably based on the region's age in its group; see the .cpp file.
double surv_rate_prediction(G1Predictions const& predictor) const;

// Associate / disassociate this region with a survivor rate group.
void install_surv_rate_group(SurvRateGroup* surv_rate_group);
void uninstall_surv_rate_group();

// Report to the installed survivor rate group how many words survived
// a collection.
void record_surv_words_in_group(size_t words_survived);
559
// Determine if an object has been allocated since the last
// mark performed by the collector. This returns true iff the object
// is within the unmarked area of the region, i.e. at or above the
// previous top-at-mark-start.
bool obj_allocated_since_prev_marking(oop obj) const {
  return (HeapWord *) obj >= prev_top_at_mark_start();
}
// Same test, but relative to the in-progress (next) marking's
// top-at-mark-start.
bool obj_allocated_since_next_marking(oop obj) const {
  return (HeapWord *) obj >= next_top_at_mark_start();
}
569
570 // Iterate over the objects overlapping the given memory region, applying cl
571 // to all references in the region. This is a helper for
572 // G1RemSet::refine_card*, and is tightly coupled with them.
573 // mr must not be empty. Must be trimmed to the allocated/parseable space in this region.
574 // This region must be old or humongous.
575 // Returns the next unscanned address if the designated objects were successfully
576 // processed, NULL if an unparseable part of the heap was encountered (That should
577 // only happen when invoked concurrently with the mutator).
578 template <bool is_gc_active, class Closure>
|