41 // ReferenceProcessor class abstracts away from a generational setting
42 // by using only a heap interval (called "span" below), thus allowing
43 // its use in a straightforward manner in a general, non-generational
44 // setting.
45 //
46 // The basic idea is that each ReferenceProcessor object concerns
47 // itself with ("weak") reference processing in a specific "span"
48 // of the heap of interest to a specific collector. Currently,
49 // the span is a convex interval of the heap, but, efficiency
50 // apart, there seems to be no reason it couldn't be extended
51 // (with appropriate modifications) to any "non-convex interval".
52
53 // forward references
54 class ReferencePolicy;
55 class AbstractRefProcTaskExecutor;
56
57 // List of discovered references.
58 class DiscoveredList {
59 public:
60 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
61 oop head() const {
62 return UseCompressedOops ? oopDesc::decode_heap_oop(_compressed_head) :
63 _oop_head;
64 }
65 HeapWord* adr_head() {
66 return UseCompressedOops ? (HeapWord*)&_compressed_head :
67 (HeapWord*)&_oop_head;
68 }
69 void set_head(oop o) {
70 if (UseCompressedOops) {
71 // Must compress the head ptr.
72 _compressed_head = oopDesc::encode_heap_oop(o);
73 } else {
74 _oop_head = o;
75 }
76 }
77 bool is_empty() const { return head() == NULL; }
78 size_t length() { return _len; }
79 void set_length(size_t len) { _len = len; }
80 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
81 void dec_length(size_t dec) { _len -= dec; }
82 private:
83 // Set value depending on UseCompressedOops. This could be a template class
84 // but then we have to fix all the instantiations and declarations that use this class.
85 oop _oop_head;
86 narrowOop _compressed_head;
87 size_t _len;
88 };
89
90 // Iterator for the list of discovered references.
91 class DiscoveredListIterator {
92 private:
93 DiscoveredList& _refs_list;
94 HeapWord* _prev_next;
95 oop _prev;
96 oop _ref;
97 HeapWord* _discovered_addr;
98 oop _next;
99 HeapWord* _referent_addr;
100 oop _referent;
101 OopClosure* _keep_alive;
102 BoolObjectClosure* _is_alive;
103
104 DEBUG_ONLY(
105 oop _first_seen; // cyclic linked list check
106 )
107
108 NOT_PRODUCT(
109 size_t _processed;
110 size_t _removed;
111 )
112
113 public:
114 inline DiscoveredListIterator(DiscoveredList& refs_list,
115 OopClosure* keep_alive,
116 BoolObjectClosure* is_alive):
117 _refs_list(refs_list),
118 _prev_next(refs_list.adr_head()),
119 _prev(NULL),
120 _ref(refs_list.head()),
121 #ifdef ASSERT
122 _first_seen(refs_list.head()),
123 #endif
124 #ifndef PRODUCT
125 _processed(0),
126 _removed(0),
127 #endif
128 _next(NULL),
129 _keep_alive(keep_alive),
130 _is_alive(is_alive)
131 { }
132
133 // End Of List: true while there is a current Reference, i.e. the
// iterator has not yet walked past the last discovered entry.
134 inline bool has_next() const { return _ref != NULL; }
135
136 // Get oop to the current (discovered) Reference object itself.
137 inline oop obj() const { return _ref; }
138
139 // Get oop to the referent object of the current Reference.
// NOTE(review): _referent appears to be populated by the load routine
// described below ("Loads data for the current reference"), so this is
// presumably only meaningful after that has run — confirm at call sites.
140 inline oop referent() const { return _referent; }
141
142 // Returns true if referent is alive, as judged by the _is_alive
// closure supplied at construction.
143 inline bool is_referent_alive() const {
144 return _is_alive->do_object_b(_referent);
145 }
146
147 // Loads data for the current reference.
148 // The "allow_null_referent" argument tells us to allow for the possibility
149 // of a NULL referent in the discovered Reference object. This typically
150 // happens in the case of concurrent collectors that may have done the
151 // discovery concurrently, or interleaved, with mutator execution.
|
41 // ReferenceProcessor class abstracts away from a generational setting
42 // by using only a heap interval (called "span" below), thus allowing
43 // its use in a straightforward manner in a general, non-generational
44 // setting.
45 //
46 // The basic idea is that each ReferenceProcessor object concerns
47 // itself with ("weak") reference processing in a specific "span"
48 // of the heap of interest to a specific collector. Currently,
49 // the span is a convex interval of the heap, but, efficiency
50 // apart, there seems to be no reason it couldn't be extended
51 // (with appropriate modifications) to any "non-convex interval".
52
53 // forward references
54 class ReferencePolicy;
55 class AbstractRefProcTaskExecutor;
56
57 // List of discovered references.
58 class DiscoveredList {
59 public:
60 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
61 inline oop head() const;
62 HeapWord* adr_head() {
63 return UseCompressedOops ? (HeapWord*)&_compressed_head :
64 (HeapWord*)&_oop_head;
65 }
66 inline void set_head(oop o);
67 inline bool is_empty() const;
68 size_t length() { return _len; }
69 void set_length(size_t len) { _len = len; }
70 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
71 void dec_length(size_t dec) { _len -= dec; }
72 private:
73 // Set value depending on UseCompressedOops. This could be a template class
74 // but then we have to fix all the instantiations and declarations that use this class.
75 oop _oop_head;
76 narrowOop _compressed_head;
77 size_t _len;
78 };
79
80 // Iterator for the list of discovered references.
81 class DiscoveredListIterator {
82 private:
83 DiscoveredList& _refs_list;
84 HeapWord* _prev_next;
85 oop _prev;
86 oop _ref;
87 HeapWord* _discovered_addr;
88 oop _next;
89 HeapWord* _referent_addr;
90 oop _referent;
91 OopClosure* _keep_alive;
92 BoolObjectClosure* _is_alive;
93
94 DEBUG_ONLY(
95 oop _first_seen; // cyclic linked list check
96 )
97
98 NOT_PRODUCT(
99 size_t _processed;
100 size_t _removed;
101 )
102
103 public:
// Construct an iterator positioned at the head of refs_list; the
// keep_alive and is_alive closures are stored for use during iteration
// (see is_referent_alive below).  Declared inline; defined out of line.
104 inline DiscoveredListIterator(DiscoveredList& refs_list,
105                               OopClosure* keep_alive,
106                               BoolObjectClosure* is_alive);
107
108 // End Of List: true while there is a current Reference, i.e. the
// iterator has not yet walked past the last discovered entry.
109 inline bool has_next() const { return _ref != NULL; }
110
111 // Get oop to the current (discovered) Reference object itself.
112 inline oop obj() const { return _ref; }
113
114 // Get oop to the referent object of the current Reference.
// NOTE(review): _referent appears to be populated by the load routine
// described below ("Loads data for the current reference"), so this is
// presumably only meaningful after that has run — confirm at call sites.
115 inline oop referent() const { return _referent; }
116
117 // Returns true if referent is alive, as judged by the _is_alive
// closure supplied at construction.
118 inline bool is_referent_alive() const {
119 return _is_alive->do_object_b(_referent);
120 }
121
122 // Loads data for the current reference.
123 // The "allow_null_referent" argument tells us to allow for the possibility
124 // of a NULL referent in the discovered Reference object. This typically
125 // happens in the case of concurrent collectors that may have done the
126 // discovery concurrently, or interleaved, with mutator execution.
|