Print this page
rev 2691 : [mq]: g1-reference-processing
Split |
Close |
Expand all |
Collapse all |
--- old/src/share/vm/memory/referenceProcessor.cpp
+++ new/src/share/vm/memory/referenceProcessor.cpp
1 1 /*
2 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
3 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 4 *
5 5 * This code is free software; you can redistribute it and/or modify it
6 6 * under the terms of the GNU General Public License version 2 only, as
7 7 * published by the Free Software Foundation.
8 8 *
9 9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 12 * version 2 for more details (a copy is included in the LICENSE file that
13 13 * accompanied this code).
14 14 *
15 15 * You should have received a copy of the GNU General Public License version
16 16 * 2 along with this work; if not, write to the Free Software Foundation,
17 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 18 *
19 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 20 * or visit www.oracle.com if you need additional information or have any
21 21 * questions.
22 22 *
23 23 */
24 24
25 25 #include "precompiled.hpp"
26 26 #include "classfile/javaClasses.hpp"
27 27 #include "classfile/systemDictionary.hpp"
↓ open down ↓ |
27 lines elided |
↑ open up ↑ |
28 28 #include "gc_interface/collectedHeap.hpp"
29 29 #include "gc_interface/collectedHeap.inline.hpp"
30 30 #include "memory/referencePolicy.hpp"
31 31 #include "memory/referenceProcessor.hpp"
32 32 #include "oops/oop.inline.hpp"
33 33 #include "runtime/java.hpp"
34 34 #include "runtime/jniHandles.hpp"
35 35
36 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
37 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
38 -const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
39 38 bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
40 39
41 -// List of discovered references.
42 -class DiscoveredList {
43 -public:
44 - DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
45 - oop head() const {
46 - return UseCompressedOops ? oopDesc::decode_heap_oop(_compressed_head) :
47 - _oop_head;
48 - }
49 - HeapWord* adr_head() {
50 - return UseCompressedOops ? (HeapWord*)&_compressed_head :
51 - (HeapWord*)&_oop_head;
52 - }
53 - void set_head(oop o) {
54 - if (UseCompressedOops) {
55 - // Must compress the head ptr.
56 - _compressed_head = oopDesc::encode_heap_oop(o);
57 - } else {
58 - _oop_head = o;
59 - }
60 - }
61 - bool empty() const { return head() == NULL; }
62 - size_t length() { return _len; }
63 - void set_length(size_t len) { _len = len; }
64 - void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
65 - void dec_length(size_t dec) { _len -= dec; }
66 -private:
67 - // Set value depending on UseCompressedOops. This could be a template class
68 - // but then we have to fix all the instantiations and declarations that use this class.
69 - oop _oop_head;
70 - narrowOop _compressed_head;
71 - size_t _len;
72 -};
73 -
74 40 void referenceProcessor_init() {
75 41 ReferenceProcessor::init_statics();
76 42 }
77 43
78 44 void ReferenceProcessor::init_statics() {
79 45 // Initialize the master soft ref clock.
80 46 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
81 47
82 48 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
83 49 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
84 50 NOT_COMPILER2(LRUCurrentHeapPolicy());
85 51 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
86 52 vm_exit_during_initialization("Could not allocate reference policy object");
87 53 }
88 54 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
89 55 RefDiscoveryPolicy == ReferentBasedDiscovery,
90 56 "Unrecongnized RefDiscoveryPolicy");
91 57 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
92 58 }
93 59
94 60 ReferenceProcessor::ReferenceProcessor(MemRegion span,
95 61 bool mt_processing,
96 62 int mt_processing_degree,
97 63 bool mt_discovery,
98 64 int mt_discovery_degree,
99 65 bool atomic_discovery,
100 66 BoolObjectClosure* is_alive_non_header,
101 67 bool discovered_list_needs_barrier) :
102 68 _discovering_refs(false),
103 69 _enqueuing_is_done(false),
104 70 _is_alive_non_header(is_alive_non_header),
↓ open down ↓ |
21 lines elided |
↑ open up ↑ |
105 71 _discovered_list_needs_barrier(discovered_list_needs_barrier),
106 72 _bs(NULL),
107 73 _processing_is_mt(mt_processing),
108 74 _next_id(0)
109 75 {
110 76 _span = span;
111 77 _discovery_is_atomic = atomic_discovery;
112 78 _discovery_is_mt = mt_discovery;
113 79 _num_q = MAX2(1, mt_processing_degree);
114 80 _max_num_q = MAX2(_num_q, mt_discovery_degree);
115 - _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
81 + _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList,
82 + _max_num_q * number_of_subclasses_of_ref());
116 83 if (_discoveredSoftRefs == NULL) {
117 84 vm_exit_during_initialization("Could not allocated RefProc Array");
118 85 }
119 86 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
120 87 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
121 88 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 122  89   // Initialize all entries to NULL
123 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
90 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
124 91 _discoveredSoftRefs[i].set_head(NULL);
125 92 _discoveredSoftRefs[i].set_length(0);
126 93 }
127 94 // If we do barriers, cache a copy of the barrier set.
128 95 if (discovered_list_needs_barrier) {
129 96 _bs = Universe::heap()->barrier_set();
130 97 }
131 98 setup_policy(false /* default soft ref policy */);
132 99 }
133 100
134 101 #ifndef PRODUCT
135 102 void ReferenceProcessor::verify_no_references_recorded() {
136 103 guarantee(!_discovering_refs, "Discovering refs?");
137 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
138 - guarantee(_discoveredSoftRefs[i].empty(),
104 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
105 + guarantee(_discoveredSoftRefs[i].is_empty(),
139 106 "Found non-empty discovered list");
140 107 }
141 108 }
142 109 #endif
143 110
144 111 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
145 - // Should this instead be
146 - // for (int i = 0; i < subclasses_of_ref; i++_ {
112 + // An alternative implementation of this routine
113 + // could use the following nested loop:
114 + //
 115 +   // for (int i = 0; i < number_of_subclasses_of_ref(); i++) {
147 116 // for (int j = 0; j < _num_q; j++) {
148 117 // int index = i * _max_num_q + j;
149 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
118 +
119 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
150 120 if (UseCompressedOops) {
151 121 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
152 122 } else {
153 123 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
154 124 }
155 125 }
156 126 }
157 127
158 128 void ReferenceProcessor::update_soft_ref_master_clock() {
159 129 // Update (advance) the soft ref master clock field. This must be done
160 130 // after processing the soft ref list.
161 131 jlong now = os::javaTimeMillis();
162 132 jlong clock = java_lang_ref_SoftReference::clock();
163 133 NOT_PRODUCT(
164 134 if (now < clock) {
165 135 warning("time warp: %d to %d", clock, now);
166 136 }
167 137 )
168 138 // In product mode, protect ourselves from system time being adjusted
169 139 // externally and going backward; see note in the implementation of
170 140 // GenCollectedHeap::time_since_last_gc() for the right way to fix
171 141 // this uniformly throughout the VM; see bug-id 4741166. XXX
172 142 if (now > clock) {
173 143 java_lang_ref_SoftReference::set_clock(now);
174 144 }
175 145 // Else leave clock stalled at its old value until time progresses
176 146 // past clock value.
177 147 }
178 148
179 149 void ReferenceProcessor::process_discovered_references(
180 150 BoolObjectClosure* is_alive,
181 151 OopClosure* keep_alive,
182 152 VoidClosure* complete_gc,
183 153 AbstractRefProcTaskExecutor* task_executor) {
184 154 NOT_PRODUCT(verify_ok_to_handle_reflists());
185 155
186 156 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
187 157 // Stop treating discovered references specially.
188 158 disable_discovery();
189 159
190 160 bool trace_time = PrintGCDetails && PrintReferenceGC;
191 161 // Soft references
192 162 {
193 163 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
194 164 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
195 165 is_alive, keep_alive, complete_gc, task_executor);
196 166 }
197 167
198 168 update_soft_ref_master_clock();
199 169
200 170 // Weak references
201 171 {
202 172 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
203 173 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
204 174 is_alive, keep_alive, complete_gc, task_executor);
205 175 }
206 176
207 177 // Final references
208 178 {
209 179 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
210 180 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
211 181 is_alive, keep_alive, complete_gc, task_executor);
212 182 }
213 183
214 184 // Phantom references
215 185 {
216 186 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
217 187 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
218 188 is_alive, keep_alive, complete_gc, task_executor);
219 189 }
220 190
221 191 // Weak global JNI references. It would make more sense (semantically) to
222 192 // traverse these simultaneously with the regular weak references above, but
223 193 // that is not how the JDK1.2 specification is. See #4126360. Native code can
224 194 // thus use JNI weak references to circumvent the phantom references and
225 195 // resurrect a "post-mortem" object.
226 196 {
227 197 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
228 198 if (task_executor != NULL) {
229 199 task_executor->set_single_threaded_mode();
230 200 }
231 201 process_phaseJNI(is_alive, keep_alive, complete_gc);
232 202 }
233 203 }
234 204
235 205 #ifndef PRODUCT
236 206 // Calculate the number of jni handles.
237 207 uint ReferenceProcessor::count_jni_refs() {
238 208 class AlwaysAliveClosure: public BoolObjectClosure {
239 209 public:
240 210 virtual bool do_object_b(oop obj) { return true; }
241 211 virtual void do_object(oop obj) { assert(false, "Don't call"); }
242 212 };
243 213
244 214 class CountHandleClosure: public OopClosure {
245 215 private:
246 216 int _count;
247 217 public:
248 218 CountHandleClosure(): _count(0) {}
249 219 void do_oop(oop* unused) { _count++; }
250 220 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
251 221 int count() { return _count; }
252 222 };
253 223 CountHandleClosure global_handle_count;
254 224 AlwaysAliveClosure always_alive;
255 225 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
256 226 return global_handle_count.count();
257 227 }
258 228 #endif
259 229
260 230 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
261 231 OopClosure* keep_alive,
262 232 VoidClosure* complete_gc) {
263 233 #ifndef PRODUCT
264 234 if (PrintGCDetails && PrintReferenceGC) {
265 235 unsigned int count = count_jni_refs();
266 236 gclog_or_tty->print(", %u refs", count);
267 237 }
268 238 #endif
269 239 JNIHandles::weak_oops_do(is_alive, keep_alive);
270 240 complete_gc->do_void();
271 241 }
272 242
273 243
274 244 template <class T>
275 245 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
276 246 AbstractRefProcTaskExecutor* task_executor) {
277 247
278 248 // Remember old value of pending references list
279 249 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
280 250 T old_pending_list_value = *pending_list_addr;
281 251
282 252 // Enqueue references that are not made active again, and
283 253 // clear the decks for the next collection (cycle).
284 254 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
285 255 // Do the oop-check on pending_list_addr missed in
286 256 // enqueue_discovered_reflist. We should probably
287 257 // do a raw oop_check so that future such idempotent
288 258 // oop_stores relying on the oop-check side-effect
289 259 // may be elided automatically and safely without
290 260 // affecting correctness.
291 261 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
292 262
293 263 // Stop treating discovered references specially.
294 264 ref->disable_discovery();
295 265
296 266 // Return true if new pending references were added
297 267 return old_pending_list_value != *pending_list_addr;
298 268 }
299 269
300 270 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
301 271 NOT_PRODUCT(verify_ok_to_handle_reflists());
302 272 if (UseCompressedOops) {
303 273 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
304 274 } else {
305 275 return enqueue_discovered_ref_helper<oop>(this, task_executor);
306 276 }
307 277 }
308 278
309 279 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
310 280 HeapWord* pending_list_addr) {
311 281 // Given a list of refs linked through the "discovered" field
312 282 // (java.lang.ref.Reference.discovered), self-loop their "next" field
313 283 // thus distinguishing them from active References, then
314 284 // prepend them to the pending list.
315 285 // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
316 286 // the "next" field is used to chain the pending list, not the discovered
317 287 // field.
318 288
319 289 if (TraceReferenceGC && PrintGCDetails) {
320 290 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
321 291 INTPTR_FORMAT, (address)refs_list.head());
322 292 }
323 293
324 294 oop obj = NULL;
325 295 oop next_d = refs_list.head();
326 296 if (pending_list_uses_discovered_field()) { // New behaviour
327 297 // Walk down the list, self-looping the next field
328 298 // so that the References are not considered active.
329 299 while (obj != next_d) {
330 300 obj = next_d;
331 301 assert(obj->is_instanceRef(), "should be reference object");
332 302 next_d = java_lang_ref_Reference::discovered(obj);
333 303 if (TraceReferenceGC && PrintGCDetails) {
334 304 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
335 305 obj, next_d);
336 306 }
337 307 assert(java_lang_ref_Reference::next(obj) == NULL,
338 308 "Reference not active; should not be discovered");
339 309 // Self-loop next, so as to make Ref not active.
340 310 java_lang_ref_Reference::set_next(obj, obj);
341 311 if (next_d == obj) { // obj is last
 342 312         // Swap refs_list into pending_list_addr and
343 313 // set obj's discovered to what we read from pending_list_addr.
344 314 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
345 315 // Need oop_check on pending_list_addr above;
346 316 // see special oop-check code at the end of
347 317 // enqueue_discovered_reflists() further below.
348 318 java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL
349 319 }
350 320 }
351 321 } else { // Old behaviour
352 322 // Walk down the list, copying the discovered field into
353 323 // the next field and clearing the discovered field.
354 324 while (obj != next_d) {
355 325 obj = next_d;
356 326 assert(obj->is_instanceRef(), "should be reference object");
357 327 next_d = java_lang_ref_Reference::discovered(obj);
358 328 if (TraceReferenceGC && PrintGCDetails) {
359 329 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
360 330 obj, next_d);
361 331 }
362 332 assert(java_lang_ref_Reference::next(obj) == NULL,
363 333 "The reference should not be enqueued");
364 334 if (next_d == obj) { // obj is last
 365 335         // Swap refs_list into pending_list_addr and
366 336 // set obj's next to what we read from pending_list_addr.
367 337 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
368 338 // Need oop_check on pending_list_addr above;
369 339 // see special oop-check code at the end of
370 340 // enqueue_discovered_reflists() further below.
371 341 if (old == NULL) {
372 342 // obj should be made to point to itself, since
373 343 // pending list was empty.
374 344 java_lang_ref_Reference::set_next(obj, obj);
375 345 } else {
376 346 java_lang_ref_Reference::set_next(obj, old);
377 347 }
378 348 } else {
379 349 java_lang_ref_Reference::set_next(obj, next_d);
380 350 }
381 351 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
382 352 }
383 353 }
384 354 }
385 355
386 356 // Parallel enqueue task
387 357 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
388 358 public:
389 359 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
390 360 DiscoveredList discovered_refs[],
391 361 HeapWord* pending_list_addr,
392 362 int n_queues)
393 363 : EnqueueTask(ref_processor, discovered_refs,
394 364 pending_list_addr, n_queues)
395 365 { }
396 366
↓ open down ↓ |
237 lines elided |
↑ open up ↑ |
397 367 virtual void work(unsigned int work_id) {
398 368 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
399 369 // Simplest first cut: static partitioning.
400 370 int index = work_id;
401 371 // The increment on "index" must correspond to the maximum number of queues
402 372 // (n_queues) with which that ReferenceProcessor was created. That
403 373 // is because of the "clever" way the discovered references lists were
404 374 // allocated and are indexed into.
405 375 assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
406 376 for (int j = 0;
407 - j < subclasses_of_ref;
377 + j < ReferenceProcessor::number_of_subclasses_of_ref();
408 378 j++, index += _n_queues) {
409 379 _ref_processor.enqueue_discovered_reflist(
410 380 _refs_lists[index], _pending_list_addr);
411 381 _refs_lists[index].set_head(NULL);
412 382 _refs_lists[index].set_length(0);
413 383 }
414 384 }
415 385 };
416 386
417 387 // Enqueue references that are not made active again
418 388 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
419 389 AbstractRefProcTaskExecutor* task_executor) {
420 390 if (_processing_is_mt && task_executor != NULL) {
421 391 // Parallel code
422 392 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
423 393 pending_list_addr, _max_num_q);
424 394 task_executor->execute(tsk);
425 395 } else {
426 396 // Serial code: call the parent class's implementation
427 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
397 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
428 398 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
429 399 _discoveredSoftRefs[i].set_head(NULL);
430 400 _discoveredSoftRefs[i].set_length(0);
431 401 }
432 402 }
433 403 }
434 404
435 -// Iterator for the list of discovered references.
436 -class DiscoveredListIterator {
437 -public:
438 - inline DiscoveredListIterator(DiscoveredList& refs_list,
439 - OopClosure* keep_alive,
440 - BoolObjectClosure* is_alive);
441 -
442 - // End Of List.
443 - inline bool has_next() const { return _ref != NULL; }
444 -
445 - // Get oop to the Reference object.
446 - inline oop obj() const { return _ref; }
447 -
448 - // Get oop to the referent object.
449 - inline oop referent() const { return _referent; }
450 -
451 - // Returns true if referent is alive.
452 - inline bool is_referent_alive() const;
453 -
454 - // Loads data for the current reference.
455 - // The "allow_null_referent" argument tells us to allow for the possibility
456 - // of a NULL referent in the discovered Reference object. This typically
457 - // happens in the case of concurrent collectors that may have done the
458 - // discovery concurrently, or interleaved, with mutator execution.
459 - inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
460 -
461 - // Move to the next discovered reference.
462 - inline void next();
463 -
464 - // Remove the current reference from the list
465 - inline void remove();
466 -
467 - // Make the Reference object active again.
468 - inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
469 -
470 - // Make the referent alive.
471 - inline void make_referent_alive() {
472 - if (UseCompressedOops) {
473 - _keep_alive->do_oop((narrowOop*)_referent_addr);
474 - } else {
475 - _keep_alive->do_oop((oop*)_referent_addr);
476 - }
477 - }
478 -
479 - // Update the discovered field.
480 - inline void update_discovered() {
481 - // First _prev_next ref actually points into DiscoveredList (gross).
482 - if (UseCompressedOops) {
483 - if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
484 - _keep_alive->do_oop((narrowOop*)_prev_next);
485 - }
486 - } else {
487 - if (!oopDesc::is_null(*(oop*)_prev_next)) {
488 - _keep_alive->do_oop((oop*)_prev_next);
489 - }
490 - }
491 - }
492 -
493 - // NULL out referent pointer.
494 - inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
495 -
496 - // Statistics
497 - NOT_PRODUCT(
498 - inline size_t processed() const { return _processed; }
499 - inline size_t removed() const { return _removed; }
500 - )
501 -
502 - inline void move_to_next();
503 -
504 -private:
505 - DiscoveredList& _refs_list;
506 - HeapWord* _prev_next;
507 - oop _prev;
508 - oop _ref;
509 - HeapWord* _discovered_addr;
510 - oop _next;
511 - HeapWord* _referent_addr;
512 - oop _referent;
513 - OopClosure* _keep_alive;
514 - BoolObjectClosure* _is_alive;
515 - DEBUG_ONLY(
516 - oop _first_seen; // cyclic linked list check
517 - )
518 - NOT_PRODUCT(
519 - size_t _processed;
520 - size_t _removed;
521 - )
522 -};
523 -
524 -inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list,
525 - OopClosure* keep_alive,
526 - BoolObjectClosure* is_alive)
527 - : _refs_list(refs_list),
528 - _prev_next(refs_list.adr_head()),
529 - _prev(NULL),
530 - _ref(refs_list.head()),
531 -#ifdef ASSERT
532 - _first_seen(refs_list.head()),
533 -#endif
534 -#ifndef PRODUCT
535 - _processed(0),
536 - _removed(0),
537 -#endif
538 - _next(NULL),
539 - _keep_alive(keep_alive),
540 - _is_alive(is_alive)
541 -{ }
542 -
543 -inline bool DiscoveredListIterator::is_referent_alive() const {
544 - return _is_alive->do_object_b(_referent);
545 -}
546 -
547 -inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
405 +void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
548 406 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
549 407 oop discovered = java_lang_ref_Reference::discovered(_ref);
550 408 assert(_discovered_addr && discovered->is_oop_or_null(),
551 409 "discovered field is bad");
552 410 _next = discovered;
553 411 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
554 412 _referent = java_lang_ref_Reference::referent(_ref);
555 413 assert(Universe::heap()->is_in_reserved_or_null(_referent),
556 414 "Wrong oop found in java.lang.Reference object");
557 415 assert(allow_null_referent ?
558 416 _referent->is_oop_or_null()
559 417 : _referent->is_oop(),
560 418 "bad referent");
561 419 }
562 420
563 -inline void DiscoveredListIterator::next() {
564 - _prev_next = _discovered_addr;
565 - _prev = _ref;
566 - move_to_next();
567 -}
568 -
569 -inline void DiscoveredListIterator::remove() {
421 +void DiscoveredListIterator::remove() {
570 422 assert(_ref->is_oop(), "Dropping a bad reference");
571 423 oop_store_raw(_discovered_addr, NULL);
572 424
573 425 // First _prev_next ref actually points into DiscoveredList (gross).
574 426 oop new_next;
575 427 if (_next == _ref) {
576 428 // At the end of the list, we should make _prev point to itself.
577 429 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
578 430 // and _prev will be NULL.
579 431 new_next = _prev;
580 432 } else {
581 433 new_next = _next;
582 434 }
583 435
584 436 if (UseCompressedOops) {
↓ open down ↓ |
5 lines elided |
↑ open up ↑ |
585 437 // Remove Reference object from list.
586 438 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
587 439 } else {
588 440 // Remove Reference object from list.
589 441 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
590 442 }
591 443 NOT_PRODUCT(_removed++);
592 444 _refs_list.dec_length(1);
593 445 }
594 446
595 -inline void DiscoveredListIterator::move_to_next() {
596 - if (_ref == _next) {
597 - // End of the list.
598 - _ref = NULL;
447 +// Make the Reference object active again.
448 +void DiscoveredListIterator::make_active() {
449 + // For G1 we don't want to use set_next - it
450 + // will dirty the card for the next field of
451 + // the reference object and will fail
452 + // CT verification.
453 + if (UseG1GC) {
454 + BarrierSet* bs = oopDesc::bs();
455 + HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
456 +
457 + if (UseCompressedOops) {
458 + bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
459 + } else {
460 + bs->write_ref_field_pre((oop*)next_addr, NULL);
461 + }
462 + java_lang_ref_Reference::set_next_raw(_ref, NULL);
599 463 } else {
600 - _ref = _next;
464 + java_lang_ref_Reference::set_next(_ref, NULL);
601 465 }
602 - assert(_ref != _first_seen, "cyclic ref_list found");
603 - NOT_PRODUCT(_processed++);
466 +}
467 +
468 +void DiscoveredListIterator::clear_referent() {
469 + oop_store_raw(_referent_addr, NULL);
604 470 }
605 471
606 472 // NOTE: process_phase*() are largely similar, and at a high level
607 473 // merely iterate over the extant list applying a predicate to
608 474 // each of its elements and possibly removing that element from the
609 475 // list and applying some further closures to that element.
610 476 // We should consider the possibility of replacing these
611 477 // process_phase*() methods by abstracting them into
612 478 // a single general iterator invocation that receives appropriate
613 479 // closures that accomplish this work.
614 480
615 481 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
616 482 // referents are not alive, but that should be kept alive for policy reasons.
617 483 // Keep alive the transitive closure of all such referents.
618 484 void
619 485 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
620 486 ReferencePolicy* policy,
621 487 BoolObjectClosure* is_alive,
622 488 OopClosure* keep_alive,
623 489 VoidClosure* complete_gc) {
624 490 assert(policy != NULL, "Must have a non-NULL policy");
625 491 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
626 492 // Decide which softly reachable refs should be kept alive.
627 493 while (iter.has_next()) {
628 494 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
629 495 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
630 496 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
631 497 if (TraceReferenceGC) {
632 498 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
633 499 iter.obj(), iter.obj()->blueprint()->internal_name());
634 500 }
635 501 // Remove Reference object from list
636 502 iter.remove();
637 503 // Make the Reference object active again
638 504 iter.make_active();
639 505 // keep the referent around
640 506 iter.make_referent_alive();
641 507 iter.move_to_next();
642 508 } else {
643 509 iter.next();
644 510 }
645 511 }
646 512 // Close the reachable set
647 513 complete_gc->do_void();
648 514 NOT_PRODUCT(
649 515 if (PrintGCDetails && TraceReferenceGC) {
650 516 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
651 517 "discovered Refs by policy, from list " INTPTR_FORMAT,
652 518 iter.removed(), iter.processed(), (address)refs_list.head());
653 519 }
654 520 )
655 521 }
656 522
657 523 // Traverse the list and remove any Refs that are not active, or
658 524 // whose referents are either alive or NULL.
659 525 void
660 526 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
661 527 BoolObjectClosure* is_alive,
662 528 OopClosure* keep_alive) {
663 529 assert(discovery_is_atomic(), "Error");
664 530 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
665 531 while (iter.has_next()) {
666 532 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
667 533 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
668 534 assert(next == NULL, "Should not discover inactive Reference");
669 535 if (iter.is_referent_alive()) {
670 536 if (TraceReferenceGC) {
671 537 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
672 538 iter.obj(), iter.obj()->blueprint()->internal_name());
673 539 }
674 540 // The referent is reachable after all.
675 541 // Remove Reference object from list.
676 542 iter.remove();
677 543 // Update the referent pointer as necessary: Note that this
678 544 // should not entail any recursive marking because the
679 545 // referent must already have been traversed.
680 546 iter.make_referent_alive();
681 547 iter.move_to_next();
682 548 } else {
683 549 iter.next();
684 550 }
685 551 }
686 552 NOT_PRODUCT(
687 553 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
688 554 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
689 555 "Refs in discovered list " INTPTR_FORMAT,
690 556 iter.removed(), iter.processed(), (address)refs_list.head());
691 557 }
692 558 )
693 559 }
694 560
695 561 void
696 562 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
697 563 BoolObjectClosure* is_alive,
698 564 OopClosure* keep_alive,
699 565 VoidClosure* complete_gc) {
700 566 assert(!discovery_is_atomic(), "Error");
701 567 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
702 568 while (iter.has_next()) {
703 569 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
704 570 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
705 571 oop next = java_lang_ref_Reference::next(iter.obj());
706 572 if ((iter.referent() == NULL || iter.is_referent_alive() ||
707 573 next != NULL)) {
708 574 assert(next->is_oop_or_null(), "bad next field");
709 575 // Remove Reference object from list
710 576 iter.remove();
711 577 // Trace the cohorts
712 578 iter.make_referent_alive();
713 579 if (UseCompressedOops) {
714 580 keep_alive->do_oop((narrowOop*)next_addr);
715 581 } else {
716 582 keep_alive->do_oop((oop*)next_addr);
717 583 }
718 584 iter.move_to_next();
719 585 } else {
720 586 iter.next();
721 587 }
722 588 }
723 589 // Now close the newly reachable set
724 590 complete_gc->do_void();
725 591 NOT_PRODUCT(
726 592 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
727 593 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
728 594 "Refs in discovered list " INTPTR_FORMAT,
729 595 iter.removed(), iter.processed(), (address)refs_list.head());
730 596 }
731 597 )
732 598 }
733 599
734 600 // Traverse the list and process the referents, by either
735 601 // clearing them or keeping them (and their reachable
736 602 // closure) alive.
// Phase 3 of reference processing.
//   refs_list      - the discovered list; every element on it is retained
//                    (the loop only ever calls iter.next(), never remove()).
//   clear_referent - true:  NULL out each referent (Soft/Weak/Phantom-style
//                           processing, per the clear_referent call sites);
//                    false: keep each referent (and its transitive closure)
//                           alive via make_referent_alive().
// The trailing iter.update_discovered() writes the saved "next" pointer for
// the final element; complete_gc->do_void() then closes the newly-marked set.
// NOTE(review): the exact linkage written by update_discovered() is defined
// by DiscoveredListIterator (declared elsewhere) — confirm there.
737 603 void
738 604 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
739 605 bool clear_referent,
740 606 BoolObjectClosure* is_alive,
741 607 OopClosure* keep_alive,
742 608 VoidClosure* complete_gc) {
743 609 ResourceMark rm;
744 610 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
745 611 while (iter.has_next()) {
746 612 iter.update_discovered();
747 613 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
748 614 if (clear_referent) {
749 615 // NULL out referent pointer
750 616 iter.clear_referent();
751 617 } else {
752 618 // keep the referent around
753 619 iter.make_referent_alive();
754 620 }
755 621 if (TraceReferenceGC) {
756 622 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
757 623 clear_referent ? "cleared " : "",
758 624 iter.obj(), iter.obj()->blueprint()->internal_name());
759 625 }
// is_oop() is relaxed only for CMS, which may run concurrently with mutators.
760 626 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
761 627 iter.next();
762 628 }
763 629 // Remember to update the next pointer of the last ref.
764 630 iter.update_discovered();
765 631 // Close the reachable set
766 632 complete_gc->do_void();
767 633 }
768 634
// Unlink every Reference on refs_list by NULLing its discovered field,
// then empty the list itself. The walk stops when next == obj because a
// discovered list is terminated by a self-referential discovered field
// (see "The last ref must have its discovered field pointing to itself"
// in add_to_discovered_list_mt below).
769 635 void
770 636 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
771 637 oop obj = NULL;
772 638 oop next = refs_list.head();
773 639 while (next != obj) {
774 640 obj = next;
775 641 next = java_lang_ref_Reference::discovered(obj);
// Raw (barrier-free) store is used here; presumably safe because the list
// is being discarded wholesale — TODO(review) confirm no barrier is needed.
776 642 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
777 643 }
778 644 refs_list.set_head(NULL);
↓ open down ↓ |
165 lines elided |
↑ open up ↑ |
779 645 refs_list.set_length(0);
780 646 }
781 647
// Abandon one partially-built discovered list: simply drop all of its
// entries (delegates to clear_discovered_references above).
782 648 void
783 649 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
784 650 clear_discovered_references(refs_list);
785 651 }
786 652
// Abandon ALL partially-built discovered lists across every reference
// subclass and every queue. Indexing _discoveredSoftRefs up to
// _max_num_q * number_of_subclasses_of_ref() relies on the per-subclass
// list arrays being laid out contiguously starting at _discoveredSoftRefs
// (the same flat-indexing idiom used by clean_up_discovered_references and
// list_name below) — TODO(review) confirm layout in the header.
787 653 void ReferenceProcessor::abandon_partial_discovery() {
788 654 // loop over the lists
789 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
655 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
790 656 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
791 - gclog_or_tty->print_cr("\nAbandoning %s discovered list",
792 - list_name(i));
793 657 + gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
794 658 }
795 659 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
796 660 }
797 661 }
797 662
// MT task wrapper for process_phase1 (soft-ref policy filtering).
// Note: unlike Phase 2/3 below, the list index comes from the worker
// thread's id rather than the task argument 'i' — presumably because
// phase 1 runs before balance_queues() compacts refs into the first
// _num_q queues; TODO(review) confirm against the executor.
798 663 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
799 664 public:
800 665 RefProcPhase1Task(ReferenceProcessor& ref_processor,
801 666 DiscoveredList refs_lists[],
802 667 ReferencePolicy* policy,
803 668 bool marks_oops_alive)
804 669 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
805 670 _policy(policy)
806 671 { }
807 672 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
808 673 OopClosure& keep_alive,
809 674 VoidClosure& complete_gc)
810 675 {
811 676 Thread* thr = Thread::current();
812 677 int refs_list_index = ((WorkerThread*)thr)->id();
813 678 _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
814 679 &is_alive, &keep_alive, &complete_gc);
815 680 }
816 681 private:
817 682 ReferencePolicy* _policy; // soft-ref clearing policy applied in phase 1
818 683 };
819 684
// MT task wrapper for process_phase2 (drop refs whose referents are alive).
// Uses the task-supplied index 'i' to pick the list to process.
820 685 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
821 686 public:
822 687 RefProcPhase2Task(ReferenceProcessor& ref_processor,
823 688 DiscoveredList refs_lists[],
824 689 bool marks_oops_alive)
825 690 : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
826 691 { }
827 692 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
828 693 OopClosure& keep_alive,
829 694 VoidClosure& complete_gc)
830 695 {
831 696 _ref_processor.process_phase2(_refs_lists[i],
832 697 &is_alive, &keep_alive, &complete_gc);
833 698 }
834 699 };
835 700
// MT task wrapper for process_phase3 (clear or keep referents).
// Must index by the task argument 'i' (not the worker id), because
// balance_queues() has already compacted the refs into the first
// _num_q queues — see the retained commented-out code below.
836 701 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
837 702 public:
838 703 RefProcPhase3Task(ReferenceProcessor& ref_processor,
839 704 DiscoveredList refs_lists[],
840 705 bool clear_referent,
841 706 bool marks_oops_alive)
842 707 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
843 708 _clear_referent(clear_referent)
844 709 { }
845 710 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
846 711 OopClosure& keep_alive,
847 712 VoidClosure& complete_gc)
848 713 {
849 714 // Don't use "refs_list_index" calculated in this way because
850 715 // balance_queues() has moved the Ref's into the first n queues.
↓ open down ↓ |
48 lines elided |
↑ open up ↑ |
851 716 // Thread* thr = Thread::current();
852 717 // int refs_list_index = ((WorkerThread*)thr)->id();
853 718 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
854 719 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
855 720 &is_alive, &keep_alive, &complete_gc);
856 721 }
857 722 private:
858 723 bool _clear_referent; // forwarded to process_phase3
859 724 };
860 725
// Write 'value' into ref's discovered field, going through the normal
// (barriered) setter only when this processor's discovered lists need a
// write barrier (_discovered_list_needs_barrier, e.g. for G1's concurrent
// marking); otherwise use the raw store. Used by balance_queues() when
// splicing chains between lists.
726 +void ReferenceProcessor::set_discovered(oop ref, oop value) {
727 + if (_discovered_list_needs_barrier) {
728 + java_lang_ref_Reference::set_discovered(ref, value);
729 + } else {
730 + java_lang_ref_Reference::set_discovered_raw(ref, value);
731 + }
732 +}
733 +
861 734 // Balances reference queues.
862 735 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
863 736 // queues[0, 1, ..., _num_q-1] because only the first _num_q
864 737 // corresponding to the active workers will be processed.
// Redistribution works by repeatedly splicing a chain of refs_to_move
// elements off the head of an over-full (or to-be-unprocessed) "from"
// list onto the head of an under-full "to" list. avg_refs is the target
// ceiling per active queue (total/_num_q, rounded up).
// NOTE(review): the trace statements print size_t lengths with "%d";
// on LP64 that is a format/argument mismatch (debug-output only).
865 738 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
866 739 {
867 740 // calculate total length
868 741 size_t total_refs = 0;
869 742 if (TraceReferenceGC && PrintGCDetails) {
870 743 gclog_or_tty->print_cr("\nBalance ref_lists ");
871 744 }
872 745
873 746 for (int i = 0; i < _max_num_q; ++i) {
874 747 total_refs += ref_lists[i].length();
875 748 if (TraceReferenceGC && PrintGCDetails) {
876 749 gclog_or_tty->print("%d ", ref_lists[i].length());
877 750 }
878 751 }
879 752 if (TraceReferenceGC && PrintGCDetails) {
880 753 gclog_or_tty->print_cr(" = %d", total_refs);
881 754 }
882 755 size_t avg_refs = total_refs / _num_q + 1;
883 756 int to_idx = 0;
884 757 for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
885 758 bool move_all = false;
886 759 if (from_idx >= _num_q) {
// Queues at or beyond _num_q will not be processed: drain them entirely.
887 760 move_all = ref_lists[from_idx].length() > 0;
888 761 }
889 762 while ((ref_lists[from_idx].length() > avg_refs) ||
890 763 move_all) {
891 764 assert(to_idx < _num_q, "Sanity Check!");
892 765 if (ref_lists[to_idx].length() < avg_refs) {
893 766 // move superfluous refs
894 767 size_t refs_to_move;
895 768 // Move all the Ref's if the from queue will not be processed.
896 769 if (move_all) {
897 770 refs_to_move = MIN2(ref_lists[from_idx].length(),
898 771 avg_refs - ref_lists[to_idx].length());
899 772 } else {
900 773 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
901 774 avg_refs - ref_lists[to_idx].length());
902 775 }
903 776
904 777 assert(refs_to_move > 0, "otherwise the code below will fail");
905 778
906 779 oop move_head = ref_lists[from_idx].head();
907 780 oop move_tail = move_head;
↓ open down ↓ |
37 lines elided |
↑ open up ↑ |
908 781 oop new_head = move_head;
909 782 // find an element to split the list on
910 783 for (size_t j = 0; j < refs_to_move; ++j) {
911 784 move_tail = new_head;
912 785 new_head = java_lang_ref_Reference::discovered(new_head);
913 786 }
914 787
915 788 // Add the chain to the to list.
916 789 if (ref_lists[to_idx].head() == NULL) {
917 790 // to list is empty. Make a loop at the end.
// set_discovered() applies the write barrier only when needed (see above).
918 - java_lang_ref_Reference::set_discovered(move_tail, move_tail);
791 + set_discovered(move_tail, move_tail);
919 792 } else {
920 - java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
921 793 + set_discovered(move_tail, ref_lists[to_idx].head());
922 794 }
923 795 ref_lists[to_idx].set_head(move_head);
924 796 ref_lists[to_idx].inc_length(refs_to_move);
925 797
926 798 // Remove the chain from the from list.
927 799 if (move_tail == new_head) {
928 800 // We found the end of the from list.
929 801 ref_lists[from_idx].set_head(NULL);
930 802 } else {
931 803 ref_lists[from_idx].set_head(new_head);
932 804 }
933 805 ref_lists[from_idx].dec_length(refs_to_move);
934 806 if (ref_lists[from_idx].length() == 0) {
935 807 break;
936 808 }
937 809 } else {
938 810 to_idx = (to_idx + 1) % _num_q;
939 811 }
940 812 }
941 813 }
// Debug-only invariant: redistribution must conserve the total ref count.
942 814 #ifdef ASSERT
943 815 size_t balanced_total_refs = 0;
944 816 for (int i = 0; i < _max_num_q; ++i) {
945 817 balanced_total_refs += ref_lists[i].length();
946 818 if (TraceReferenceGC && PrintGCDetails) {
947 819 gclog_or_tty->print("%d ", ref_lists[i].length());
948 820 }
949 821 }
950 822 if (TraceReferenceGC && PrintGCDetails) {
951 823 gclog_or_tty->print_cr(" = %d", balanced_total_refs);
952 824 gclog_or_tty->flush();
953 825 }
954 826 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
955 827 #endif
956 828 }
956 829
// Balance every per-subclass family of discovered queues (Soft, Weak,
// Final, Phantom) across the active worker queues.
957 830 void ReferenceProcessor::balance_all_queues() {
958 831 balance_queues(_discoveredSoftRefs);
959 832 balance_queues(_discoveredWeakRefs);
960 833 balance_queues(_discoveredFinalRefs);
961 834 balance_queues(_discoveredPhantomRefs);
962 835 }
963 836
// Drive the three processing phases over one family of discovered lists.
//   refs_lists     - the _max_num_q lists for one Reference subclass
//   policy         - soft-ref retention policy; must be non-NULL exactly
//                    when refs_lists is the SoftReference family
//   clear_referent - forwarded to phase 3 (clear vs. keep referents)
//   task_executor  - when non-NULL and _processing_is_mt, phases run as
//                    parallel tasks; otherwise serially over all queues
// Queues are balanced first whenever discovery was MT (required for
// correctness if fewer queues are used for processing than discovery).
964 837 void
965 838 ReferenceProcessor::process_discovered_reflist(
966 839 DiscoveredList refs_lists[],
967 840 ReferencePolicy* policy,
968 841 bool clear_referent,
969 842 BoolObjectClosure* is_alive,
970 843 OopClosure* keep_alive,
971 844 VoidClosure* complete_gc,
972 845 AbstractRefProcTaskExecutor* task_executor)
973 846 {
974 847 bool mt_processing = task_executor != NULL && _processing_is_mt;
975 848 // If discovery used MT and a dynamic number of GC threads, then
976 849 // the queues must be balanced for correctness if fewer than the
977 850 // maximum number of queues were used. The number of queue used
978 851 // during discovery may be different than the number to be used
979 852 // for processing so don't depend of _num_q < _max_num_q as part
980 853 // of the test.
981 854 bool must_balance = _discovery_is_mt;
982 855
983 856 if ((mt_processing && ParallelRefProcBalancingEnabled) ||
984 857 must_balance) {
985 858 balance_queues(refs_lists);
986 859 }
987 860 if (PrintReferenceGC && PrintGCDetails) {
988 861 size_t total = 0;
989 862 for (int i = 0; i < _max_num_q; ++i) {
990 863 total += refs_lists[i].length();
991 864 }
// FIX(review): 'total' is size_t; "%u" is a format/argument mismatch on
// LP64 (undefined behavior / truncated counts). Use SIZE_FORMAT, matching
// the INTPTR_FORMAT convention used elsewhere in this file.
992 865 gclog_or_tty->print(", " SIZE_FORMAT " refs", total);
993 866 }
994 867
995 868 // Phase 1 (soft refs only):
996 869 // . Traverse the list and remove any SoftReferences whose
997 870 // referents are not alive, but that should be kept alive for
998 871 // policy reasons. Keep alive the transitive closure of all
999 872 // such referents.
1000 873 if (policy != NULL) {
1001 874 if (mt_processing) {
1002 875 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
1003 876 task_executor->execute(phase1);
1004 877 } else {
1005 878 for (int i = 0; i < _max_num_q; i++) {
1006 879 process_phase1(refs_lists[i], policy,
1007 880 is_alive, keep_alive, complete_gc);
1008 881 }
1009 882 }
1010 883 } else { // policy == NULL
1011 884 assert(refs_lists != _discoveredSoftRefs,
1012 885 "Policy must be specified for soft references.");
1013 886 }
1014 887
1015 888 // Phase 2:
1016 889 // . Traverse the list and remove any refs whose referents are alive.
1017 890 if (mt_processing) {
1018 891 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
1019 892 task_executor->execute(phase2);
1020 893 } else {
1021 894 for (int i = 0; i < _max_num_q; i++) {
1022 895 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
1023 896 }
1024 897 }
1025 898
1026 899 // Phase 3:
1027 900 // . Traverse the list and process referents as appropriate.
1028 901 if (mt_processing) {
1029 902 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
1030 903 task_executor->execute(phase3);
↓ open down ↓ |
100 lines elided |
↑ open up ↑ |
1031 904 } else {
1032 905 for (int i = 0; i < _max_num_q; i++) {
1033 906 process_phase3(refs_lists[i], clear_referent,
1034 907 is_alive, keep_alive, complete_gc);
1035 908 }
1036 909 }
1037 910 }
1038 911
// Scrub all discovered lists (every subclass, every queue) of entries
// whose referent is NULL or which are no longer active. Uses the same
// flat indexing off _discoveredSoftRefs as abandon_partial_discovery:
// presumes the per-subclass list arrays are contiguous — TODO(review)
// confirm layout in the header.
1039 912 void ReferenceProcessor::clean_up_discovered_references() {
1040 913 // loop over the lists
1041 - // Should this instead be
1042 - // for (int i = 0; i < subclasses_of_ref; i++_ {
914 +
915 + // An alternative implementation of this routine could
916 + // use the following nested loop:
917 + //
918 + // for (int i = 0; i < number_of_subclasses_of_ref(); i++) {
1043 919 // for (int j = 0; j < _num_q; j++) {
1044 920 // int index = i * _max_num_q + j;
1045 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
921 +
922 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1046 923 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
1047 924 gclog_or_tty->print_cr(
1048 925 "\nScrubbing %s discovered list of Null referents",
1049 926 list_name(i));
1050 927 }
1051 928 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
1052 929 }
1053 930 }
1054 931
// Scrub one discovered list: drop every Reference whose referent has been
// cleared (NULL) or which is no longer active (non-NULL next field).
// Only meaningful for concurrent (non-atomic) discovery, where mutators
// may clear referents behind the collector's back — hence the assert.
// No keep_alive/is_alive closures are needed; nothing is marked here.
1055 932 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
1056 933 assert(!discovery_is_atomic(), "Else why call this method?");
1057 934 DiscoveredListIterator iter(refs_list, NULL, NULL);
1058 935 while (iter.has_next()) {
1059 936 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1060 937 oop next = java_lang_ref_Reference::next(iter.obj());
1061 938 assert(next->is_oop_or_null(), "bad next field");
1062 939 // If referent has been cleared or Reference is not active,
1063 940 // drop it.
1064 941 if (iter.referent() == NULL || next != NULL) {
1065 942 debug_only(
1066 943 if (PrintGCDetails && TraceReferenceGC) {
1067 944 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
1068 945 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
1069 946 " and referent: " INTPTR_FORMAT,
1070 947 iter.obj(), next, iter.referent());
1071 948 }
1072 949 )
1073 950 // Remove Reference object from list
1074 951 iter.remove();
1075 952 iter.move_to_next();
1076 953 } else {
1077 954 iter.next();
1078 955 }
1079 956 }
1080 957 NOT_PRODUCT(
1081 958 if (PrintGCDetails && TraceReferenceGC) {
1082 959 gclog_or_tty->print(
1083 960 " Removed %d Refs with NULL referents out of %d discovered Refs",
1084 961 iter.removed(), iter.processed());
1085 962 }
1086 963 )
1087 964 }
1088 965
// Select the discovered list a newly-found Reference of type 'rt' should
// go on. Queue index: the worker thread's id under MT discovery; a
// round-robin id (next_id()) under ST discovery with MT processing (to
// spread work); queue 0 otherwise. Returns NULL for REF_OTHER (no
// special treatment); never returns for REF_NONE (ShouldNotReachHere).
1089 966 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
1090 967 int id = 0;
1091 968 // Determine the queue index to use for this object.
1092 969 if (_discovery_is_mt) {
1093 970 // During a multi-threaded discovery phase,
1094 971 // each thread saves to its "own" list.
1095 972 Thread* thr = Thread::current();
1096 973 id = thr->as_Worker_thread()->id();
1097 974 } else {
1098 975 // single-threaded discovery, we save in round-robin
1099 976 // fashion to each of the lists.
1100 977 if (_processing_is_mt) {
1101 978 id = next_id();
1102 979 }
1103 980 }
1104 981 assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
1105 982
1106 983 // Get the discovered queue to which we will add
1107 984 DiscoveredList* list = NULL;
1108 985 switch (rt) {
1109 986 case REF_OTHER:
1110 987 // Unknown reference type, no special treatment
1111 988 break;
1112 989 case REF_SOFT:
1113 990 list = &_discoveredSoftRefs[id];
1114 991 break;
1115 992 case REF_WEAK:
1116 993 list = &_discoveredWeakRefs[id];
1117 994 break;
1118 995 case REF_FINAL:
1119 996 list = &_discoveredFinalRefs[id];
1120 997 break;
1121 998 case REF_PHANTOM:
1122 999 list = &_discoveredPhantomRefs[id];
1123 1000 break;
1124 1001 case REF_NONE:
1125 1002 // we should not reach here if we are an instanceRefKlass
1126 1003 default:
1127 1004 ShouldNotReachHere();
1128 1005 }
1129 1006 if (TraceReferenceGC && PrintGCDetails) {
1130 1007 gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
1131 1008 }
1132 1009 return list;
1133 1010 }
1134 1011
// MT-discovery insertion: atomically claim 'obj' for refs_list by CASing
// its (currently NULL) discovered field to next_discovered. Only the
// winning thread prepends obj to the list; losers leave it on whichever
// list the winner chose. Per-thread lists mean no further locking is
// needed once the CAS succeeds.
1135 1012 inline void
1136 1013 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
1137 1014 oop obj,
1138 1015 HeapWord* discovered_addr) {
1139 1016 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1140 1017 // First we must make sure this object is only enqueued once. CAS in a non null
1141 1018 // discovered_addr.
1142 1019 oop current_head = refs_list.head();
1143 1020 // The last ref must have its discovered field pointing to itself.
1144 1021 oop next_discovered = (current_head != NULL) ? current_head : obj;
1145 1022
1146 1023 // Note: In the case of G1, this specific pre-barrier is strictly
1147 1024 // not necessary because the only case we are interested in
1148 1025 // here is when *discovered_addr is NULL (see the CAS further below),
1149 1026 // so this will expand to nothing. As a result, we have manually
1150 1027 // elided this out for G1, but left in the test for some future
1151 1028 // collector that might have need for a pre-barrier here, e.g.:-
1152 1029 // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1153 1030 assert(!_discovered_list_needs_barrier || UseG1GC,
1154 1031 "Need to check non-G1 collector: "
1155 1032 "may need a pre-write-barrier for CAS from NULL below");
1156 1033 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1157 1034 NULL);
1158 1035 if (retest == NULL) {
1159 1036 // This thread just won the right to enqueue the object.
1160 1037 // We have separate lists for enqueueing, so no synchronization
1161 1038 // is necessary.
1162 1039 refs_list.set_head(obj);
1163 1040 refs_list.inc_length(1);
// Post-barrier is still required (when configured) for the reference we
// just stored into the discovered field.
1164 1041 if (_discovered_list_needs_barrier) {
1165 1042 _bs->write_ref_field((void*)discovered_addr, next_discovered);
1166 1043 }
1167 1044
1168 1045 if (TraceReferenceGC) {
1169 1046 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
1170 1047 obj, obj->blueprint()->internal_name());
1171 1048 }
1172 1049 } else {
1173 1050 // If retest was non NULL, another thread beat us to it:
1174 1051 // The reference has already been discovered...
1175 1052 if (TraceReferenceGC) {
1176 1053 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1177 1054 obj, obj->blueprint()->internal_name());
1178 1055 }
1179 1056 }
1180 1057 }
1181 1058
1182 1059 #ifndef PRODUCT
1183 1060 // Non-atomic (i.e. concurrent) discovery might allow us
1184 1061 // to observe j.l.References with NULL referents, being those
1185 1062 // cleared concurrently by mutators during (or after) discovery.
// Debug-only check: under atomic discovery the referent must be a valid
// oop; under concurrent discovery NULL is also acceptable (see above).
1186 1063 void ReferenceProcessor::verify_referent(oop obj) {
1187 1064 bool da = discovery_is_atomic();
1188 1065 oop referent = java_lang_ref_Reference::referent(obj);
1189 1066 assert(da ? referent->is_oop() : referent->is_oop_or_null(),
1190 1067 err_msg("Bad referent " INTPTR_FORMAT " found in Reference "
1191 1068 INTPTR_FORMAT " during %satomic discovery ",
1192 1069 (intptr_t)referent, (intptr_t)obj, da ? "" : "non-"));
1193 1070 }
1194 1071 #endif
1195 1072
1196 1073 // We mention two of several possible choices here:
1197 1074 // #0: if the reference object is not in the "originating generation"
1198 1075 // (or part of the heap being collected, indicated by our "span"
1199 1076 // we don't treat it specially (i.e. we scan it as we would
1200 1077 // a normal oop, treating its references as strong references).
1201 1078 // This means that references can't be discovered unless their
1202 1079 // referent is also in the same span. This is the simplest,
1203 1080 // most "local" and most conservative approach, albeit one
1204 1081 // that may cause weak references to be enqueued least promptly.
1205 1082 // We call this choice the "ReferenceBasedDiscovery" policy.
1206 1083 // #1: the reference object may be in any generation (span), but if
1207 1084 // the referent is in the generation (span) being currently collected
1208 1085 // then we can discover the reference object, provided
1209 1086 // the object has not already been discovered by
1210 1087 // a different concurrently running collector (as may be the
1211 1088 // case, for instance, if the reference object is in CMS and
1212 1089 // the referent in DefNewGeneration), and provided the processing
1213 1090 // of this reference object by the current collector will
1214 1091 // appear atomic to every other collector in the system.
1215 1092 // (Thus, for instance, a concurrent collector may not
1216 1093 // discover references in other generations even if the
1217 1094 // referent is in its own generation). This policy may,
1218 1095 // in certain cases, enqueue references somewhat sooner than
1219 1096 // might Policy #0 above, but at marginally increased cost
1220 1097 // and complexity in processing these references.
1221 1098 // We call this choice the "RefeferentBasedDiscovery" policy.
// Attempt to discover 'obj' (a j.l.r.Reference of type 'rt') for later
// processing. Returns true iff this collector now considers obj
// discovered (including the already-on-our-list case under
// ReferenceBasedDiscovery); false means "treat as a normal object".
// The sequence of early-outs below: not discovering / inactive ref /
// out-of-span (policy #0) / strongly-reachable referent / soft-ref not
// due for clearing.
1222 1099 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
1223 1100 // Make sure we are discovering refs (rather than processing discovered refs).
1224 1101 if (!_discovering_refs || !RegisterReferences) {
1225 1102 return false;
1226 1103 }
1227 1104 // We only discover active references.
1228 1105 oop next = java_lang_ref_Reference::next(obj);
1229 1106 if (next != NULL) { // Ref is no longer active
1230 1107 return false;
1231 1108 }
1232 1109
1233 1110 HeapWord* obj_addr = (HeapWord*)obj;
1234 1111 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1235 1112 !_span.contains(obj_addr)) {
1236 1113 // Reference is not in the originating generation;
1237 1114 // don't treat it specially (i.e. we want to scan it as a normal
1238 1115 // object with strong references).
1239 1116 return false;
1240 1117 }
1241 1118
1242 1119 // We only discover references whose referents are not (yet)
1243 1120 // known to be strongly reachable.
1244 1121 if (is_alive_non_header() != NULL) {
1245 1122 verify_referent(obj);
1246 1123 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
1247 1124 return false; // referent is reachable
1248 1125 }
1249 1126 }
1250 1127 if (rt == REF_SOFT) {
1251 1128 // For soft refs we can decide now if these are not
1252 1129 // current candidates for clearing, in which case we
↓ open down ↓ |
197 lines elided |
↑ open up ↑ |
1253 1130 // can mark through them now, rather than delaying that
1254 1131 // to the reference-processing phase. Since all current
1255 1132 // time-stamp policies advance the soft-ref clock only
1256 1133 // at a major collection cycle, this is always currently
1257 1134 // accurate.
1258 1135 if (!_current_soft_ref_policy->should_clear_reference(obj)) {
1259 1136 return false;
1260 1137 }
1261 1138 }
1262 1139
1140 + ResourceMark rm; // Needed for tracing.
1141 +
1263 1142 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1264 1143 const oop discovered = java_lang_ref_Reference::discovered(obj);
1265 1144 assert(discovered->is_oop_or_null(), "bad discovered field");
1266 1145 if (discovered != NULL) {
1267 1146 // The reference has already been discovered...
1268 1147 if (TraceReferenceGC) {
1269 1148 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1270 1149 obj, obj->blueprint()->internal_name());
1271 1150 }
1272 1151 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1273 1152 // assumes that an object is not processed twice;
1274 1153 // if it's been already discovered it must be on another
1275 1154 // generation's discovered list; so we won't discover it.
1276 1155 return false;
1277 1156 } else {
1278 1157 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1279 1158 "Unrecognized policy");
1280 1159 // Check assumption that an object is not potentially
1281 1160 // discovered twice except by concurrent collectors that potentially
1282 1161 // trace the same Reference object twice.
1283 1162 assert(UseConcMarkSweepGC || UseG1GC,
1284 1163 "Only possible with a concurrent marking collector");
1285 1164 return true;
1286 1165 }
1287 1166 }
1288 1167
1289 1168 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1290 1169 verify_referent(obj);
1291 1170 // Discover if and only if EITHER:
1292 1171 // .. reference is in our span, OR
1293 1172 // .. we are an atomic collector and referent is in our span
1294 1173 if (_span.contains(obj_addr) ||
1295 1174 (discovery_is_atomic() &&
1296 1175 _span.contains(java_lang_ref_Reference::referent(obj)))) {
1297 1176 // should_enqueue = true;
1298 1177 } else {
1299 1178 return false;
1300 1179 }
1301 1180 } else {
1302 1181 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1303 1182 _span.contains(obj_addr), "code inconsistency");
1304 1183 }
1305 1184
1306 1185 // Get the right type of discovered queue head.
1307 1186 DiscoveredList* list = get_discovered_list(rt);
1308 1187 if (list == NULL) {
1309 1188 return false; // nothing special needs to be done
1310 1189 }
1311 1190
1312 1191 if (_discovery_is_mt) {
1313 1192 add_to_discovered_list_mt(*list, obj, discovered_addr);
1314 1193 } else {
1315 1194 // If "_discovered_list_needs_barrier", we do write barriers when
1316 1195 // updating the discovered reference list. Otherwise, we do a raw store
1317 1196 // here: the field will be visited later when processing the discovered
1318 1197 // references.
1319 1198 oop current_head = list->head();
1320 1199 // The last ref must have its discovered field pointing to itself.
1321 1200 oop next_discovered = (current_head != NULL) ? current_head : obj;
1322 1201
1323 1202 // As in the case further above, since we are over-writing a NULL
1324 1203 // pre-value, we can safely elide the pre-barrier here for the case of G1.
1325 1204 // e.g.:- _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1326 1205 assert(discovered == NULL, "control point invariant");
1327 1206 assert(!_discovered_list_needs_barrier || UseG1GC,
1328 1207 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
1329 1208 oop_store_raw(discovered_addr, next_discovered);
1330 1209 if (_discovered_list_needs_barrier) {
1331 1210 _bs->write_ref_field((void*)discovered_addr, next_discovered);
1332 1211 }
1333 1212 list->set_head(obj);
1334 1213 list->inc_length(1);
1335 1214
1336 1215 if (TraceReferenceGC) {
1337 1216 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
1338 1217 obj, obj->blueprint()->internal_name());
1339 1218 }
1340 1219 }
1341 1220 assert(obj->is_oop(), "Discovered a bad reference");
1342 1221 verify_referent(obj);
1343 1222 return true;
1344 1223 }
1345 1224
1346 1225 // Preclean the discovered references by removing those
1347 1226 // whose referents are alive, and by marking from those that
1348 1227 // are not active. These lists can be handled here
1349 1228 // in any order and, indeed, concurrently.
// Precleaning pass over all four subclass families. Between every
// per-queue list the supplied 'yield' closure is consulted; the routine
// returns early (abandoning the rest) when should_return() is true.
// 'should_unload_classes' only feeds the debug-only
// RememberKlassesChecker below (CMS class-unloading configurations).
1350 1229 void ReferenceProcessor::preclean_discovered_references(
1351 1230 BoolObjectClosure* is_alive,
1352 1231 OopClosure* keep_alive,
1353 1232 VoidClosure* complete_gc,
1354 1233 YieldClosure* yield,
1355 1234 bool should_unload_classes) {
1356 1235
1357 1236 NOT_PRODUCT(verify_ok_to_handle_reflists());
1358 1237
1359 1238 #ifdef ASSERT
1360 1239 bool must_remember_klasses = ClassUnloading && !UseConcMarkSweepGC ||
1361 1240 CMSClassUnloadingEnabled && UseConcMarkSweepGC ||
1362 1241 ExplicitGCInvokesConcurrentAndUnloadsClasses &&
1363 1242 UseConcMarkSweepGC && should_unload_classes;
1364 1243 RememberKlassesChecker mx(must_remember_klasses);
1365 1244 #endif
1366 1245 // Soft references
1367 1246 {
1368 1247 TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
1369 1248 false, gclog_or_tty);
1370 1249 for (int i = 0; i < _max_num_q; i++) {
1371 1250 if (yield->should_return()) {
1372 1251 return;
1373 1252 }
1374 1253 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1375 1254 keep_alive, complete_gc, yield);
1376 1255 }
1377 1256 }
1378 1257
1379 1258 // Weak references
1380 1259 {
1381 1260 TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
1382 1261 false, gclog_or_tty);
1383 1262 for (int i = 0; i < _max_num_q; i++) {
1384 1263 if (yield->should_return()) {
1385 1264 return;
1386 1265 }
1387 1266 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
1388 1267 keep_alive, complete_gc, yield);
1389 1268 }
1390 1269 }
1391 1270
1392 1271 // Final references
1393 1272 {
1394 1273 TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
1395 1274 false, gclog_or_tty);
1396 1275 for (int i = 0; i < _max_num_q; i++) {
1397 1276 if (yield->should_return()) {
1398 1277 return;
1399 1278 }
1400 1279 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1401 1280 keep_alive, complete_gc, yield);
1402 1281 }
1403 1282 }
1404 1283
1405 1284 // Phantom references
1406 1285 {
1407 1286 TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
1408 1287 false, gclog_or_tty);
1409 1288 for (int i = 0; i < _max_num_q; i++) {
1410 1289 if (yield->should_return()) {
1411 1290 return;
1412 1291 }
1413 1292 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1414 1293 keep_alive, complete_gc, yield);
1415 1294 }
1416 1295 }
1417 1296 }
1418 1297
1419 1298 // Walk the given discovered ref list, and remove all reference objects
1420 1299 // whose referents are still alive, whose referents are NULL or which
1421 1300 // are not active (have a non-NULL next field). NOTE: When we are
1422 1301 // thus precleaning the ref lists (which happens single-threaded today),
1423 1302 // we do not disable refs discovery to honour the correct semantics of
1424 1303 // java.lang.Reference. As a result, we need to be careful below
1425 1304 // that ref removal steps interleave safely with ref discovery steps
1426 1305 // (in this thread).
// For each dropped Reference, its referent and its 'next' field are fed
// to keep_alive so the cohort is marked; complete_gc then closes the set.
1427 1306 void
1428 1307 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
1429 1308 BoolObjectClosure* is_alive,
1430 1309 OopClosure* keep_alive,
1431 1310 VoidClosure* complete_gc,
1432 1311 YieldClosure* yield) {
1433 1312 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1434 1313 while (iter.has_next()) {
1435 1314 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1436 1315 oop obj = iter.obj();
1437 1316 oop next = java_lang_ref_Reference::next(obj);
1438 1317 if (iter.referent() == NULL || iter.is_referent_alive() ||
1439 1318 next != NULL) {
1440 1319 // The referent has been cleared, or is alive, or the Reference is not
1441 1320 // active; we need to trace and mark its cohort.
1442 1321 if (TraceReferenceGC) {
1443 1322 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1444 1323 iter.obj(), iter.obj()->blueprint()->internal_name());
1445 1324 }
1446 1325 // Remove Reference object from list
1447 1326 iter.remove();
1448 1327 // Keep alive its cohort.
1449 1328 iter.make_referent_alive();
// The 'next' field is visited through the correctly-typed pointer for
// the compressed- vs. uncompressed-oops cases.
1450 1329 if (UseCompressedOops) {
1451 1330 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
1452 1331 keep_alive->do_oop(next_addr);
1453 1332 } else {
1454 1333 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
1455 1334 keep_alive->do_oop(next_addr);
1456 1335 }
1457 1336 iter.move_to_next();
1458 1337 } else {
1459 1338 iter.next();
1460 1339 }
1461 1340 }
1462 1341 // Close the reachable set
1463 1342 complete_gc->do_void();
1464 1343
↓ open down ↓ |
192 lines elided |
↑ open up ↑ |
1465 1344 NOT_PRODUCT(
1466 1345 if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
1467 1346 gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
1468 1347 "Refs in discovered list " INTPTR_FORMAT,
1469 1348 iter.removed(), iter.processed(), (address)refs_list.head());
1470 1349 }
1471 1350 )
1472 1351 }
1473 1352
// Human-readable name for discovered list index 'i' (flat index into the
// contiguous Soft/Weak/Final/Phantom list arrays): i / _max_num_q selects
// the subclass family. Used only for trace output.
1474 1353 const char* ReferenceProcessor::list_name(int i) {
1475 - assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
// FIX(review): valid flat indices are [0, _max_num_q * subclasses); the
// previous '<=' was an off-by-one that admitted the one-past-the-end index
// (which would compute j == 4 and hit ShouldNotReachHere() at runtime).
1354 + assert(i >= 0 && i < _max_num_q * number_of_subclasses_of_ref(),
1355 + "Out of bounds index");
1356 +
1476 1357 int j = i / _max_num_q;
1477 1358 switch (j) {
1478 1359 case 0: return "SoftRef";
1479 1360 case 1: return "WeakRef";
1480 1361 case 2: return "FinalRef";
1481 1362 case 3: return "PhantomRef";
1482 1363 }
1483 1364 ShouldNotReachHere();
1484 1365 return NULL;
1485 1366 }
1486 1367
1487 1368 #ifndef PRODUCT
// Debug-only hook invoked (via NOT_PRODUCT) before handling ref lists,
// e.g. at the top of preclean_discovered_references. Intentionally a
// no-op placeholder at present.
1488 1369 void ReferenceProcessor::verify_ok_to_handle_reflists() {
1489 1370 // empty for now
1490 1371 }
1491 1372 #endif
1492 1373
1493 1374 #ifndef PRODUCT
// Debug-only: drop the contents of every discovered list (all subclasses,
// all queues, via the flat _discoveredSoftRefs indexing used throughout
// this file). Must not be called while discovery is active.
1494 1375 void ReferenceProcessor::clear_discovered_references() {
1495 1376 guarantee(!_discovering_refs, "Discovering refs?");
1496 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
1377 + for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1497 1378 clear_discovered_references(_discoveredSoftRefs[i]);
1498 1379 }
1499 1380 }
1500 1381
1501 1382 #endif // PRODUCT
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX