Print this page
rev 2661 : [mq]: g1-reference-processing
Split |
Close |
Expand all |
Collapse all |
--- old/src/share/vm/memory/referenceProcessor.cpp
+++ new/src/share/vm/memory/referenceProcessor.cpp
1 1 /*
2 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
3 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 4 *
5 5 * This code is free software; you can redistribute it and/or modify it
6 6 * under the terms of the GNU General Public License version 2 only, as
7 7 * published by the Free Software Foundation.
8 8 *
9 9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 12 * version 2 for more details (a copy is included in the LICENSE file that
13 13 * accompanied this code).
14 14 *
15 15 * You should have received a copy of the GNU General Public License version
16 16 * 2 along with this work; if not, write to the Free Software Foundation,
17 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 18 *
19 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 20 * or visit www.oracle.com if you need additional information or have any
21 21 * questions.
22 22 *
23 23 */
24 24
25 25 #include "precompiled.hpp"
26 26 #include "classfile/javaClasses.hpp"
27 27 #include "classfile/systemDictionary.hpp"
↓ open down ↓ |
27 lines elided |
↑ open up ↑ |
28 28 #include "gc_interface/collectedHeap.hpp"
29 29 #include "gc_interface/collectedHeap.inline.hpp"
30 30 #include "memory/referencePolicy.hpp"
31 31 #include "memory/referenceProcessor.hpp"
32 32 #include "oops/oop.inline.hpp"
33 33 #include "runtime/java.hpp"
34 34 #include "runtime/jniHandles.hpp"
35 35
36 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
37 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
38 -const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
39 38 bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
40 39
41 -// List of discovered references.
42 -class DiscoveredList {
43 -public:
44 - DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
45 - oop head() const {
46 - return UseCompressedOops ? oopDesc::decode_heap_oop(_compressed_head) :
47 - _oop_head;
48 - }
49 - HeapWord* adr_head() {
50 - return UseCompressedOops ? (HeapWord*)&_compressed_head :
51 - (HeapWord*)&_oop_head;
52 - }
53 - void set_head(oop o) {
54 - if (UseCompressedOops) {
55 - // Must compress the head ptr.
56 - _compressed_head = oopDesc::encode_heap_oop(o);
57 - } else {
58 - _oop_head = o;
59 - }
60 - }
61 - bool empty() const { return head() == NULL; }
62 - size_t length() { return _len; }
63 - void set_length(size_t len) { _len = len; }
64 - void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
65 - void dec_length(size_t dec) { _len -= dec; }
66 -private:
67 - // Set value depending on UseCompressedOops. This could be a template class
68 - // but then we have to fix all the instantiations and declarations that use this class.
69 - oop _oop_head;
70 - narrowOop _compressed_head;
71 - size_t _len;
72 -};
73 -
74 40 void referenceProcessor_init() {
75 41 ReferenceProcessor::init_statics();
76 42 }
77 43
78 44 void ReferenceProcessor::init_statics() {
79 45 // Initialize the master soft ref clock.
80 46 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
81 47
82 48 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
83 49 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
84 50 NOT_COMPILER2(LRUCurrentHeapPolicy());
85 51 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
86 52 vm_exit_during_initialization("Could not allocate reference policy object");
87 53 }
88 54 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
89 55 RefDiscoveryPolicy == ReferentBasedDiscovery,
90 56 "Unrecongnized RefDiscoveryPolicy");
91 57 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
92 58 }
93 59
94 60 ReferenceProcessor::ReferenceProcessor(MemRegion span,
95 61 bool mt_processing,
96 62 int mt_processing_degree,
97 63 bool mt_discovery,
98 64 int mt_discovery_degree,
99 65 bool atomic_discovery,
100 66 BoolObjectClosure* is_alive_non_header,
101 67 bool discovered_list_needs_barrier) :
102 68 _discovering_refs(false),
103 69 _enqueuing_is_done(false),
104 70 _is_alive_non_header(is_alive_non_header),
↓ open down ↓ |
21 lines elided |
↑ open up ↑ |
105 71 _discovered_list_needs_barrier(discovered_list_needs_barrier),
106 72 _bs(NULL),
107 73 _processing_is_mt(mt_processing),
108 74 _next_id(0)
109 75 {
110 76 _span = span;
111 77 _discovery_is_atomic = atomic_discovery;
112 78 _discovery_is_mt = mt_discovery;
113 79 _num_q = MAX2(1, mt_processing_degree);
114 80 _max_num_q = MAX2(_num_q, mt_discovery_degree);
115 - _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
81 + _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref());
116 82 if (_discoveredSoftRefs == NULL) {
117 83 vm_exit_during_initialization("Could not allocated RefProc Array");
118 84 }
119 85 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
120 86 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
121 87 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 122 88 // Initialize all entries to NULL
123 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
89 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
124 90 _discoveredSoftRefs[i].set_head(NULL);
125 91 _discoveredSoftRefs[i].set_length(0);
126 92 }
127 93 // If we do barriers, cache a copy of the barrier set.
128 94 if (discovered_list_needs_barrier) {
129 95 _bs = Universe::heap()->barrier_set();
130 96 }
131 97 setup_policy(false /* default soft ref policy */);
132 98 }
133 99
134 100 #ifndef PRODUCT
135 101 void ReferenceProcessor::verify_no_references_recorded() {
136 102 guarantee(!_discovering_refs, "Discovering refs?");
137 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
138 - guarantee(_discoveredSoftRefs[i].empty(),
103 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
104 + guarantee(_discoveredSoftRefs[i].is_empty(),
139 105 "Found non-empty discovered list");
140 106 }
141 107 }
142 108 #endif
143 109
144 110 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
145 111 // Should this instead be
 146 - // for (int i = 0; i < subclasses_of_ref; i++) {
 165 112 + // for (int i = 0; i < subclasses_of_ref(); i++) {
147 113 // for (int j = 0; j < _num_q; j++) {
148 114 // int index = i * _max_num_q + j;
149 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
115 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
150 116 if (UseCompressedOops) {
151 117 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
152 118 } else {
153 119 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
154 120 }
155 121 }
156 122 }
157 123
158 124 void ReferenceProcessor::update_soft_ref_master_clock() {
159 125 // Update (advance) the soft ref master clock field. This must be done
160 126 // after processing the soft ref list.
161 127 jlong now = os::javaTimeMillis();
162 128 jlong clock = java_lang_ref_SoftReference::clock();
163 129 NOT_PRODUCT(
164 130 if (now < clock) {
165 131 warning("time warp: %d to %d", clock, now);
166 132 }
167 133 )
168 134 // In product mode, protect ourselves from system time being adjusted
169 135 // externally and going backward; see note in the implementation of
170 136 // GenCollectedHeap::time_since_last_gc() for the right way to fix
171 137 // this uniformly throughout the VM; see bug-id 4741166. XXX
172 138 if (now > clock) {
173 139 java_lang_ref_SoftReference::set_clock(now);
174 140 }
175 141 // Else leave clock stalled at its old value until time progresses
176 142 // past clock value.
177 143 }
178 144
179 145 void ReferenceProcessor::process_discovered_references(
180 146 BoolObjectClosure* is_alive,
181 147 OopClosure* keep_alive,
182 148 VoidClosure* complete_gc,
183 149 AbstractRefProcTaskExecutor* task_executor) {
184 150 NOT_PRODUCT(verify_ok_to_handle_reflists());
185 151
186 152 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
187 153 // Stop treating discovered references specially.
188 154 disable_discovery();
189 155
190 156 bool trace_time = PrintGCDetails && PrintReferenceGC;
191 157 // Soft references
192 158 {
193 159 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
194 160 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
195 161 is_alive, keep_alive, complete_gc, task_executor);
196 162 }
197 163
198 164 update_soft_ref_master_clock();
199 165
200 166 // Weak references
201 167 {
202 168 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
203 169 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
204 170 is_alive, keep_alive, complete_gc, task_executor);
205 171 }
206 172
207 173 // Final references
208 174 {
209 175 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
210 176 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
211 177 is_alive, keep_alive, complete_gc, task_executor);
212 178 }
213 179
214 180 // Phantom references
215 181 {
216 182 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
217 183 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
218 184 is_alive, keep_alive, complete_gc, task_executor);
219 185 }
220 186
221 187 // Weak global JNI references. It would make more sense (semantically) to
222 188 // traverse these simultaneously with the regular weak references above, but
223 189 // that is not how the JDK1.2 specification is. See #4126360. Native code can
224 190 // thus use JNI weak references to circumvent the phantom references and
225 191 // resurrect a "post-mortem" object.
226 192 {
227 193 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
228 194 if (task_executor != NULL) {
229 195 task_executor->set_single_threaded_mode();
230 196 }
231 197 process_phaseJNI(is_alive, keep_alive, complete_gc);
232 198 }
233 199 }
234 200
235 201 #ifndef PRODUCT
236 202 // Calculate the number of jni handles.
237 203 uint ReferenceProcessor::count_jni_refs() {
238 204 class AlwaysAliveClosure: public BoolObjectClosure {
239 205 public:
240 206 virtual bool do_object_b(oop obj) { return true; }
241 207 virtual void do_object(oop obj) { assert(false, "Don't call"); }
242 208 };
243 209
244 210 class CountHandleClosure: public OopClosure {
245 211 private:
246 212 int _count;
247 213 public:
248 214 CountHandleClosure(): _count(0) {}
249 215 void do_oop(oop* unused) { _count++; }
250 216 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
251 217 int count() { return _count; }
252 218 };
253 219 CountHandleClosure global_handle_count;
254 220 AlwaysAliveClosure always_alive;
255 221 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
256 222 return global_handle_count.count();
257 223 }
258 224 #endif
259 225
260 226 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
261 227 OopClosure* keep_alive,
262 228 VoidClosure* complete_gc) {
263 229 #ifndef PRODUCT
264 230 if (PrintGCDetails && PrintReferenceGC) {
265 231 unsigned int count = count_jni_refs();
266 232 gclog_or_tty->print(", %u refs", count);
267 233 }
268 234 #endif
269 235 JNIHandles::weak_oops_do(is_alive, keep_alive);
270 236 complete_gc->do_void();
271 237 }
272 238
273 239
274 240 template <class T>
275 241 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
276 242 AbstractRefProcTaskExecutor* task_executor) {
277 243
278 244 // Remember old value of pending references list
279 245 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
280 246 T old_pending_list_value = *pending_list_addr;
281 247
282 248 // Enqueue references that are not made active again, and
283 249 // clear the decks for the next collection (cycle).
284 250 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
285 251 // Do the oop-check on pending_list_addr missed in
286 252 // enqueue_discovered_reflist. We should probably
287 253 // do a raw oop_check so that future such idempotent
288 254 // oop_stores relying on the oop-check side-effect
289 255 // may be elided automatically and safely without
290 256 // affecting correctness.
291 257 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
292 258
293 259 // Stop treating discovered references specially.
294 260 ref->disable_discovery();
295 261
296 262 // Return true if new pending references were added
297 263 return old_pending_list_value != *pending_list_addr;
298 264 }
299 265
300 266 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
301 267 NOT_PRODUCT(verify_ok_to_handle_reflists());
302 268 if (UseCompressedOops) {
303 269 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
304 270 } else {
305 271 return enqueue_discovered_ref_helper<oop>(this, task_executor);
306 272 }
307 273 }
308 274
309 275 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
310 276 HeapWord* pending_list_addr) {
311 277 // Given a list of refs linked through the "discovered" field
312 278 // (java.lang.ref.Reference.discovered), self-loop their "next" field
313 279 // thus distinguishing them from active References, then
314 280 // prepend them to the pending list.
315 281 // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
316 282 // the "next" field is used to chain the pending list, not the discovered
317 283 // field.
318 284
319 285 if (TraceReferenceGC && PrintGCDetails) {
320 286 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
321 287 INTPTR_FORMAT, (address)refs_list.head());
322 288 }
323 289
324 290 oop obj = NULL;
325 291 oop next_d = refs_list.head();
326 292 if (pending_list_uses_discovered_field()) { // New behaviour
327 293 // Walk down the list, self-looping the next field
328 294 // so that the References are not considered active.
329 295 while (obj != next_d) {
330 296 obj = next_d;
331 297 assert(obj->is_instanceRef(), "should be reference object");
332 298 next_d = java_lang_ref_Reference::discovered(obj);
333 299 if (TraceReferenceGC && PrintGCDetails) {
334 300 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
335 301 obj, next_d);
336 302 }
337 303 assert(java_lang_ref_Reference::next(obj) == NULL,
338 304 "Reference not active; should not be discovered");
339 305 // Self-loop next, so as to make Ref not active.
340 306 java_lang_ref_Reference::set_next(obj, obj);
341 307 if (next_d == obj) { // obj is last
 342 308 // Swap refs_list into pending_list_addr and
343 309 // set obj's discovered to what we read from pending_list_addr.
344 310 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
345 311 // Need oop_check on pending_list_addr above;
346 312 // see special oop-check code at the end of
347 313 // enqueue_discovered_reflists() further below.
348 314 java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL
349 315 }
350 316 }
351 317 } else { // Old behaviour
352 318 // Walk down the list, copying the discovered field into
353 319 // the next field and clearing the discovered field.
354 320 while (obj != next_d) {
355 321 obj = next_d;
356 322 assert(obj->is_instanceRef(), "should be reference object");
357 323 next_d = java_lang_ref_Reference::discovered(obj);
358 324 if (TraceReferenceGC && PrintGCDetails) {
359 325 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
360 326 obj, next_d);
361 327 }
362 328 assert(java_lang_ref_Reference::next(obj) == NULL,
363 329 "The reference should not be enqueued");
364 330 if (next_d == obj) { // obj is last
 365 331 // Swap refs_list into pending_list_addr and
366 332 // set obj's next to what we read from pending_list_addr.
367 333 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
368 334 // Need oop_check on pending_list_addr above;
369 335 // see special oop-check code at the end of
370 336 // enqueue_discovered_reflists() further below.
371 337 if (old == NULL) {
372 338 // obj should be made to point to itself, since
373 339 // pending list was empty.
374 340 java_lang_ref_Reference::set_next(obj, obj);
375 341 } else {
376 342 java_lang_ref_Reference::set_next(obj, old);
377 343 }
378 344 } else {
379 345 java_lang_ref_Reference::set_next(obj, next_d);
380 346 }
381 347 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
382 348 }
383 349 }
384 350 }
385 351
386 352 // Parallel enqueue task
387 353 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
388 354 public:
389 355 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
390 356 DiscoveredList discovered_refs[],
391 357 HeapWord* pending_list_addr,
392 358 int n_queues)
393 359 : EnqueueTask(ref_processor, discovered_refs,
394 360 pending_list_addr, n_queues)
395 361 { }
396 362
↓ open down ↓ |
237 lines elided |
↑ open up ↑ |
397 363 virtual void work(unsigned int work_id) {
398 364 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
399 365 // Simplest first cut: static partitioning.
400 366 int index = work_id;
401 367 // The increment on "index" must correspond to the maximum number of queues
402 368 // (n_queues) with which that ReferenceProcessor was created. That
403 369 // is because of the "clever" way the discovered references lists were
404 370 // allocated and are indexed into.
405 371 assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
406 372 for (int j = 0;
407 - j < subclasses_of_ref;
373 + j < ReferenceProcessor::subclasses_of_ref();
408 374 j++, index += _n_queues) {
409 375 _ref_processor.enqueue_discovered_reflist(
410 376 _refs_lists[index], _pending_list_addr);
411 377 _refs_lists[index].set_head(NULL);
412 378 _refs_lists[index].set_length(0);
413 379 }
414 380 }
415 381 };
416 382
417 383 // Enqueue references that are not made active again
418 384 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
419 385 AbstractRefProcTaskExecutor* task_executor) {
420 386 if (_processing_is_mt && task_executor != NULL) {
421 387 // Parallel code
422 388 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
423 389 pending_list_addr, _max_num_q);
424 390 task_executor->execute(tsk);
425 391 } else {
426 392 // Serial code: call the parent class's implementation
427 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
393 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
428 394 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
429 395 _discoveredSoftRefs[i].set_head(NULL);
430 396 _discoveredSoftRefs[i].set_length(0);
431 397 }
432 398 }
433 399 }
434 400
435 -// Iterator for the list of discovered references.
436 -class DiscoveredListIterator {
437 -public:
438 - inline DiscoveredListIterator(DiscoveredList& refs_list,
439 - OopClosure* keep_alive,
440 - BoolObjectClosure* is_alive);
441 -
442 - // End Of List.
443 - inline bool has_next() const { return _ref != NULL; }
444 -
445 - // Get oop to the Reference object.
446 - inline oop obj() const { return _ref; }
447 -
448 - // Get oop to the referent object.
449 - inline oop referent() const { return _referent; }
450 -
451 - // Returns true if referent is alive.
452 - inline bool is_referent_alive() const;
453 -
454 - // Loads data for the current reference.
455 - // The "allow_null_referent" argument tells us to allow for the possibility
456 - // of a NULL referent in the discovered Reference object. This typically
457 - // happens in the case of concurrent collectors that may have done the
458 - // discovery concurrently, or interleaved, with mutator execution.
459 - inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
460 -
461 - // Move to the next discovered reference.
462 - inline void next();
463 -
464 - // Remove the current reference from the list
465 - inline void remove();
466 -
467 - // Make the Reference object active again.
468 - inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
469 -
470 - // Make the referent alive.
471 - inline void make_referent_alive() {
472 - if (UseCompressedOops) {
473 - _keep_alive->do_oop((narrowOop*)_referent_addr);
474 - } else {
475 - _keep_alive->do_oop((oop*)_referent_addr);
476 - }
477 - }
478 -
479 - // Update the discovered field.
480 - inline void update_discovered() {
481 - // First _prev_next ref actually points into DiscoveredList (gross).
482 - if (UseCompressedOops) {
483 - if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
484 - _keep_alive->do_oop((narrowOop*)_prev_next);
485 - }
486 - } else {
487 - if (!oopDesc::is_null(*(oop*)_prev_next)) {
488 - _keep_alive->do_oop((oop*)_prev_next);
489 - }
490 - }
491 - }
492 -
493 - // NULL out referent pointer.
494 - inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
495 -
496 - // Statistics
497 - NOT_PRODUCT(
498 - inline size_t processed() const { return _processed; }
499 - inline size_t removed() const { return _removed; }
500 - )
501 -
502 - inline void move_to_next();
503 -
504 -private:
505 - DiscoveredList& _refs_list;
506 - HeapWord* _prev_next;
507 - oop _prev;
508 - oop _ref;
509 - HeapWord* _discovered_addr;
510 - oop _next;
511 - HeapWord* _referent_addr;
512 - oop _referent;
513 - OopClosure* _keep_alive;
514 - BoolObjectClosure* _is_alive;
515 - DEBUG_ONLY(
516 - oop _first_seen; // cyclic linked list check
517 - )
518 - NOT_PRODUCT(
519 - size_t _processed;
520 - size_t _removed;
521 - )
522 -};
523 -
524 -inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list,
525 - OopClosure* keep_alive,
526 - BoolObjectClosure* is_alive)
527 - : _refs_list(refs_list),
528 - _prev_next(refs_list.adr_head()),
529 - _prev(NULL),
530 - _ref(refs_list.head()),
531 -#ifdef ASSERT
532 - _first_seen(refs_list.head()),
533 -#endif
534 -#ifndef PRODUCT
535 - _processed(0),
536 - _removed(0),
537 -#endif
538 - _next(NULL),
539 - _keep_alive(keep_alive),
540 - _is_alive(is_alive)
541 -{ }
542 -
543 -inline bool DiscoveredListIterator::is_referent_alive() const {
544 - return _is_alive->do_object_b(_referent);
545 -}
546 -
547 -inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
401 +void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
548 402 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
549 403 oop discovered = java_lang_ref_Reference::discovered(_ref);
550 404 assert(_discovered_addr && discovered->is_oop_or_null(),
551 405 "discovered field is bad");
552 406 _next = discovered;
553 407 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
554 408 _referent = java_lang_ref_Reference::referent(_ref);
555 409 assert(Universe::heap()->is_in_reserved_or_null(_referent),
556 410 "Wrong oop found in java.lang.Reference object");
557 411 assert(allow_null_referent ?
558 412 _referent->is_oop_or_null()
559 413 : _referent->is_oop(),
560 414 "bad referent");
561 415 }
562 416
563 -inline void DiscoveredListIterator::next() {
564 - _prev_next = _discovered_addr;
565 - _prev = _ref;
566 - move_to_next();
567 -}
568 -
569 -inline void DiscoveredListIterator::remove() {
417 +void DiscoveredListIterator::remove() {
570 418 assert(_ref->is_oop(), "Dropping a bad reference");
571 419 oop_store_raw(_discovered_addr, NULL);
572 420
573 421 // First _prev_next ref actually points into DiscoveredList (gross).
574 422 oop new_next;
575 423 if (_next == _ref) {
576 424 // At the end of the list, we should make _prev point to itself.
577 425 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
578 426 // and _prev will be NULL.
579 427 new_next = _prev;
580 428 } else {
581 429 new_next = _next;
582 430 }
583 431
584 432 if (UseCompressedOops) {
↓ open down ↓ |
5 lines elided |
↑ open up ↑ |
585 433 // Remove Reference object from list.
586 434 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
587 435 } else {
588 436 // Remove Reference object from list.
589 437 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
590 438 }
591 439 NOT_PRODUCT(_removed++);
592 440 _refs_list.dec_length(1);
593 441 }
594 442
595 -inline void DiscoveredListIterator::move_to_next() {
596 - if (_ref == _next) {
597 - // End of the list.
598 - _ref = NULL;
443 +// Make the Reference object active again.
444 +void DiscoveredListIterator::make_active() {
445 + // For G1 we don't want to use set_next - it
446 + // will dirty the card for the next field of
447 + // the reference object and will fail
448 + // CT verification.
449 + if (UseG1GC) {
450 + BarrierSet* bs = oopDesc::bs();
451 + HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
452 +
453 + if (UseCompressedOops) {
454 + bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
455 + } else {
456 + bs->write_ref_field_pre((oop*)next_addr, NULL);
457 + }
458 + java_lang_ref_Reference::set_next_raw(_ref, NULL);
599 459 } else {
600 - _ref = _next;
460 + java_lang_ref_Reference::set_next(_ref, NULL);
601 461 }
602 - assert(_ref != _first_seen, "cyclic ref_list found");
603 - NOT_PRODUCT(_processed++);
462 +}
463 +
464 +void DiscoveredListIterator::clear_referent() {
465 + oop_store_raw(_referent_addr, NULL);
604 466 }
605 467
606 468 // NOTE: process_phase*() are largely similar, and at a high level
607 469 // merely iterate over the extant list applying a predicate to
608 470 // each of its elements and possibly removing that element from the
609 471 // list and applying some further closures to that element.
610 472 // We should consider the possibility of replacing these
611 473 // process_phase*() methods by abstracting them into
612 474 // a single general iterator invocation that receives appropriate
613 475 // closures that accomplish this work.
614 476
615 477 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
616 478 // referents are not alive, but that should be kept alive for policy reasons.
617 479 // Keep alive the transitive closure of all such referents.
618 480 void
619 481 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
620 482 ReferencePolicy* policy,
621 483 BoolObjectClosure* is_alive,
622 484 OopClosure* keep_alive,
623 485 VoidClosure* complete_gc) {
624 486 assert(policy != NULL, "Must have a non-NULL policy");
625 487 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
626 488 // Decide which softly reachable refs should be kept alive.
627 489 while (iter.has_next()) {
628 490 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
629 491 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
630 492 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
631 493 if (TraceReferenceGC) {
632 494 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
633 495 iter.obj(), iter.obj()->blueprint()->internal_name());
634 496 }
635 497 // Remove Reference object from list
636 498 iter.remove();
637 499 // Make the Reference object active again
638 500 iter.make_active();
639 501 // keep the referent around
640 502 iter.make_referent_alive();
641 503 iter.move_to_next();
642 504 } else {
643 505 iter.next();
644 506 }
645 507 }
646 508 // Close the reachable set
647 509 complete_gc->do_void();
648 510 NOT_PRODUCT(
649 511 if (PrintGCDetails && TraceReferenceGC) {
650 512 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
651 513 "discovered Refs by policy, from list " INTPTR_FORMAT,
652 514 iter.removed(), iter.processed(), (address)refs_list.head());
653 515 }
654 516 )
655 517 }
656 518
657 519 // Traverse the list and remove any Refs that are not active, or
658 520 // whose referents are either alive or NULL.
659 521 void
660 522 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
661 523 BoolObjectClosure* is_alive,
662 524 OopClosure* keep_alive) {
663 525 assert(discovery_is_atomic(), "Error");
664 526 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
665 527 while (iter.has_next()) {
666 528 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
667 529 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
668 530 assert(next == NULL, "Should not discover inactive Reference");
669 531 if (iter.is_referent_alive()) {
670 532 if (TraceReferenceGC) {
671 533 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
672 534 iter.obj(), iter.obj()->blueprint()->internal_name());
673 535 }
674 536 // The referent is reachable after all.
675 537 // Remove Reference object from list.
676 538 iter.remove();
677 539 // Update the referent pointer as necessary: Note that this
678 540 // should not entail any recursive marking because the
679 541 // referent must already have been traversed.
680 542 iter.make_referent_alive();
681 543 iter.move_to_next();
682 544 } else {
683 545 iter.next();
684 546 }
685 547 }
686 548 NOT_PRODUCT(
687 549 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
688 550 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
689 551 "Refs in discovered list " INTPTR_FORMAT,
690 552 iter.removed(), iter.processed(), (address)refs_list.head());
691 553 }
692 554 )
693 555 }
694 556
695 557 void
696 558 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
697 559 BoolObjectClosure* is_alive,
698 560 OopClosure* keep_alive,
699 561 VoidClosure* complete_gc) {
700 562 assert(!discovery_is_atomic(), "Error");
701 563 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
702 564 while (iter.has_next()) {
703 565 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
704 566 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
705 567 oop next = java_lang_ref_Reference::next(iter.obj());
706 568 if ((iter.referent() == NULL || iter.is_referent_alive() ||
707 569 next != NULL)) {
708 570 assert(next->is_oop_or_null(), "bad next field");
709 571 // Remove Reference object from list
710 572 iter.remove();
711 573 // Trace the cohorts
712 574 iter.make_referent_alive();
713 575 if (UseCompressedOops) {
714 576 keep_alive->do_oop((narrowOop*)next_addr);
715 577 } else {
716 578 keep_alive->do_oop((oop*)next_addr);
717 579 }
718 580 iter.move_to_next();
719 581 } else {
720 582 iter.next();
721 583 }
722 584 }
723 585 // Now close the newly reachable set
724 586 complete_gc->do_void();
725 587 NOT_PRODUCT(
726 588 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
727 589 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
728 590 "Refs in discovered list " INTPTR_FORMAT,
729 591 iter.removed(), iter.processed(), (address)refs_list.head());
730 592 }
731 593 )
732 594 }
733 595
// Traverse the list and process the referents, by either
// clearing them or keeping them (and their reachable
// closure) alive.
//
//   refs_list      - the discovered list to process
//   clear_referent - if true, NULL out each referent; otherwise keep
//                    the referent (and its transitive closure) alive
//   is_alive, keep_alive, complete_gc - collector-supplied closures
void
ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
                                   bool clear_referent,
                                   BoolObjectClosure* is_alive,
                                   OopClosure* keep_alive,
                                   VoidClosure* complete_gc) {
  ResourceMark rm;
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // Link this Reference into the chain being built before its
    // referent is (possibly) cleared below.
    iter.update_discovered();
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // keep the referent around
      iter.make_referent_alive();
    }
    if (TraceReferenceGC) {
      gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                             clear_referent ? "cleared " : "",
                             iter.obj(), iter.obj()->blueprint()->internal_name());
    }
    // NOTE(review): the relaxed is_oop check under CMS presumably
    // tolerates objects in transient states during concurrent
    // collection — confirm against oopDesc::is_oop semantics.
    assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
    iter.next();
  }
  // Remember to update the next pointer of the last ref.
  iter.update_discovered();
  // Close the reachable set
  complete_gc->do_void();
}
768 630
769 631 void
770 632 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
771 633 oop obj = NULL;
772 634 oop next = refs_list.head();
773 635 while (next != obj) {
774 636 obj = next;
775 637 next = java_lang_ref_Reference::discovered(obj);
776 638 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
777 639 }
778 640 refs_list.set_head(NULL);
↓ open down ↓ |
165 lines elided |
↑ open up ↑ |
779 641 refs_list.set_length(0);
780 642 }
781 643
// Abandon (drop without processing) all References currently on the
// given discovered list, leaving it empty.
void
ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
  clear_discovered_references(refs_list);
}
786 648
787 649 void ReferenceProcessor::abandon_partial_discovery() {
788 650 // loop over the lists
789 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
651 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
790 652 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
791 - gclog_or_tty->print_cr("\nAbandoning %s discovered list",
792 - list_name(i));
653 + gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
793 654 }
794 655 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
795 656 }
796 657 }
797 658
798 659 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
799 660 public:
800 661 RefProcPhase1Task(ReferenceProcessor& ref_processor,
801 662 DiscoveredList refs_lists[],
802 663 ReferencePolicy* policy,
803 664 bool marks_oops_alive)
804 665 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
805 666 _policy(policy)
806 667 { }
807 668 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
808 669 OopClosure& keep_alive,
809 670 VoidClosure& complete_gc)
810 671 {
811 672 Thread* thr = Thread::current();
812 673 int refs_list_index = ((WorkerThread*)thr)->id();
813 674 _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
814 675 &is_alive, &keep_alive, &complete_gc);
815 676 }
816 677 private:
817 678 ReferencePolicy* _policy;
818 679 };
819 680
// Parallel task wrapper for phase 2 of reference processing:
// removing References whose referents are still alive.
class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
public:
  RefProcPhase2Task(ReferenceProcessor& ref_processor,
                    DiscoveredList refs_lists[],
                    bool marks_oops_alive)
    : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
  { }
  virtual void work(unsigned int i, BoolObjectClosure& is_alive,
                    OopClosure& keep_alive,
                    VoidClosure& complete_gc)
  {
    // Worker index i selects the i-th discovered list directly.
    _ref_processor.process_phase2(_refs_lists[i],
                                  &is_alive, &keep_alive, &complete_gc);
  }
};
835 696
// Parallel task wrapper for phase 3 of reference processing:
// clearing referents, or keeping them (and their closure) alive.
class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
public:
  RefProcPhase3Task(ReferenceProcessor& ref_processor,
                    DiscoveredList refs_lists[],
                    bool clear_referent,
                    bool marks_oops_alive)
    : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
      _clear_referent(clear_referent)
  { }
  virtual void work(unsigned int i, BoolObjectClosure& is_alive,
                    OopClosure& keep_alive,
                    VoidClosure& complete_gc)
  {
    // Don't use "refs_list_index" calculated in this way because
    // balance_queues() has moved the Ref's into the first n queues.
    // Thread* thr = Thread::current();
    // int refs_list_index = ((WorkerThread*)thr)->id();
    // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
    _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
                                  &is_alive, &keep_alive, &complete_gc);
  }
private:
  // Whether phase 3 should NULL out referents (true) or keep them alive.
  bool _clear_referent;
};
860 721
861 722 // Balances reference queues.
862 723 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
863 724 // queues[0, 1, ..., _num_q-1] because only the first _num_q
864 725 // corresponding to the active workers will be processed.
865 726 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
866 727 {
867 728 // calculate total length
868 729 size_t total_refs = 0;
869 730 if (TraceReferenceGC && PrintGCDetails) {
870 731 gclog_or_tty->print_cr("\nBalance ref_lists ");
871 732 }
872 733
873 734 for (int i = 0; i < _max_num_q; ++i) {
874 735 total_refs += ref_lists[i].length();
875 736 if (TraceReferenceGC && PrintGCDetails) {
876 737 gclog_or_tty->print("%d ", ref_lists[i].length());
877 738 }
878 739 }
879 740 if (TraceReferenceGC && PrintGCDetails) {
880 741 gclog_or_tty->print_cr(" = %d", total_refs);
881 742 }
882 743 size_t avg_refs = total_refs / _num_q + 1;
883 744 int to_idx = 0;
884 745 for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
885 746 bool move_all = false;
886 747 if (from_idx >= _num_q) {
887 748 move_all = ref_lists[from_idx].length() > 0;
888 749 }
889 750 while ((ref_lists[from_idx].length() > avg_refs) ||
890 751 move_all) {
891 752 assert(to_idx < _num_q, "Sanity Check!");
892 753 if (ref_lists[to_idx].length() < avg_refs) {
893 754 // move superfluous refs
894 755 size_t refs_to_move;
895 756 // Move all the Ref's if the from queue will not be processed.
896 757 if (move_all) {
897 758 refs_to_move = MIN2(ref_lists[from_idx].length(),
898 759 avg_refs - ref_lists[to_idx].length());
899 760 } else {
900 761 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
901 762 avg_refs - ref_lists[to_idx].length());
902 763 }
903 764
904 765 assert(refs_to_move > 0, "otherwise the code below will fail");
905 766
↓ open down ↓ |
103 lines elided |
↑ open up ↑ |
906 767 oop move_head = ref_lists[from_idx].head();
907 768 oop move_tail = move_head;
908 769 oop new_head = move_head;
909 770 // find an element to split the list on
910 771 for (size_t j = 0; j < refs_to_move; ++j) {
911 772 move_tail = new_head;
912 773 new_head = java_lang_ref_Reference::discovered(new_head);
913 774 }
914 775
915 776 // Add the chain to the to list.
916 - if (ref_lists[to_idx].head() == NULL) {
917 - // to list is empty. Make a loop at the end.
918 - java_lang_ref_Reference::set_discovered(move_tail, move_tail);
777 + if (_discovered_list_needs_barrier) {
778 + if (ref_lists[to_idx].head() == NULL) {
779 + // to list is empty. Make a loop at the end.
780 + java_lang_ref_Reference::set_discovered(move_tail, move_tail);
781 + } else {
782 + java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
783 + }
919 784 } else {
920 - java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
785 + HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(move_tail);
786 + if (ref_lists[to_idx].head() == NULL) {
787 + // to list is empty. Make a loop at the end.
788 + oop_store_raw(discovered_addr, move_tail);
789 + } else {
790 + oop_store_raw(discovered_addr, ref_lists[to_idx].head());
791 + }
921 792 }
922 793 ref_lists[to_idx].set_head(move_head);
923 794 ref_lists[to_idx].inc_length(refs_to_move);
924 795
925 796 // Remove the chain from the from list.
926 797 if (move_tail == new_head) {
927 798 // We found the end of the from list.
928 799 ref_lists[from_idx].set_head(NULL);
929 800 } else {
930 801 ref_lists[from_idx].set_head(new_head);
931 802 }
932 803 ref_lists[from_idx].dec_length(refs_to_move);
933 804 if (ref_lists[from_idx].length() == 0) {
934 805 break;
935 806 }
936 807 } else {
937 808 to_idx = (to_idx + 1) % _num_q;
938 809 }
939 810 }
940 811 }
941 812 #ifdef ASSERT
942 813 size_t balanced_total_refs = 0;
943 814 for (int i = 0; i < _max_num_q; ++i) {
944 815 balanced_total_refs += ref_lists[i].length();
945 816 if (TraceReferenceGC && PrintGCDetails) {
946 817 gclog_or_tty->print("%d ", ref_lists[i].length());
947 818 }
948 819 }
949 820 if (TraceReferenceGC && PrintGCDetails) {
950 821 gclog_or_tty->print_cr(" = %d", balanced_total_refs);
951 822 gclog_or_tty->flush();
952 823 }
953 824 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
954 825 #endif
955 826 }
956 827
// Rebalance all four kinds of discovered lists so that the work is
// spread over the first _num_q queues (the ones that get processed).
void ReferenceProcessor::balance_all_queues() {
  balance_queues(_discoveredSoftRefs);
  balance_queues(_discoveredWeakRefs);
  balance_queues(_discoveredFinalRefs);
  balance_queues(_discoveredPhantomRefs);
}
963 834
964 835 void
965 836 ReferenceProcessor::process_discovered_reflist(
966 837 DiscoveredList refs_lists[],
967 838 ReferencePolicy* policy,
968 839 bool clear_referent,
969 840 BoolObjectClosure* is_alive,
970 841 OopClosure* keep_alive,
971 842 VoidClosure* complete_gc,
972 843 AbstractRefProcTaskExecutor* task_executor)
973 844 {
974 845 bool mt_processing = task_executor != NULL && _processing_is_mt;
975 846 // If discovery used MT and a dynamic number of GC threads, then
976 847 // the queues must be balanced for correctness if fewer than the
977 848 // maximum number of queues were used. The number of queue used
978 849 // during discovery may be different than the number to be used
979 850 // for processing so don't depend of _num_q < _max_num_q as part
980 851 // of the test.
981 852 bool must_balance = _discovery_is_mt;
982 853
983 854 if ((mt_processing && ParallelRefProcBalancingEnabled) ||
984 855 must_balance) {
985 856 balance_queues(refs_lists);
986 857 }
987 858 if (PrintReferenceGC && PrintGCDetails) {
988 859 size_t total = 0;
989 860 for (int i = 0; i < _max_num_q; ++i) {
990 861 total += refs_lists[i].length();
991 862 }
992 863 gclog_or_tty->print(", %u refs", total);
993 864 }
994 865
995 866 // Phase 1 (soft refs only):
996 867 // . Traverse the list and remove any SoftReferences whose
997 868 // referents are not alive, but that should be kept alive for
998 869 // policy reasons. Keep alive the transitive closure of all
999 870 // such referents.
1000 871 if (policy != NULL) {
1001 872 if (mt_processing) {
1002 873 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
1003 874 task_executor->execute(phase1);
1004 875 } else {
1005 876 for (int i = 0; i < _max_num_q; i++) {
1006 877 process_phase1(refs_lists[i], policy,
1007 878 is_alive, keep_alive, complete_gc);
1008 879 }
1009 880 }
1010 881 } else { // policy == NULL
1011 882 assert(refs_lists != _discoveredSoftRefs,
1012 883 "Policy must be specified for soft references.");
1013 884 }
1014 885
1015 886 // Phase 2:
1016 887 // . Traverse the list and remove any refs whose referents are alive.
1017 888 if (mt_processing) {
1018 889 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
1019 890 task_executor->execute(phase2);
1020 891 } else {
1021 892 for (int i = 0; i < _max_num_q; i++) {
1022 893 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
1023 894 }
1024 895 }
1025 896
1026 897 // Phase 3:
1027 898 // . Traverse the list and process referents as appropriate.
1028 899 if (mt_processing) {
1029 900 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
1030 901 task_executor->execute(phase3);
1031 902 } else {
↓ open down ↓ |
101 lines elided |
↑ open up ↑ |
1032 903 for (int i = 0; i < _max_num_q; i++) {
1033 904 process_phase3(refs_lists[i], clear_referent,
1034 905 is_alive, keep_alive, complete_gc);
1035 906 }
1036 907 }
1037 908 }
1038 909
1039 910 void ReferenceProcessor::clean_up_discovered_references() {
1040 911 // loop over the lists
1041 912 // Should this instead be
1042 - // for (int i = 0; i < subclasses_of_ref; i++_ {
913 + // for (int i = 0; i < subclasses_of_ref(); i++) {
1043 914 // for (int j = 0; j < _num_q; j++) {
1044 915 // int index = i * _max_num_q + j;
1045 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
916 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
1046 917 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
1047 918 gclog_or_tty->print_cr(
1048 919 "\nScrubbing %s discovered list of Null referents",
1049 920 list_name(i));
1050 921 }
1051 922 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
1052 923 }
1053 924 }
1054 925
1055 926 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
1056 927 assert(!discovery_is_atomic(), "Else why call this method?");
1057 928 DiscoveredListIterator iter(refs_list, NULL, NULL);
1058 929 while (iter.has_next()) {
1059 930 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1060 931 oop next = java_lang_ref_Reference::next(iter.obj());
1061 932 assert(next->is_oop_or_null(), "bad next field");
1062 933 // If referent has been cleared or Reference is not active,
1063 934 // drop it.
1064 935 if (iter.referent() == NULL || next != NULL) {
1065 936 debug_only(
1066 937 if (PrintGCDetails && TraceReferenceGC) {
1067 938 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
1068 939 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
1069 940 " and referent: " INTPTR_FORMAT,
1070 941 iter.obj(), next, iter.referent());
1071 942 }
1072 943 )
1073 944 // Remove Reference object from list
1074 945 iter.remove();
1075 946 iter.move_to_next();
1076 947 } else {
1077 948 iter.next();
1078 949 }
1079 950 }
1080 951 NOT_PRODUCT(
1081 952 if (PrintGCDetails && TraceReferenceGC) {
1082 953 gclog_or_tty->print(
1083 954 " Removed %d Refs with NULL referents out of %d discovered Refs",
1084 955 iter.removed(), iter.processed());
1085 956 }
1086 957 )
1087 958 }
1088 959
1089 960 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
1090 961 int id = 0;
1091 962 // Determine the queue index to use for this object.
1092 963 if (_discovery_is_mt) {
1093 964 // During a multi-threaded discovery phase,
1094 965 // each thread saves to its "own" list.
1095 966 Thread* thr = Thread::current();
1096 967 id = thr->as_Worker_thread()->id();
1097 968 } else {
1098 969 // single-threaded discovery, we save in round-robin
1099 970 // fashion to each of the lists.
1100 971 if (_processing_is_mt) {
1101 972 id = next_id();
1102 973 }
1103 974 }
1104 975 assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
1105 976
1106 977 // Get the discovered queue to which we will add
1107 978 DiscoveredList* list = NULL;
1108 979 switch (rt) {
1109 980 case REF_OTHER:
1110 981 // Unknown reference type, no special treatment
1111 982 break;
1112 983 case REF_SOFT:
1113 984 list = &_discoveredSoftRefs[id];
1114 985 break;
1115 986 case REF_WEAK:
1116 987 list = &_discoveredWeakRefs[id];
1117 988 break;
1118 989 case REF_FINAL:
1119 990 list = &_discoveredFinalRefs[id];
1120 991 break;
1121 992 case REF_PHANTOM:
1122 993 list = &_discoveredPhantomRefs[id];
1123 994 break;
1124 995 case REF_NONE:
1125 996 // we should not reach here if we are an instanceRefKlass
1126 997 default:
1127 998 ShouldNotReachHere();
1128 999 }
1129 1000 if (TraceReferenceGC && PrintGCDetails) {
1130 1001 gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
1131 1002 }
1132 1003 return list;
1133 1004 }
1134 1005
// Thread-safely prepend obj to refs_list during MT discovery.  The
// thread that wins the CAS from NULL on obj's discovered field is the
// one that links obj onto the list; losing threads leave obj alone.
inline void
ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
                                              oop obj,
                                              HeapWord* discovered_addr) {
  assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
  // First we must make sure this object is only enqueued once. CAS in a non null
  // discovered_addr.
  oop current_head = refs_list.head();
  // The last ref must have its discovered field pointing to itself.
  oop next_discovered = (current_head != NULL) ? current_head : obj;

  // Note: In the case of G1, this specific pre-barrier is strictly
  // not necessary because the only case we are interested in
  // here is when *discovered_addr is NULL (see the CAS further below),
  // so this will expand to nothing. As a result, we have manually
  // elided this out for G1, but left in the test for some future
  // collector that might have need for a pre-barrier here, e.g.:-
  // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
  assert(!_discovered_list_needs_barrier || UseG1GC,
         "Need to check non-G1 collector: "
         "may need a pre-write-barrier for CAS from NULL below");
  oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
                                                    NULL);
  if (retest == NULL) {
    // This thread just won the right to enqueue the object.
    // We have separate lists for enqueueing, so no synchronization
    // is necessary.
    refs_list.set_head(obj);
    refs_list.inc_length(1);
    if (_discovered_list_needs_barrier) {
      // Post-barrier for the successful store done by the CAS above.
      _bs->write_ref_field((void*)discovered_addr, next_discovered);
    }

    if (TraceReferenceGC) {
      gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
                             obj, obj->blueprint()->internal_name());
    }
  } else {
    // If retest was non NULL, another thread beat us to it:
    // The reference has already been discovered...
    if (TraceReferenceGC) {
      gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
                             obj, obj->blueprint()->internal_name());
    }
  }
}
1181 1052
#ifndef PRODUCT
// Non-atomic (i.e. concurrent) discovery might allow us
// to observe j.l.References with NULL referents, being those
// cleared concurrently by mutators during (or after) discovery.
// Debug-only sanity check: the referent must be a valid oop, and may
// be NULL only when discovery is non-atomic.
void ReferenceProcessor::verify_referent(oop obj) {
  bool da = discovery_is_atomic();
  oop referent = java_lang_ref_Reference::referent(obj);
  assert(da ? referent->is_oop() : referent->is_oop_or_null(),
         err_msg("Bad referent " INTPTR_FORMAT " found in Reference "
                 INTPTR_FORMAT " during %satomic discovery ",
                 (intptr_t)referent, (intptr_t)obj, da ? "" : "non-"));
}
#endif
1195 1066
1196 1067 // We mention two of several possible choices here:
1197 1068 // #0: if the reference object is not in the "originating generation"
1198 1069 // (or part of the heap being collected, indicated by our "span"
1199 1070 // we don't treat it specially (i.e. we scan it as we would
1200 1071 // a normal oop, treating its references as strong references).
1201 1072 // This means that references can't be discovered unless their
1202 1073 // referent is also in the same span. This is the simplest,
1203 1074 // most "local" and most conservative approach, albeit one
1204 1075 // that may cause weak references to be enqueued least promptly.
1205 1076 // We call this choice the "ReferenceBasedDiscovery" policy.
1206 1077 // #1: the reference object may be in any generation (span), but if
1207 1078 // the referent is in the generation (span) being currently collected
1208 1079 // then we can discover the reference object, provided
1209 1080 // the object has not already been discovered by
1210 1081 // a different concurrently running collector (as may be the
1211 1082 // case, for instance, if the reference object is in CMS and
1212 1083 // the referent in DefNewGeneration), and provided the processing
1213 1084 // of this reference object by the current collector will
1214 1085 // appear atomic to every other collector in the system.
1215 1086 // (Thus, for instance, a concurrent collector may not
1216 1087 // discover references in other generations even if the
1217 1088 // referent is in its own generation). This policy may,
1218 1089 // in certain cases, enqueue references somewhat sooner than
1219 1090 // might Policy #0 above, but at marginally increased cost
1220 1091 // and complexity in processing these references.
1221 1092 // We call this choice the "ReferentBasedDiscovery" policy.
1222 1093 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
1223 1094 // Make sure we are discovering refs (rather than processing discovered refs).
1224 1095 if (!_discovering_refs || !RegisterReferences) {
1225 1096 return false;
1226 1097 }
1227 1098 // We only discover active references.
1228 1099 oop next = java_lang_ref_Reference::next(obj);
1229 1100 if (next != NULL) { // Ref is no longer active
1230 1101 return false;
1231 1102 }
1232 1103
1233 1104 HeapWord* obj_addr = (HeapWord*)obj;
1234 1105 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1235 1106 !_span.contains(obj_addr)) {
1236 1107 // Reference is not in the originating generation;
1237 1108 // don't treat it specially (i.e. we want to scan it as a normal
1238 1109 // object with strong references).
1239 1110 return false;
1240 1111 }
1241 1112
1242 1113 // We only discover references whose referents are not (yet)
1243 1114 // known to be strongly reachable.
1244 1115 if (is_alive_non_header() != NULL) {
1245 1116 verify_referent(obj);
1246 1117 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
1247 1118 return false; // referent is reachable
1248 1119 }
1249 1120 }
1250 1121 if (rt == REF_SOFT) {
1251 1122 // For soft refs we can decide now if these are not
1252 1123 // current candidates for clearing, in which case we
↓ open down ↓ |
197 lines elided |
↑ open up ↑ |
1253 1124 // can mark through them now, rather than delaying that
1254 1125 // to the reference-processing phase. Since all current
1255 1126 // time-stamp policies advance the soft-ref clock only
1256 1127 // at a major collection cycle, this is always currently
1257 1128 // accurate.
1258 1129 if (!_current_soft_ref_policy->should_clear_reference(obj)) {
1259 1130 return false;
1260 1131 }
1261 1132 }
1262 1133
1134 + ResourceMark rm; // Needed for tracing.
1135 +
1263 1136 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1264 1137 const oop discovered = java_lang_ref_Reference::discovered(obj);
1265 1138 assert(discovered->is_oop_or_null(), "bad discovered field");
1266 1139 if (discovered != NULL) {
1267 1140 // The reference has already been discovered...
1268 1141 if (TraceReferenceGC) {
1269 1142 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1270 1143 obj, obj->blueprint()->internal_name());
1271 1144 }
1272 1145 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1273 1146 // assumes that an object is not processed twice;
1274 1147 // if it's been already discovered it must be on another
1275 1148 // generation's discovered list; so we won't discover it.
1276 1149 return false;
1277 1150 } else {
1278 1151 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1279 1152 "Unrecognized policy");
1280 1153 // Check assumption that an object is not potentially
1281 1154 // discovered twice except by concurrent collectors that potentially
1282 1155 // trace the same Reference object twice.
1283 1156 assert(UseConcMarkSweepGC || UseG1GC,
1284 1157 "Only possible with a concurrent marking collector");
1285 1158 return true;
1286 1159 }
1287 1160 }
1288 1161
1289 1162 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1290 1163 verify_referent(obj);
1291 1164 // Discover if and only if EITHER:
1292 1165 // .. reference is in our span, OR
1293 1166 // .. we are an atomic collector and referent is in our span
1294 1167 if (_span.contains(obj_addr) ||
1295 1168 (discovery_is_atomic() &&
1296 1169 _span.contains(java_lang_ref_Reference::referent(obj)))) {
1297 1170 // should_enqueue = true;
1298 1171 } else {
1299 1172 return false;
1300 1173 }
1301 1174 } else {
1302 1175 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1303 1176 _span.contains(obj_addr), "code inconsistency");
1304 1177 }
1305 1178
1306 1179 // Get the right type of discovered queue head.
1307 1180 DiscoveredList* list = get_discovered_list(rt);
1308 1181 if (list == NULL) {
1309 1182 return false; // nothing special needs to be done
1310 1183 }
1311 1184
1312 1185 if (_discovery_is_mt) {
1313 1186 add_to_discovered_list_mt(*list, obj, discovered_addr);
1314 1187 } else {
1315 1188 // If "_discovered_list_needs_barrier", we do write barriers when
1316 1189 // updating the discovered reference list. Otherwise, we do a raw store
1317 1190 // here: the field will be visited later when processing the discovered
1318 1191 // references.
1319 1192 oop current_head = list->head();
1320 1193 // The last ref must have its discovered field pointing to itself.
1321 1194 oop next_discovered = (current_head != NULL) ? current_head : obj;
1322 1195
1323 1196 // As in the case further above, since we are over-writing a NULL
1324 1197 // pre-value, we can safely elide the pre-barrier here for the case of G1.
1325 1198 // e.g.:- _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1326 1199 assert(discovered == NULL, "control point invariant");
1327 1200 assert(!_discovered_list_needs_barrier || UseG1GC,
1328 1201 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
1329 1202 oop_store_raw(discovered_addr, next_discovered);
1330 1203 if (_discovered_list_needs_barrier) {
1331 1204 _bs->write_ref_field((void*)discovered_addr, next_discovered);
1332 1205 }
1333 1206 list->set_head(obj);
1334 1207 list->inc_length(1);
1335 1208
1336 1209 if (TraceReferenceGC) {
1337 1210 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
1338 1211 obj, obj->blueprint()->internal_name());
1339 1212 }
1340 1213 }
1341 1214 assert(obj->is_oop(), "Discovered a bad reference");
1342 1215 verify_referent(obj);
1343 1216 return true;
1344 1217 }
1345 1218
// Preclean the discovered references by removing those
// whose referents are alive, and by marking from those that
// are not active. These lists can be handled here
// in any order and, indeed, concurrently.
//
// Yields between lists (and between queues within a list) via the
// supplied YieldClosure; returns early when asked to yield.
void ReferenceProcessor::preclean_discovered_references(
  BoolObjectClosure* is_alive,
  OopClosure* keep_alive,
  VoidClosure* complete_gc,
  YieldClosure* yield,
  bool should_unload_classes) {

  NOT_PRODUCT(verify_ok_to_handle_reflists());

#ifdef ASSERT
  // && binds tighter than ||: klasses must be remembered when class
  // unloading is possible in the current (CMS-related) configuration.
  bool must_remember_klasses = ClassUnloading && !UseConcMarkSweepGC ||
                               CMSClassUnloadingEnabled && UseConcMarkSweepGC ||
                               ExplicitGCInvokesConcurrentAndUnloadsClasses &&
                                 UseConcMarkSweepGC && should_unload_classes;
  RememberKlassesChecker mx(must_remember_klasses);
#endif
  // Soft references
  {
    TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
              false, gclog_or_tty);
    for (int i = 0; i < _max_num_q; i++) {
      if (yield->should_return()) {
        return;
      }
      preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }

  // Weak references
  {
    TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
              false, gclog_or_tty);
    for (int i = 0; i < _max_num_q; i++) {
      if (yield->should_return()) {
        return;
      }
      preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }

  // Final references
  {
    TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
              false, gclog_or_tty);
    for (int i = 0; i < _max_num_q; i++) {
      if (yield->should_return()) {
        return;
      }
      preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }

  // Phantom references
  {
    TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
              false, gclog_or_tty);
    for (int i = 0; i < _max_num_q; i++) {
      if (yield->should_return()) {
        return;
      }
      preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }
}
1418 1291
// Walk the given discovered ref list, and remove all reference objects
// whose referents are still alive, whose referents are NULL or which
// are not active (have a non-NULL next field). NOTE: When we are
// thus precleaning the ref lists (which happens single-threaded today),
// we do not disable refs discovery to honour the correct semantics of
// java.lang.Reference. As a result, we need to be careful below
// that ref removal steps interleave safely with ref discovery steps
// (in this thread).
void
ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
                                                BoolObjectClosure* is_alive,
                                                OopClosure* keep_alive,
                                                VoidClosure* complete_gc,
                                                YieldClosure* yield) {
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    oop obj = iter.obj();
    oop next = java_lang_ref_Reference::next(obj);
    if (iter.referent() == NULL || iter.is_referent_alive() ||
        next != NULL) {
      // The referent has been cleared, or is alive, or the Reference is not
      // active; we need to trace and mark its cohort.
      if (TraceReferenceGC) {
        gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
                               iter.obj(), iter.obj()->blueprint()->internal_name());
      }
      // Remove Reference object from list
      iter.remove();
      // Keep alive its cohort.
      iter.make_referent_alive();
      // Also keep alive whatever the next field points at.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        keep_alive->do_oop(next_addr);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        keep_alive->do_oop(next_addr);
      }
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  // Close the reachable set
  complete_gc->do_void();

  NOT_PRODUCT(
    if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
      gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
        "Refs in discovered list " INTPTR_FORMAT,
        iter.removed(), iter.processed(), (address)refs_list.head());
    }
  )
}
1473 1346
1474 1347 const char* ReferenceProcessor::list_name(int i) {
1475 - assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
1348 + assert(i >= 0 && i <= _max_num_q * subclasses_of_ref(), "Out of bounds index");
1476 1349 int j = i / _max_num_q;
1477 1350 switch (j) {
1478 1351 case 0: return "SoftRef";
1479 1352 case 1: return "WeakRef";
1480 1353 case 2: return "FinalRef";
1481 1354 case 3: return "PhantomRef";
1482 1355 }
1483 1356 ShouldNotReachHere();
1484 1357 return NULL;
1485 1358 }
1486 1359
#ifndef PRODUCT
// Debug-only hook: verify that it is currently safe to traverse or
// mutate the discovered reference lists.  Intentionally a no-op today.
void ReferenceProcessor::verify_ok_to_handle_reflists() {
  // empty for now
}
#endif
1492 1365
1493 1366 #ifndef PRODUCT
1494 1367 void ReferenceProcessor::clear_discovered_references() {
1495 1368 guarantee(!_discovering_refs, "Discovering refs?");
1496 - for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
1369 + for (int i = 0; i < _max_num_q * subclasses_of_ref(); i++) {
1497 1370 clear_discovered_references(_discoveredSoftRefs[i]);
1498 1371 }
1499 1372 }
1500 1373
1501 1374 #endif // PRODUCT
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX