// At the end of the list, we should make _prev_discovered point to itself.
// If _current_discovered is the first ref, then _prev_discovered_addr will be
// in the DiscoveredList, and _prev_discovered will be NULL.
289 new_next = _prev_discovered;
290 } else {
291 new_next = _next_discovered;
292 }
293 // Remove Reference object from discovered list. Note that G1 does not need a
294 // pre-barrier here because we know the Reference has already been found/marked,
295 // that's how it ended up in the discovered list in the first place.
296 RawAccess<>::oop_store(_prev_discovered_addr, new_next);
297 NOT_PRODUCT(_removed++);
298 _refs_list.dec_length(1);
299 }
300
// Clear the current Reference's referent field by storing NULL.
// RawAccess: no GC barriers are applied; the clear happens as part of
// reference processing itself, where barriers are not required.
void DiscoveredListIterator::clear_referent() {
  RawAccess<>::oop_store(_referent_addr, oop(NULL));
}
304
// Prepare the current Reference for the pending list:
// make it inactive, then chain it via its discovered field.
void DiscoveredListIterator::enqueue() {
  // Self-loop next, so as to make Ref not active.
  java_lang_ref_Reference::set_next_raw(_current_discovered, _current_discovered);

  // Link to the next discovered Reference through the discovered field.
  // AS_NO_KEEPALIVE: this store must not keep the stored value alive.
  HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_current_discovered,
                                            java_lang_ref_Reference::discovered_offset,
                                            _next_discovered);
}
313
// Finish enqueueing: splice the whole discovered list onto the global
// pending list. A no-op if nothing was enqueued (_prev_discovered == NULL).
void DiscoveredListIterator::complete_enqueue() {
  if (_prev_discovered != NULL) {
    // This is the last object.
    // Swap refs_list into pending list and set obj's
    // discovered to what we read from the pending list.
    oop old = Universe::swap_reference_pending_list(_refs_list.head());
    HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_prev_discovered, java_lang_ref_Reference::discovered_offset, old);
  }
}
323
324 // NOTE: process_phase*() are largely similar, and at a high level
325 // merely iterate over the extant list applying a predicate to
326 // each of its elements and possibly removing that element from the
327 // list and applying some further closures to that element.
328 // We should consider the possibility of replacing these
347 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
348 if (referent_is_dead &&
349 !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
350 log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
351 p2i(iter.obj()), iter.obj()->klass()->internal_name());
352 // Remove Reference object from list
353 iter.remove();
354 // keep the referent around
355 iter.make_referent_alive();
356 iter.move_to_next();
357 } else {
358 iter.next();
359 }
360 }
361 // Close the reachable set
362 complete_gc->do_void();
363 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
364 iter.removed(), iter.processed(), p2i(&refs_list));
365 }
366
367 void ReferenceProcessor::process_phase2(DiscoveredList& refs_list,
368 BoolObjectClosure* is_alive,
369 OopClosure* keep_alive,
370 VoidClosure* complete_gc) {
371 if (discovery_is_atomic()) {
372 // complete_gc is ignored in this case for this phase
373 pp2_work(refs_list, is_alive, keep_alive);
374 } else {
375 assert(complete_gc != NULL, "Error");
376 pp2_work_concurrent_discovery(refs_list, is_alive,
377 keep_alive, complete_gc);
378 }
379 }
// Traverse the list and remove any Refs that are not active, or
// whose referents are either alive or NULL.
// Used only when discovery is atomic, so every discovered Reference
// must still be active and have a non-NULL referent.
void
ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
                             BoolObjectClosure* is_alive,
                             OopClosure* keep_alive) {
  assert(discovery_is_atomic(), "Error");
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // Atomic discovery guarantees a non-NULL referent; disallow NULL.
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
    assert(next == NULL, "Should not discover inactive Reference");
    if (iter.is_referent_alive()) {
      log_develop_trace(gc, ref)("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
                                 p2i(iter.obj()), iter.obj()->klass()->internal_name());
      // The referent is reachable after all.
      // Remove Reference object from list.
      iter.remove();
      // Update the referent pointer as necessary: Note that this
      // should not entail any recursive marking because the
      // referent must already have been traversed.
      iter.make_referent_alive();
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
                                 " Refs in discovered list " INTPTR_FORMAT,
                                 iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
415
// Traverse the list and remove any Refs whose referents are either
// alive or NULL, or which are no longer active (non-NULL next field).
// Discovery was concurrent, so the mutator may have cleared referents
// or deactivated References since they were discovered.
void
ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
                                                  BoolObjectClosure* is_alive,
                                                  OopClosure* keep_alive,
                                                  VoidClosure* complete_gc) {
  assert(!discovery_is_atomic(), "Error");
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // The mutator may have cleared the referent concurrently.
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    HeapWord* next_addr = java_lang_ref_Reference::next_addr_raw(iter.obj());
    oop next = java_lang_ref_Reference::next(iter.obj());
    if ((iter.referent() == NULL || iter.is_referent_alive() ||
         next != NULL)) {
      assert(oopDesc::is_oop_or_null(next), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
      // Remove Reference object from list
      iter.remove();
      // Trace the cohorts
      iter.make_referent_alive();
      // Also keep the next field's value alive; the compressed-oops
      // split is needed because next_addr is a raw in-heap address.
      if (UseCompressedOops) {
        keep_alive->do_oop((narrowOop*)next_addr);
      } else {
        keep_alive->do_oop((oop*)next_addr);
      }
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  // Now close the newly reachable set
  complete_gc->do_void();
  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
                                 " Refs in discovered list " INTPTR_FORMAT,
                                 iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
454
// Phase 3: for each remaining Reference, either NULL out the referent
// (clear_referent) or keep it alive, then link the entire list onto the
// global pending list and empty refs_list.
void ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
                                        bool clear_referent,
                                        BoolObjectClosure* is_alive,
                                        OopClosure* keep_alive,
                                        VoidClosure* complete_gc) {
  ResourceMark rm;
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // keep the referent around
      iter.make_referent_alive();
    }
    // Make the Reference inactive and chain it via its discovered field.
    iter.enqueue();
    log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                               clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
    assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
    iter.next();
  }
  // Splice the whole list onto the global pending list.
  iter.complete_enqueue();
  // Close the reachable set
  complete_gc->do_void();
  // Clear the list.
  refs_list.set_head(NULL);
  refs_list.set_length(0);
}
484
485 void
486 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
487 oop obj = NULL;
488 oop next = refs_list.head();
489 while (next != obj) {
896 // the referent is in the generation (span) being currently collected
897 // then we can discover the reference object, provided
898 // the object has not already been discovered by
899 // a different concurrently running collector (as may be the
900 // case, for instance, if the reference object is in CMS and
901 // the referent in DefNewGeneration), and provided the processing
902 // of this reference object by the current collector will
903 // appear atomic to every other collector in the system.
904 // (Thus, for instance, a concurrent collector may not
905 // discover references in other generations even if the
906 // referent is in its own generation). This policy may,
907 // in certain cases, enqueue references somewhat sooner than
908 // might Policy #0 above, but at marginally increased cost
909 // and complexity in processing these references.
// We call this choice the "ReferentBasedDiscovery" policy.
911 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
912 // Make sure we are discovering refs (rather than processing discovered refs).
913 if (!_discovering_refs || !RegisterReferences) {
914 return false;
915 }
916 // We only discover active references.
917 oop next = java_lang_ref_Reference::next(obj);
918 if (next != NULL) { // Ref is no longer active
919 return false;
920 }
921
922 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
923 !is_subject_to_discovery(obj)) {
924 // Reference is not in the originating generation;
925 // don't treat it specially (i.e. we want to scan it as a normal
926 // object with strong references).
927 return false;
928 }
929
930 // We only discover references whose referents are not (yet)
931 // known to be strongly reachable.
932 if (is_alive_non_header() != NULL) {
933 verify_referent(obj);
934 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
935 return false; // referent is reachable
936 }
937 }
938 if (rt == REF_SOFT) {
1104
// Walk the given discovered ref list, and remove all reference objects
// whose referents are still alive, whose referents are NULL or which
// are not active (have a non-NULL next field). NOTE: When we are
// thus precleaning the ref lists (which happens single-threaded today),
// we do not disable refs discovery to honor the correct semantics of
// java.lang.Reference. As a result, we need to be careful below
// that ref removal steps interleave safely with ref discovery steps
// (in this thread).
// Returns true if the walk was abandoned early because the yield
// closure requested a return; false if the whole list was processed.
bool ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
                                                     BoolObjectClosure* is_alive,
                                                     OopClosure* keep_alive,
                                                     VoidClosure* complete_gc,
                                                     YieldClosure* yield) {
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // Yield to higher-priority work if requested; caller may retry later.
    if (yield->should_return_fine_grain()) {
      return true;
    }
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    oop obj = iter.obj();
    oop next = java_lang_ref_Reference::next(obj);
    if (iter.referent() == NULL || iter.is_referent_alive() || next != NULL) {
      // The referent has been cleared, or is alive, or the Reference is not
      // active; we need to trace and mark its cohort.
      log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
                                 p2i(iter.obj()), iter.obj()->klass()->internal_name());
      // Remove Reference object from list
      iter.remove();
      // Keep alive its cohort.
      iter.make_referent_alive();
      // Also keep the next field's value alive; split on compressed oops
      // since next_addr is a raw in-heap address.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr_raw(obj);
        keep_alive->do_oop(next_addr);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
        keep_alive->do_oop(next_addr);
      }
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  // Close the reachable set
  complete_gc->do_void();

  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
                                 iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
  return false;
}
1158
1159 const char* ReferenceProcessor::list_name(uint i) {
1160 assert(i <= _max_num_queues * number_of_subclasses_of_ref(),
1161 "Out of bounds index");
|
// At the end of the list, we should make _prev_discovered point to itself.
// If _current_discovered is the first ref, then _prev_discovered_addr will be
// in the DiscoveredList, and _prev_discovered will be NULL.
289 new_next = _prev_discovered;
290 } else {
291 new_next = _next_discovered;
292 }
293 // Remove Reference object from discovered list. Note that G1 does not need a
294 // pre-barrier here because we know the Reference has already been found/marked,
295 // that's how it ended up in the discovered list in the first place.
296 RawAccess<>::oop_store(_prev_discovered_addr, new_next);
297 NOT_PRODUCT(_removed++);
298 _refs_list.dec_length(1);
299 }
300
// Clear the current Reference's referent field by storing NULL.
// RawAccess: no GC barriers are applied; the clear happens as part of
// reference processing itself, where barriers are not required.
void DiscoveredListIterator::clear_referent() {
  RawAccess<>::oop_store(_referent_addr, oop(NULL));
}
304
// Chain the current Reference to the next discovered Reference through
// its discovered field, forming the pending list.
// AS_NO_KEEPALIVE: this store must not keep the stored value alive.
void DiscoveredListIterator::enqueue() {
  HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_current_discovered,
                                            java_lang_ref_Reference::discovered_offset,
                                            _next_discovered);
}
310
// Finish enqueueing: splice the whole discovered list onto the global
// pending list. A no-op if nothing was enqueued (_prev_discovered == NULL).
void DiscoveredListIterator::complete_enqueue() {
  if (_prev_discovered != NULL) {
    // This is the last object.
    // Swap refs_list into pending list and set obj's
    // discovered to what we read from the pending list.
    oop old = Universe::swap_reference_pending_list(_refs_list.head());
    HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_prev_discovered, java_lang_ref_Reference::discovered_offset, old);
  }
}
320
321 // NOTE: process_phase*() are largely similar, and at a high level
322 // merely iterate over the extant list applying a predicate to
323 // each of its elements and possibly removing that element from the
324 // list and applying some further closures to that element.
325 // We should consider the possibility of replacing these
344 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
345 if (referent_is_dead &&
346 !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
347 log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
348 p2i(iter.obj()), iter.obj()->klass()->internal_name());
349 // Remove Reference object from list
350 iter.remove();
351 // keep the referent around
352 iter.make_referent_alive();
353 iter.move_to_next();
354 } else {
355 iter.next();
356 }
357 }
358 // Close the reachable set
359 complete_gc->do_void();
360 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
361 iter.removed(), iter.processed(), p2i(&refs_list));
362 }
363
364 inline void log_dropped_ref(const DiscoveredListIterator& iter, const char* reason) {
365 log_develop_trace(gc, ref)("Dropping %s reference " PTR_FORMAT ": %s",
366 reason, p2i(iter.obj()),
367 iter.obj()->klass()->internal_name());
368 }
369
// Traverse the list and remove any Refs whose referents are alive,
// or NULL if discovery is not atomic.
void ReferenceProcessor::process_phase2(DiscoveredList& refs_list,
                                        BoolObjectClosure* is_alive,
                                        OopClosure* keep_alive,
                                        VoidClosure* complete_gc) {
  // complete_gc is unused.
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // A NULL referent is only legal when discovery is not atomic (the
    // mutator may have cleared it after discovery).
    iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
    if (iter.referent() == NULL) {
      // Reference has been cleared since discovery; only possible if
      // discovery is not atomic (checked by load_ptrs). Remove
      // reference from list.
      log_dropped_ref(iter, "cleared");
      iter.remove();
      iter.move_to_next();
    } else if (iter.is_referent_alive()) {
      // The referent is reachable after all.
      // Remove reference from list.
      log_dropped_ref(iter, "reachable");
      iter.remove();
      // Update the referent pointer as necessary. Note that this
      // should not entail any recursive marking because the
      // referent must already have been traversed.
      iter.make_referent_alive();
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
                                 " Refs in discovered list " INTPTR_FORMAT,
                                 iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
409
// Phase 3: for each remaining Reference, either NULL out the referent
// (clear_referent) or — for FinalReferences — keep it alive and make
// the Reference inactive; then link the entire list onto the global
// pending list and empty refs_list.
void ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
                                        bool clear_referent,
                                        BoolObjectClosure* is_alive,
                                        OopClosure* keep_alive,
                                        VoidClosure* complete_gc) {
  ResourceMark rm;
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // Current reference is a FinalReference; that's the only kind we
      // don't clear the referent, instead keeping it for calling finalize.
      iter.make_referent_alive();
      // Self-loop next, to mark it not active.
      assert(java_lang_ref_Reference::next(iter.obj()) == NULL, "enqueued FinalReference");
      java_lang_ref_Reference::set_next_raw(iter.obj(), iter.obj());
    }
    // Chain this Reference into the pending list via its discovered field.
    iter.enqueue();
    log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                               clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
    assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
    iter.next();
  }
  // Splice the whole list onto the global pending list.
  iter.complete_enqueue();
  // Close the reachable set
  complete_gc->do_void();
  // Clear the list.
  refs_list.set_head(NULL);
  refs_list.set_length(0);
}
443
444 void
445 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
446 oop obj = NULL;
447 oop next = refs_list.head();
448 while (next != obj) {
855 // the referent is in the generation (span) being currently collected
856 // then we can discover the reference object, provided
857 // the object has not already been discovered by
858 // a different concurrently running collector (as may be the
859 // case, for instance, if the reference object is in CMS and
860 // the referent in DefNewGeneration), and provided the processing
861 // of this reference object by the current collector will
862 // appear atomic to every other collector in the system.
863 // (Thus, for instance, a concurrent collector may not
864 // discover references in other generations even if the
865 // referent is in its own generation). This policy may,
866 // in certain cases, enqueue references somewhat sooner than
867 // might Policy #0 above, but at marginally increased cost
868 // and complexity in processing these references.
// We call this choice the "ReferentBasedDiscovery" policy.
870 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
871 // Make sure we are discovering refs (rather than processing discovered refs).
872 if (!_discovering_refs || !RegisterReferences) {
873 return false;
874 }
875
876 if ((rt == REF_FINAL) && (java_lang_ref_Reference::next(obj) != NULL)) {
877 // Don't rediscover non-active FinalReferences.
878 return false;
879 }
880
881 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
882 !is_subject_to_discovery(obj)) {
883 // Reference is not in the originating generation;
884 // don't treat it specially (i.e. we want to scan it as a normal
885 // object with strong references).
886 return false;
887 }
888
889 // We only discover references whose referents are not (yet)
890 // known to be strongly reachable.
891 if (is_alive_non_header() != NULL) {
892 verify_referent(obj);
893 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
894 return false; // referent is reachable
895 }
896 }
897 if (rt == REF_SOFT) {
1063
// Walk the given discovered ref list, and remove all reference objects
// whose referents are still alive or whose referents are NULL.
// NOTE: When we are
// thus precleaning the ref lists (which happens single-threaded today),
// we do not disable refs discovery to honor the correct semantics of
// java.lang.Reference. As a result, we need to be careful below
// that ref removal steps interleave safely with ref discovery steps
// (in this thread).
// Returns true if the walk was abandoned early because the yield
// closure requested a return; false if the whole list was processed.
bool ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
                                                     BoolObjectClosure* is_alive,
                                                     OopClosure* keep_alive,
                                                     VoidClosure* complete_gc,
                                                     YieldClosure* yield) {
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // Yield to higher-priority work if requested; caller may retry later.
    if (yield->should_return_fine_grain()) {
      return true;
    }
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    if (iter.referent() == NULL || iter.is_referent_alive()) {
      // The referent has been cleared, or is alive; we need to trace
      // and mark its cohort.
      log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
                                 p2i(iter.obj()), iter.obj()->klass()->internal_name());
      // Remove Reference object from list
      iter.remove();
      // Keep alive its cohort.
      iter.make_referent_alive();
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  // Close the reachable set
  complete_gc->do_void();

  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
                                 iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
  return false;
}
1108
1109 const char* ReferenceProcessor::list_name(uint i) {
1110 assert(i <= _max_num_queues * number_of_subclasses_of_ref(),
1111 "Out of bounds index");
|