< prev index next >

src/hotspot/share/gc/shared/referenceProcessor.cpp

Print this page
rev 50093 : [mq]: pp2_work


 286     // At the end of the list, we should make _prev point to itself.
 287     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 288     // and _prev will be NULL.
 289     new_next = _prev_discovered;
 290   } else {
 291     new_next = _next_discovered;
 292   }
 293   // Remove Reference object from discovered list. Note that G1 does not need a
 294   // pre-barrier here because we know the Reference has already been found/marked,
 295   // that's how it ended up in the discovered list in the first place.
 296   RawAccess<>::oop_store(_prev_discovered_addr, new_next);
 297   NOT_PRODUCT(_removed++);
 298   _refs_list.dec_length(1);
 299 }
 300 
// NULL out the referent field of the current Reference.
// RawAccess: a raw store with no GC barriers applied.
void DiscoveredListIterator::clear_referent() {
  RawAccess<>::oop_store(_referent_addr, oop(NULL));
}
 304 
// Enqueue the current Reference: make it inactive by self-looping its
// next field, then chain it toward the pending list by storing the next
// discovered ref into its discovered field.
void DiscoveredListIterator::enqueue() {
  // Self-loop next, so as to make Ref not active.
  java_lang_ref_Reference::set_next_raw(_current_discovered, _current_discovered);

  // Store via the Access API without keep-alive decorator semantics.
  HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_current_discovered,
                                            java_lang_ref_Reference::discovered_offset,
                                            _next_discovered);
}
 313 
// Splice the just-enqueued chain onto the global pending list: the head of
// this refs_list becomes the new pending list and the old pending head is
// appended after the chain's last Reference.
void DiscoveredListIterator::complete_enqueue() {
  if (_prev_discovered != NULL) {
    // This is the last object.
    // Swap refs_list into pending list and set obj's
    // discovered to what we read from the pending list.
    // (If _prev_discovered is NULL, nothing was enqueued and there is
    // nothing to splice.)
    oop old = Universe::swap_reference_pending_list(_refs_list.head());
    HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_prev_discovered, java_lang_ref_Reference::discovered_offset, old);
  }
}
 323 
 324 // NOTE: process_phase*() are largely similar, and at a high level
 325 // merely iterate over the extant list applying a predicate to
 326 // each of its elements and possibly removing that element from the
 327 // list and applying some further closures to that element.
 328 // We should consider the possibility of replacing these


 347     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
 348     if (referent_is_dead &&
 349         !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
 350       log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s"  ") by policy",
 351                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
 352       // Remove Reference object from list
 353       iter.remove();
 354       // keep the referent around
 355       iter.make_referent_alive();
 356       iter.move_to_next();
 357     } else {
 358       iter.next();
 359     }
 360   }
 361   // Close the reachable set
 362   complete_gc->do_void();
 363   log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
 364                              iter.removed(), iter.processed(), p2i(&refs_list));
 365 }
 366 








 367 void ReferenceProcessor::process_phase2(DiscoveredList&    refs_list,
 368                                         BoolObjectClosure* is_alive,
 369                                         OopClosure*        keep_alive,
 370                                         VoidClosure*       complete_gc) {
 371   if (discovery_is_atomic()) {
 372     // complete_gc is ignored in this case for this phase
 373     pp2_work(refs_list, is_alive, keep_alive);
 374   } else {
 375     assert(complete_gc != NULL, "Error");
 376     pp2_work_concurrent_discovery(refs_list, is_alive,
 377                                   keep_alive, complete_gc);
 378   }
 379 }
// Traverse the list and remove any Refs that are not active, or
// whose referents are either alive or NULL.
void
ReferenceProcessor::pp2_work(DiscoveredList&    refs_list,
                             BoolObjectClosure* is_alive,
                             OopClosure*        keep_alive) {
  // Atomic-discovery variant of phase 2: load_ptrs is told referents may
  // not be NULL, so only referent liveness has to be tested here.
  assert(discovery_is_atomic(), "Error");
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    // Debug-only sanity check: with atomic discovery only active
    // References (next == NULL) should ever have been discovered.
    DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
    assert(next == NULL, "Should not discover inactive Reference");
    if (iter.is_referent_alive()) {
      log_develop_trace(gc, ref)("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
                                 p2i(iter.obj()), iter.obj()->klass()->internal_name());
      // The referent is reachable after all.
      // Remove Reference object from list.
      iter.remove();
      // Update the referent pointer as necessary: Note that this
      // should not entail any recursive marking because the
      // referent must already have been traversed.
      iter.make_referent_alive();
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
        " Refs in discovered list " INTPTR_FORMAT,
        iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
 415 
// Concurrent-discovery variant of phase 2: since discovery was not atomic,
// the referent may have been cleared, or the Reference made inactive
// (non-NULL next), after it was discovered; such refs are dropped here too.
void
ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList&    refs_list,
                                                  BoolObjectClosure* is_alive,
                                                  OopClosure*        keep_alive,
                                                  VoidClosure*       complete_gc) {
  assert(!discovery_is_atomic(), "Error");
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    HeapWord* next_addr = java_lang_ref_Reference::next_addr_raw(iter.obj());
    oop next = java_lang_ref_Reference::next(iter.obj());
    if ((iter.referent() == NULL || iter.is_referent_alive() ||
         next != NULL)) {
      assert(oopDesc::is_oop_or_null(next), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
      // Remove Reference object from list
      iter.remove();
      // Trace the cohorts
      iter.make_referent_alive();
      // Also visit the next field through its raw address, so the
      // keep_alive closure can keep its target alive and update the slot.
      if (UseCompressedOops) {
        keep_alive->do_oop((narrowOop*)next_addr);
      } else {
        keep_alive->do_oop((oop*)next_addr);
      }
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  // Now close the newly reachable set
  complete_gc->do_void();
  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
        " Refs in discovered list " INTPTR_FORMAT,
        iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
 454 
// Phase 3: for every remaining discovered Reference, either clear its
// referent or keep the referent alive, then chain the list onto the
// global pending list.
void ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
                                        bool               clear_referent,
                                        BoolObjectClosure* is_alive,
                                        OopClosure*        keep_alive,
                                        VoidClosure*       complete_gc) {
  ResourceMark rm;
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // keep the referent around
      iter.make_referent_alive();
    }
    // Link this ref toward the pending list via its discovered field.
    iter.enqueue();
    log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                               clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
    assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
    iter.next();
  }
  // Hand the whole chain over to the global pending list.
  iter.complete_enqueue();
  // Close the reachable set
  complete_gc->do_void();
  // Clear the list.
  refs_list.set_head(NULL);
  refs_list.set_length(0);
}
 484 
 485 void
 486 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 487   oop obj = NULL;
 488   oop next = refs_list.head();
 489   while (next != obj) {


 881 //     the referent is in the generation (span) being currently collected
 882 //     then we can discover the reference object, provided
 883 //     the object has not already been discovered by
 884 //     a different concurrently running collector (as may be the
 885 //     case, for instance, if the reference object is in CMS and
 886 //     the referent in DefNewGeneration), and provided the processing
 887 //     of this reference object by the current collector will
 888 //     appear atomic to every other collector in the system.
 889 //     (Thus, for instance, a concurrent collector may not
 890 //     discover references in other generations even if the
 891 //     referent is in its own generation). This policy may,
 892 //     in certain cases, enqueue references somewhat sooner than
 893 //     might Policy #0 above, but at marginally increased cost
 894 //     and complexity in processing these references.
 895 //     We call this choice the "ReferentBasedDiscovery" policy.
 896 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
 897   // Make sure we are discovering refs (rather than processing discovered refs).
 898   if (!_discovering_refs || !RegisterReferences) {
 899     return false;
 900   }
 901   // We only discover active references.
 902   oop next = java_lang_ref_Reference::next(obj);
 903   if (next != NULL) {   // Ref is no longer active
 904     return false;
 905   }
 906 
 907   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
 908       !is_subject_to_discovery(obj)) {
 909     // Reference is not in the originating generation;
 910     // don't treat it specially (i.e. we want to scan it as a normal
 911     // object with strong references).
 912     return false;
 913   }
 914 
 915   // We only discover references whose referents are not (yet)
 916   // known to be strongly reachable.
 917   if (is_alive_non_header() != NULL) {
 918     verify_referent(obj);
 919     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
 920       return false;  // referent is reachable
 921     }
 922   }
 923   if (rt == REF_SOFT) {


1072 }
1073 
1074 // Walk the given discovered ref list, and remove all reference objects
1075 // whose referents are still alive, whose referents are NULL or which
1076 // are not active (have a non-NULL next field). NOTE: When we are
1077 // thus precleaning the ref lists (which happens single-threaded today),
1078 // we do not disable refs discovery to honor the correct semantics of
1079 // java.lang.Reference. As a result, we need to be careful below
1080 // that ref removal steps interleave safely with ref discovery steps
1081 // (in this thread).
void
ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
                                                BoolObjectClosure* is_alive,
                                                OopClosure*        keep_alive,
                                                VoidClosure*       complete_gc,
                                                YieldClosure*      yield) {
  // NOTE(review): the yield closure is not consulted anywhere in this
  // body; presumably yielding is handled by the caller — confirm.
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    oop obj = iter.obj();
    oop next = java_lang_ref_Reference::next(obj);
    if (iter.referent() == NULL || iter.is_referent_alive() ||
        next != NULL) {
      // The referent has been cleared, or is alive, or the Reference is not
      // active; we need to trace and mark its cohort.
      log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
                                 p2i(iter.obj()), iter.obj()->klass()->internal_name());
      // Remove Reference object from list
      iter.remove();
      // Keep alive its cohort.
      iter.make_referent_alive();
      // Visit the next field through its raw address so the closure can
      // keep its target alive and update the slot in place.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr_raw(obj);
        keep_alive->do_oop(next_addr);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
        keep_alive->do_oop(next_addr);
      }
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  // Close the reachable set
  complete_gc->do_void();

  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
        iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
1125 
1126 const char* ReferenceProcessor::list_name(uint i) {
1127    assert(i <= _max_num_queues * number_of_subclasses_of_ref(),
1128           "Out of bounds index");
1129 


 286     // At the end of the list, we should make _prev point to itself.
 287     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 288     // and _prev will be NULL.
 289     new_next = _prev_discovered;
 290   } else {
 291     new_next = _next_discovered;
 292   }
 293   // Remove Reference object from discovered list. Note that G1 does not need a
 294   // pre-barrier here because we know the Reference has already been found/marked,
 295   // that's how it ended up in the discovered list in the first place.
 296   RawAccess<>::oop_store(_prev_discovered_addr, new_next);
 297   NOT_PRODUCT(_removed++);
 298   _refs_list.dec_length(1);
 299 }
 300 
// NULL out the referent field of the current Reference.
// RawAccess: a raw store with no GC barriers applied.
void DiscoveredListIterator::clear_referent() {
  RawAccess<>::oop_store(_referent_addr, oop(NULL));
}
 304 
// Chain the current Reference toward the pending list by storing the next
// discovered ref into its discovered field. (Unlike the previous revision,
// no next self-loop is set here.)
void DiscoveredListIterator::enqueue() {
  // Store via the Access API without keep-alive decorator semantics.
  HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_current_discovered,
                                            java_lang_ref_Reference::discovered_offset,
                                            _next_discovered);
}
 310 
// Splice the just-enqueued chain onto the global pending list: the head of
// this refs_list becomes the new pending list and the old pending head is
// appended after the chain's last Reference.
void DiscoveredListIterator::complete_enqueue() {
  if (_prev_discovered != NULL) {
    // This is the last object.
    // Swap refs_list into pending list and set obj's
    // discovered to what we read from the pending list.
    // (If _prev_discovered is NULL, nothing was enqueued and there is
    // nothing to splice.)
    oop old = Universe::swap_reference_pending_list(_refs_list.head());
    HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_prev_discovered, java_lang_ref_Reference::discovered_offset, old);
  }
}
 320 
 321 // NOTE: process_phase*() are largely similar, and at a high level
 322 // merely iterate over the extant list applying a predicate to
 323 // each of its elements and possibly removing that element from the
 324 // list and applying some further closures to that element.
 325 // We should consider the possibility of replacing these


 344     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
 345     if (referent_is_dead &&
 346         !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
 347       log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s"  ") by policy",
 348                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
 349       // Remove Reference object from list
 350       iter.remove();
 351       // keep the referent around
 352       iter.make_referent_alive();
 353       iter.move_to_next();
 354     } else {
 355       iter.next();
 356     }
 357   }
 358   // Close the reachable set
 359   complete_gc->do_void();
 360   log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
 361                              iter.removed(), iter.processed(), p2i(&refs_list));
 362 }
 363 
// Develop-level gc+ref trace for a Reference being dropped from a
// discovered list; 'reason' is a short tag such as "cleared" or "reachable".
inline void log_dropped_ref(const DiscoveredListIterator& iter, const char* reason) {
  log_develop_trace(gc, ref)("Dropping %s reference " PTR_FORMAT ": %s",
                             reason, p2i(iter.obj()),
                             iter.obj()->klass()->internal_name());
}
 369 
// Traverse the list and remove any Refs whose referents are alive,
// or NULL if discovery is not atomic.
void ReferenceProcessor::process_phase2(DiscoveredList&    refs_list,
                                        BoolObjectClosure* is_alive,
                                        OopClosure*        keep_alive,
                                        VoidClosure*       complete_gc) {
  // complete_gc is unused.
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    // A NULL referent is only tolerated when discovery is not atomic,
    // since only then can it have been cleared after discovery.
    iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
    if (iter.referent() == NULL) {
      // Reference has been cleared since discovery; only possible if
      // discovery is not atomic.  Remove reference from list.
      log_dropped_ref(iter, "cleared");
      iter.remove();
      iter.move_to_next();
    } else if (iter.is_referent_alive()) {
      // The referent is reachable after all.
      // Remove reference from list.
      log_dropped_ref(iter, "reachable");
      iter.remove();
      // Update the referent pointer as necessary.  Note that this
      // should not entail any recursive marking because the
      // referent must already have been traversed.
      iter.make_referent_alive();
      iter.move_to_next();
    } else {
      iter.next();
    }
  }
  NOT_PRODUCT(
    if (iter.processed() > 0) {
      log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
        " Refs in discovered list " INTPTR_FORMAT,
        iter.removed(), iter.processed(), p2i(&refs_list));
    }
  )
}
 408 







































// Phase 3: for every remaining discovered Reference, either clear its
// referent or (for FinalReferences) keep it alive and mark the ref
// inactive, then chain the list onto the global pending list.
void ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
                                        bool               clear_referent,
                                        BoolObjectClosure* is_alive,
                                        OopClosure*        keep_alive,
                                        VoidClosure*       complete_gc) {
  ResourceMark rm;
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // Current reference is a FinalReference, the only kind whose
      // referent we do not clear; it is kept alive so that finalize()
      // can still be called on it.
      iter.make_referent_alive();
      // Self-loop next, to mark it not active.
      assert(java_lang_ref_Reference::next(iter.obj()) == NULL, "enqueued FinalReference");
      java_lang_ref_Reference::set_next_raw(iter.obj(), iter.obj());
    }
    iter.enqueue();
    log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                               clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
    assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
    iter.next();
  }
  // Hand the whole chain over to the global pending list.
  iter.complete_enqueue();
  // Close the reachable set
  complete_gc->do_void();
  // Clear the list.
  refs_list.set_head(NULL);
  refs_list.set_length(0);
}
 442 
 443 void
 444 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 445   oop obj = NULL;
 446   oop next = refs_list.head();
 447   while (next != obj) {


 839 //     the referent is in the generation (span) being currently collected
 840 //     then we can discover the reference object, provided
 841 //     the object has not already been discovered by
 842 //     a different concurrently running collector (as may be the
 843 //     case, for instance, if the reference object is in CMS and
 844 //     the referent in DefNewGeneration), and provided the processing
 845 //     of this reference object by the current collector will
 846 //     appear atomic to every other collector in the system.
 847 //     (Thus, for instance, a concurrent collector may not
 848 //     discover references in other generations even if the
 849 //     referent is in its own generation). This policy may,
 850 //     in certain cases, enqueue references somewhat sooner than
 851 //     might Policy #0 above, but at marginally increased cost
 852 //     and complexity in processing these references.
 853 //     We call this choice the "ReferentBasedDiscovery" policy.
 854 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
 855   // Make sure we are discovering refs (rather than processing discovered refs).
 856   if (!_discovering_refs || !RegisterReferences) {
 857     return false;
 858   }
 859 
 860   if ((rt == REF_FINAL) && (java_lang_ref_Reference::next(obj) != NULL)) {
 861     // Don't rediscover non-active FinalReferences.
 862     return false;
 863   }
 864 
 865   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
 866       !is_subject_to_discovery(obj)) {
 867     // Reference is not in the originating generation;
 868     // don't treat it specially (i.e. we want to scan it as a normal
 869     // object with strong references).
 870     return false;
 871   }
 872 
 873   // We only discover references whose referents are not (yet)
 874   // known to be strongly reachable.
 875   if (is_alive_non_header() != NULL) {
 876     verify_referent(obj);
 877     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
 878       return false;  // referent is reachable
 879     }
 880   }
 881   if (rt == REF_SOFT) {


1030 }
1031 
1032 // Walk the given discovered ref list, and remove all reference objects
1033 // whose referents are still alive or whose referents have been
1034 // cleared (are NULL). NOTE: When we are
1035 // thus precleaning the ref lists (which happens single-threaded today),
1036 // we do not disable refs discovery to honor the correct semantics of
1037 // java.lang.Reference. As a result, we need to be careful below
1038 // that ref removal steps interleave safely with ref discovery steps
1039 // (in this thread).
1040 void
1041 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1042                                                 BoolObjectClosure* is_alive,
1043                                                 OopClosure*        keep_alive,
1044                                                 VoidClosure*       complete_gc,
1045                                                 YieldClosure*      yield) {
1046   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1047   while (iter.has_next()) {
1048     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1049     oop obj = iter.obj();
1050     if (iter.referent() == NULL || iter.is_referent_alive()) {
1051       // The referent has been cleared, or is alive; we need to trace
1052       // and mark its cohort.


1053       log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1054                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
1055       // Remove Reference object from list
1056       iter.remove();
1057       // Keep alive its cohort.
1058       iter.make_referent_alive();







1059       iter.move_to_next();
1060     } else {
1061       iter.next();
1062     }
1063   }
1064   // Close the reachable set
1065   complete_gc->do_void();
1066 
1067   NOT_PRODUCT(
1068     if (iter.processed() > 0) {
1069       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1070         iter.removed(), iter.processed(), p2i(&refs_list));
1071     }
1072   )
1073 }
1074 
1075 const char* ReferenceProcessor::list_name(uint i) {
1076    assert(i <= _max_num_queues * number_of_subclasses_of_ref(),
1077           "Out of bounds index");
1078 
< prev index next >