src/share/vm/memory/referenceProcessor.cpp

rev 10551 : [backport] Remove safe_equals()
rev 10593 : Move JNI Weak References workaround to Shenandoah-specific root processor


 288     void do_oop(oop* unused)       { _count++; }
 289     void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
 290     int count() { return _count; }
 291   };
 292   CountHandleClosure global_handle_count;
 293   AlwaysAliveClosure always_alive;
 294   JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
 295   return global_handle_count.count();
 296 }
 297 #endif
 298 
 299 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
 300                                           OopClosure*        keep_alive,
 301                                           VoidClosure*       complete_gc) {
 302 #ifndef PRODUCT
 303   if (PrintGCDetails && PrintReferenceGC) {
 304     unsigned int count = count_jni_refs();
 305     gclog_or_tty->print(", %u refs", count);
 306   }
 307 #endif
 308   if (UseShenandoahGC) {
 309     // Workaround bugs with JNI weak reference processing, by pessimistically
 310     // assuming all JNI weak refs are alive. This effectively makes JNI weak refs
 311     // non-reclaimable. // TODO: Fix this properly
 312     class AlwaysAliveClosure: public BoolObjectClosure {
 313     public:
 314       virtual bool do_object_b(oop obj) { return true; }
 315     };
 316 
 317     AlwaysAliveClosure always_alive;
 318     JNIHandles::weak_oops_do(&always_alive, keep_alive);
 319   } else {
 320     JNIHandles::weak_oops_do(is_alive, keep_alive);
 321   }
 322   complete_gc->do_void();
 323 }
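
Editor's note: the Shenandoah branch above substitutes an always-true liveness predicate, which degenerates the weak scan into a strong one. Below is a minimal standalone sketch of the is_alive/keep_alive protocol that weak-root scanning follows; all types are hypothetical stand-ins, not the actual JNIHandles implementation.

    // Sketch of the weak-root protocol used above. All types here are
    // hypothetical stand-ins, not HotSpot's real oop/closure classes.
    #include <cstddef>

    struct oop_t { void* obj; };                  // stand-in for an oop

    struct BoolObjectClosureSketch {
      virtual bool do_object_b(oop_t obj) = 0;    // "is this object alive?"
      virtual ~BoolObjectClosureSketch() {}
    };
    struct OopClosureSketch {
      virtual void do_oop(oop_t* p) = 0;          // "keep/update the slot *p"
      virtual ~OopClosureSketch() {}
    };

    // Walk a table of weak slots: keep live referents alive, clear dead ones.
    static void weak_slots_do(oop_t* slots, size_t n,
                              BoolObjectClosureSketch* is_alive,
                              OopClosureSketch* keep_alive) {
      for (size_t i = 0; i < n; i++) {
        if (slots[i].obj == NULL) continue;       // empty slot
        if (is_alive->do_object_b(slots[i])) {
          keep_alive->do_oop(&slots[i]);          // survivor: visit the slot
        } else {
          slots[i].obj = NULL;                    // dead: clear the weak slot
        }
      }
    }

    // The workaround's predicate: claim everything is alive, so the else
    // branch above never runs and no JNI weak ref is ever cleared.
    struct AlwaysAliveSketch : BoolObjectClosureSketch {
      virtual bool do_object_b(oop_t) { return true; }
    };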
 324 
 325 
 326 template <class T>
 327 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
 328                                    AbstractRefProcTaskExecutor* task_executor) {
 329 
 330   // Remember old value of pending references list
 331   T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
 332   T old_pending_list_value = *pending_list_addr;
 333 
 334   // Enqueue references that are not made active again, and
 335   // clear the decks for the next collection (cycle).
 336   ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
 337   // Do the post-barrier on pending_list_addr missed in
 338   // enqueue_discovered_reflist.
 339   oopDesc::bs()->write_ref_field(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
 340 
 341   // Stop treating discovered references specially.
 342   ref->disable_discovery();
 343 
 344   // Return true if new pending references were added
 345   return ! oopDesc::safe_equals(old_pending_list_value, *pending_list_addr);
 346 }
 347 
 348 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
 349   NOT_PRODUCT(verify_ok_to_handle_reflists());
 350   if (UseCompressedOops) {
 351     return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
 352   } else {
 353     return enqueue_discovered_ref_helper<oop>(this, task_executor);
 354   }
 355 }
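
Editor's note: enqueue_discovered_ref_helper is templated so one body operates on 32-bit narrowOop slots or full-width oop slots, with the instantiation chosen once per call from UseCompressedOops. A minimal sketch of that width-dispatch idiom, with hypothetical names:

    // Sketch of the oop-width dispatch used by enqueue_discovered_references.
    // The typedefs and the flag are hypothetical stand-ins.
    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t  narrow_ref;   // compressed, 32-bit heap reference
    typedef uintptr_t wide_ref;     // uncompressed, pointer-width reference

    static bool use_compressed = true;  // stands in for UseCompressedOops

    // One template body; T fixes the width of every load and compare,
    // so no casts are needed at the use sites.
    template <class T>
    static bool head_changed(T* head_addr, T old_value) {
      return *head_addr != old_value;
    }

    int main() {
      narrow_ref nh = 7;
      wide_ref   wh = 7;
      // Same logic, instantiated per width, selected once at the call site.
      bool changed = use_compressed ? head_changed<narrow_ref>(&nh, 7)
                                    : head_changed<wide_ref>(&wh, 7);
      printf("changed = %d\n", (int)changed);
      return 0;
    }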
 356 
 357 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
 358                                                     HeapWord* pending_list_addr) {
 359   // Given a list of refs linked through the "discovered" field
 360   // (java.lang.ref.Reference.discovered), self-loop their "next" field
 361   // thus distinguishing them from active References, then
 362   // prepend them to the pending list.
 363   //
 364   // The Java threads will see the Reference objects linked together through
 365   // the discovered field. Instead of trying to do the write barrier updates
 366   // in all places in the reference processor where we manipulate the discovered
 367   // field we make sure to do the barrier here where we anyway iterate through
 368   // all linked Reference objects. Note that it is important to not dirty any
 369   // cards during reference processing since this will cause card table
 370   // verification to fail for G1.
 371   //
 372   // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
 373   // the "next" field is used to chain the pending list, not the discovered
 374   // field.
 375   if (TraceReferenceGC && PrintGCDetails) {
 376     gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
 377                            INTPTR_FORMAT, (address)refs_list.head());
 378   }
 379 
 380   oop obj = NULL;
 381   oop next_d = refs_list.head();
 382   if (pending_list_uses_discovered_field()) { // New behavior
 383     // Walk down the list, self-looping the next field
 384     // so that the References are not considered active.
 385     while (! oopDesc::safe_equals(obj, next_d)) {
 386       obj = next_d;
 387       assert(obj->is_instanceRef(), "should be reference object");
 388       next_d = java_lang_ref_Reference::discovered(obj);
 389       if (TraceReferenceGC && PrintGCDetails) {
 390         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
 391                                (void *)obj, (void *)next_d);
 392       }
 393       assert(java_lang_ref_Reference::next(obj) == NULL,
 394              "Reference not active; should not be discovered");
 395       // Self-loop next, so as to make Ref not active.
 396       java_lang_ref_Reference::set_next_raw(obj, obj);
 397       if (! oopDesc::safe_equals(next_d, obj)) {
 398         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), next_d);
 399       } else {
 400         // This is the last object.
 401         // Swap refs_list into pending_list_addr and
 402         // set obj's discovered to what we read from pending_list_addr.
 403         oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
 404         // Need post-barrier on pending_list_addr. See enqueue_discovered_ref_helper() above.
 405         java_lang_ref_Reference::set_discovered_raw(obj, old); // old may be NULL
 406         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), old);
 407       }
 408     }
 409   } else { // Old behavior
 410     // Walk down the list, copying the discovered field into
 411     // the next field and clearing the discovered field.
 412     while (obj != next_d) {
 413       obj = next_d;
 414       assert(obj->is_instanceRef(), "should be reference object");
 415       next_d = java_lang_ref_Reference::discovered(obj);
 416       if (TraceReferenceGC && PrintGCDetails) {
 417         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,

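Editor's note: the walk above terminates when the discovered field points back at the object itself; NULL is reserved to mean "not on any discovered list", so the tail self-loops instead. A minimal sketch of that traversal, with a hypothetical Node type:

    // Sketch of the self-looped list walk used above: the tail node's link
    // points at itself, so "current == next" ends the traversal, leaving
    // NULL free to mean "not on a list". Node is a hypothetical stand-in.
    #include <stdio.h>
    #include <stddef.h>

    struct Node { int id; Node* discovered; };

    static void walk_self_looped(Node* head) {
      Node* obj  = NULL;
      Node* next = head;
      while (obj != next) {       // tail links to itself -> loop exits
        obj  = next;
        next = obj->discovered;
        printf("visiting %d\n", obj->id);
      }
    }

    int main() {
      Node c = {3, NULL}; c.discovered = &c;   // tail self-loops
      Node b = {2, &c};
      Node a = {1, &b};
      walk_self_looped(&a);                    // visits 1, 2, 3 exactly once
      return 0;
    }
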

 495   oop discovered = java_lang_ref_Reference::discovered(_ref);
 496   assert(_discovered_addr && discovered->is_oop_or_null(),
 497          "discovered field is bad");
 498   _next = discovered;
 499   _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
 500   _referent = java_lang_ref_Reference::referent(_ref);
 501   assert(Universe::heap()->is_in_reserved_or_null(_referent),
 502          "Wrong oop found in java.lang.Reference object");
 503   assert(allow_null_referent ?
 504              _referent->is_oop_or_null()
 505            : _referent->is_oop(),
 506          "bad referent");
 507 }
 508 
 509 void DiscoveredListIterator::remove() {
 510   assert(_ref->is_oop(), "Dropping a bad reference");
 511   oop_store_raw(_discovered_addr, NULL);
 512 
 513   // First _prev_next ref actually points into DiscoveredList (gross).
 514   oop new_next;
 515   if (oopDesc::safe_equals(_next, _ref)) {
 516     // At the end of the list, we should make _prev point to itself.
 517     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 518     // and _prev will be NULL.
 519     new_next = _prev;
 520   } else {
 521     new_next = _next;
 522   }
 523   // Remove Reference object from discovered list. Note that G1 does not need a
 524   // pre-barrier here because we know the Reference has already been found/marked,
 525   // that's how it ended up in the discovered list in the first place.
 526   oop_store_raw(_prev_next, new_next);
 527   NOT_PRODUCT(_removed++);
 528   _refs_list.dec_length(1);
 529 }
 530 
 531 // Make the Reference object active again.
 532 void DiscoveredListIterator::make_active() {
 533   // The pre barrier for G1 is probably just needed for the old
 534   // reference processing behavior. Should we guard this with
 535   // ReferenceProcessor::pending_list_uses_discovered_field() ?

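Editor's note: remove() above unlinks through _prev_next, a pointer to whichever field currently references _ref: the DiscoveredList head slot for the first element, the predecessor's discovered field otherwise. That makes removal a single store with no head special case. A sketch of the same idiom on a plain NULL-terminated list (the real iterator's list is self-looped, hence its _next == _ref end test); types are hypothetical:

    // Sketch of the pointer-to-link removal idiom behind _prev_next:
    // track the *field* that points at the current node, so unlinking the
    // head and unlinking an interior node are the same single store.
    #include <stdio.h>
    #include <stddef.h>

    struct Node { int id; Node* next; };

    static void remove_by_id(Node** head, int id) {
      Node** prev_next = head;          // starts at the list head slot
      for (Node* cur = *head; cur != NULL; cur = cur->next) {
        if (cur->id == id) {
          *prev_next = cur->next;       // unlink, no head special case
          return;
        }
        prev_next = &cur->next;         // field that points at the next node
      }
    }

    int main() {
      Node c = {3, NULL}, b = {2, &c}, a = {1, &b};
      Node* head = &a;
      remove_by_id(&head, 1);           // removing the head: same code path
      for (Node* n = head; n != NULL; n = n->next) printf("%d ", n->id);
      printf("\n");                     // prints: 2 3
      return 0;
    }
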

 699       iter.make_referent_alive();
 700     }
 701     if (TraceReferenceGC) {
 702       gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 703                              clear_referent ? "cleared " : "",
 704                              (void *)iter.obj(), iter.obj()->klass()->internal_name());
 705     }
 706     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
 707     iter.next();
 708   }
 709   // Remember to update the next pointer of the last ref.
 710   iter.update_discovered();
 711   // Close the reachable set
 712   complete_gc->do_void();
 713 }
 714 
 715 void
 716 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 717   oop obj = NULL;
 718   oop next = refs_list.head();
 719   while (! oopDesc::safe_equals(next, obj)) {
 720     obj = next;
 721     next = java_lang_ref_Reference::discovered(obj);
 722     java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 723   }
 724   refs_list.set_head(NULL);
 725   refs_list.set_length(0);
 726 }
 727 
 728 void
 729 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
 730   clear_discovered_references(refs_list);
 731 }
 732 
 733 void ReferenceProcessor::abandon_partial_discovery() {
 734   // loop over the lists
 735   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 736     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 737       gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
 738     }
 739     abandon_partial_discovered_list(_discovered_refs[i]);

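Editor's note: the loop bound _max_num_q * number_of_subclasses_of_ref() reflects that _discovered_refs is one flat array holding a group of _max_num_q queues per reference type, which is why i % _max_num_q == 0 detects the first list of each type. A small sketch of that layout, with hypothetical counts:

    // Sketch of the flat, type-major layout implied by the loop above:
    // index = type * max_num_q + queue. The counts are hypothetical.
    #include <stdio.h>

    static const int kNumTypes = 4;  // e.g. Soft, Weak, Final, Phantom
    static const int kMaxNumQ  = 3;  // queues per type (parallelism degree)

    int main() {
      for (int i = 0; i < kNumTypes * kMaxNumQ; i++) {
        if ((i % kMaxNumQ) == 0) {
          // First queue of a type: where the code above prints one
          // "Abandoning ... discovered list" header per reference type.
          printf("-- type %d --\n", i / kMaxNumQ);
        }
        printf("  list[%d]: type %d, queue %d\n",
               i, i / kMaxNumQ, i % kMaxNumQ);
      }
      return 0;
    }
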

 851         oop move_head = ref_lists[from_idx].head();
 852         oop move_tail = move_head;
 853         oop new_head  = move_head;
 854         // find an element to split the list on
 855         for (size_t j = 0; j < refs_to_move; ++j) {
 856           move_tail = new_head;
 857           new_head = java_lang_ref_Reference::discovered(new_head);
 858         }
 859 
 860         // Add the chain to the to list.
 861         if (ref_lists[to_idx].head() == NULL) {
 862           // to list is empty. Make a loop at the end.
 863           java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
 864         } else {
 865           java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
 866         }
 867         ref_lists[to_idx].set_head(move_head);
 868         ref_lists[to_idx].inc_length(refs_to_move);
 869 
 870         // Remove the chain from the from list.
 871         if (oopDesc::safe_equals(move_tail, new_head)) {
 872           // We found the end of the from list.
 873           ref_lists[from_idx].set_head(NULL);
 874         } else {
 875           ref_lists[from_idx].set_head(new_head);
 876         }
 877         ref_lists[from_idx].dec_length(refs_to_move);
 878         if (ref_lists[from_idx].length() == 0) {
 879           break;
 880         }
 881       } else {
 882         to_idx = (to_idx + 1) % _num_q;
 883       }
 884     }
 885   }
 886 #ifdef ASSERT
 887   size_t balanced_total_refs = 0;
 888   for (uint i = 0; i < _max_num_q; ++i) {
 889     balanced_total_refs += ref_lists[i].length();
 890     if (TraceReferenceGC && PrintGCDetails) {
 891       gclog_or_tty->print(SIZE_FORMAT " ", ref_lists[i].length());

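Editor's note: the balancing hunk above walks refs_to_move links to find a split point, splices the prefix [move_head, move_tail] onto the destination list (self-looping the tail if the destination was empty), and leaves the remainder as the new source head. A compact sketch of that splice with a hypothetical Node type; as before, a tail that links to itself marks the end of a list:

    // Sketch of the chain splice in the balancing code above: detach the
    // first k nodes of 'from' and prepend them to 'to'. Lists self-loop at
    // the tail, matching the discovered-field convention. k is assumed >= 1.
    #include <stddef.h>

    struct Node { int id; Node* discovered; };

    static void move_k(Node** from_head, Node** to_head, size_t k) {
      Node* move_head = *from_head;
      Node* move_tail = move_head;
      Node* new_head  = move_head;
      for (size_t j = 0; j < k; ++j) {      // find the split point
        move_tail = new_head;
        new_head  = new_head->discovered;
      }
      if (*to_head == NULL) {
        move_tail->discovered = move_tail;  // destination empty: self-loop tail
      } else {
        move_tail->discovered = *to_head;   // else chain onto the old head
      }
      *to_head = move_head;
      // If the split point's link looped, the whole source list was consumed.
      *from_head = (move_tail == new_head) ? NULL : new_head;
    }
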

1140 //     the referent is in the generation (span) being currently collected
1141 //     then we can discover the reference object, provided
1142 //     the object has not already been discovered by
1143 //     a different concurrently running collector (as may be the
1144 //     case, for instance, if the reference object is in CMS and
1145 //     the referent in DefNewGeneration), and provided the processing
1146 //     of this reference object by the current collector will
1147 //     appear atomic to every other collector in the system.
1148 //     (Thus, for instance, a concurrent collector may not
1149 //     discover references in other generations even if the
1150 //     referent is in its own generation). This policy may,
1151 //     in certain cases, enqueue references somewhat sooner than
1152 //     might Policy #0 above, but at marginally increased cost
1153 //     and complexity in processing these references.
 1154 //     We call this choice the "ReferentBasedDiscovery" policy.
1155 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
1156   // Make sure we are discovering refs (rather than processing discovered refs).
1157   if (!_discovering_refs || !RegisterReferences) {
1158     return false;
1159   }
1160   DEBUG_ONLY(oopDesc::bs()->verify_safe_oop(obj);)
1161   // We only discover active references.
1162   oop next = java_lang_ref_Reference::next(obj);
1163   if (next != NULL) {   // Ref is no longer active
1164     return false;
1165   }
1166 
1167   HeapWord* obj_addr = (HeapWord*)obj;
1168   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1169       !_span.contains(obj_addr)) {
1170     // Reference is not in the originating generation;
1171     // don't treat it specially (i.e. we want to scan it as a normal
1172     // object with strong references).
1173     return false;
1174   }
1175 
1176   // We only discover references whose referents are not (yet)
1177   // known to be strongly reachable.
1178   if (is_alive_non_header() != NULL) {
1179     verify_referent(obj);
1180     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {


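Editor's note: the policy comment above distinguishes discovery keyed on where the Reference object lives (ReferenceBasedDiscovery, the _span.contains(obj_addr) test in the code) from discovery keyed on where the referent lives (ReferentBasedDiscovery). A much-simplified sketch of the two checks, with hypothetical standalone types:

    // Much-simplified sketch of the two discovery policies described above.
    // MemSpan and the enum are hypothetical stand-ins for HotSpot's types.
    #include <stdint.h>

    struct MemSpan {
      uintptr_t lo, hi;
      bool contains(const void* p) const {
        uintptr_t a = (uintptr_t)p;
        return a >= lo && a < hi;
      }
    };

    enum DiscoveryPolicySketch { kReferenceBased, kReferentBased };

    static bool may_discover(DiscoveryPolicySketch policy, const MemSpan& span,
                             const void* reference_obj, const void* referent) {
      if (policy == kReferenceBased) {
        return span.contains(reference_obj);  // Reference object in the
      }                                       // collected span?
      return span.contains(referent);         // referent in the collected span?
    }
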


 288     void do_oop(oop* unused)       { _count++; }
 289     void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
 290     int count() { return _count; }
 291   };
 292   CountHandleClosure global_handle_count;
 293   AlwaysAliveClosure always_alive;
 294   JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
 295   return global_handle_count.count();
 296 }
 297 #endif
 298 
 299 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
 300                                           OopClosure*        keep_alive,
 301                                           VoidClosure*       complete_gc) {
 302 #ifndef PRODUCT
 303   if (PrintGCDetails && PrintReferenceGC) {
 304     unsigned int count = count_jni_refs();
 305     gclog_or_tty->print(", %u refs", count);
 306   }
 307 #endif
 308   JNIHandles::weak_oops_do(is_alive, keep_alive);
 309   complete_gc->do_void();
 310 }
 311 
 312 
 313 template <class T>
 314 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
 315                                    AbstractRefProcTaskExecutor* task_executor) {
 316 
 317   // Remember old value of pending references list
 318   T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
 319   T old_pending_list_value = *pending_list_addr;
 320 
 321   // Enqueue references that are not made active again, and
 322   // clear the decks for the next collection (cycle).
 323   ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
 324   // Do the post-barrier on pending_list_addr missed in
 325   // enqueue_discovered_reflist.
 326   oopDesc::bs()->write_ref_field(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
 327 
 328   // Stop treating discovered references specially.
 329   ref->disable_discovery();
 330 
 331   // Return true if new pending references were added
 332   return ! oopDesc::unsafe_equals(old_pending_list_value, *pending_list_addr);
 333 }
 334 
 335 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
 336   NOT_PRODUCT(verify_ok_to_handle_reflists());
 337   if (UseCompressedOops) {
 338     return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
 339   } else {
 340     return enqueue_discovered_ref_helper<oop>(this, task_executor);
 341   }
 342 }
 343 
 344 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
 345                                                     HeapWord* pending_list_addr) {
 346   // Given a list of refs linked through the "discovered" field
 347   // (java.lang.ref.Reference.discovered), self-loop their "next" field
 348   // thus distinguishing them from active References, then
 349   // prepend them to the pending list.
 350   //
 351   // The Java threads will see the Reference objects linked together through
 352   // the discovered field. Instead of trying to do the write barrier updates
 353   // in all places in the reference processor where we manipulate the discovered
 354   // field we make sure to do the barrier here where we anyway iterate through
 355   // all linked Reference objects. Note that it is important to not dirty any
 356   // cards during reference processing since this will cause card table
 357   // verification to fail for G1.
 358   //
 359   // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
 360   // the "next" field is used to chain the pending list, not the discovered
 361   // field.
 362   if (TraceReferenceGC && PrintGCDetails) {
 363     gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
 364                            INTPTR_FORMAT, (address)refs_list.head());
 365   }
 366 
 367   oop obj = NULL;
 368   oop next_d = refs_list.head();
 369   if (pending_list_uses_discovered_field()) { // New behavior
 370     // Walk down the list, self-looping the next field
 371     // so that the References are not considered active.
 372     while (! oopDesc::unsafe_equals(obj, next_d)) {
 373       obj = next_d;
 374       assert(obj->is_instanceRef(), "should be reference object");
 375       next_d = java_lang_ref_Reference::discovered(obj);
 376       if (TraceReferenceGC && PrintGCDetails) {
 377         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
 378                                (void *)obj, (void *)next_d);
 379       }
 380       assert(java_lang_ref_Reference::next(obj) == NULL,
 381              "Reference not active; should not be discovered");
 382       // Self-loop next, so as to make Ref not active.
 383       java_lang_ref_Reference::set_next_raw(obj, obj);
 384       if (! oopDesc::unsafe_equals(next_d, obj)) {
 385         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), next_d);
 386       } else {
 387         // This is the last object.
 388         // Swap refs_list into pending_list_addr and
 389         // set obj's discovered to what we read from pending_list_addr.
 390         oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
 391         // Need post-barrier on pending_list_addr. See enqueue_discovered_ref_helper() above.
 392         java_lang_ref_Reference::set_discovered_raw(obj, old); // old may be NULL
 393         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), old);
 394       }
 395     }
 396   } else { // Old behavior
 397     // Walk down the list, copying the discovered field into
 398     // the next field and clearing the discovered field.
 399     while (obj != next_d) {
 400       obj = next_d;
 401       assert(obj->is_instanceRef(), "should be reference object");
 402       next_d = java_lang_ref_Reference::discovered(obj);
 403       if (TraceReferenceGC && PrintGCDetails) {
 404         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,


 482   oop discovered = java_lang_ref_Reference::discovered(_ref);
 483   assert(_discovered_addr && discovered->is_oop_or_null(),
 484          "discovered field is bad");
 485   _next = discovered;
 486   _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
 487   _referent = java_lang_ref_Reference::referent(_ref);
 488   assert(Universe::heap()->is_in_reserved_or_null(_referent),
 489          "Wrong oop found in java.lang.Reference object");
 490   assert(allow_null_referent ?
 491              _referent->is_oop_or_null()
 492            : _referent->is_oop(),
 493          "bad referent");
 494 }
 495 
 496 void DiscoveredListIterator::remove() {
 497   assert(_ref->is_oop(), "Dropping a bad reference");
 498   oop_store_raw(_discovered_addr, NULL);
 499 
 500   // First _prev_next ref actually points into DiscoveredList (gross).
 501   oop new_next;
 502   if (oopDesc::unsafe_equals(_next, _ref)) {
 503     // At the end of the list, we should make _prev point to itself.
 504     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 505     // and _prev will be NULL.
 506     new_next = _prev;
 507   } else {
 508     new_next = _next;
 509   }
 510   // Remove Reference object from discovered list. Note that G1 does not need a
 511   // pre-barrier here because we know the Reference has already been found/marked,
 512   // that's how it ended up in the discovered list in the first place.
 513   oop_store_raw(_prev_next, new_next);
 514   NOT_PRODUCT(_removed++);
 515   _refs_list.dec_length(1);
 516 }
 517 
 518 // Make the Reference object active again.
 519 void DiscoveredListIterator::make_active() {
 520   // The pre barrier for G1 is probably just needed for the old
 521   // reference processing behavior. Should we guard this with
 522   // ReferenceProcessor::pending_list_uses_discovered_field() ?


 686       iter.make_referent_alive();
 687     }
 688     if (TraceReferenceGC) {
 689       gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 690                              clear_referent ? "cleared " : "",
 691                              (void *)iter.obj(), iter.obj()->klass()->internal_name());
 692     }
 693     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
 694     iter.next();
 695   }
 696   // Remember to update the next pointer of the last ref.
 697   iter.update_discovered();
 698   // Close the reachable set
 699   complete_gc->do_void();
 700 }
 701 
 702 void
 703 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
 704   oop obj = NULL;
 705   oop next = refs_list.head();
 706   while (! oopDesc::unsafe_equals(next, obj)) {
 707     obj = next;
 708     next = java_lang_ref_Reference::discovered(obj);
 709     java_lang_ref_Reference::set_discovered_raw(obj, NULL);
 710   }
 711   refs_list.set_head(NULL);
 712   refs_list.set_length(0);
 713 }
 714 
 715 void
 716 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
 717   clear_discovered_references(refs_list);
 718 }
 719 
 720 void ReferenceProcessor::abandon_partial_discovery() {
 721   // loop over the lists
 722   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 723     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 724       gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
 725     }
 726     abandon_partial_discovered_list(_discovered_refs[i]);


 838         oop move_head = ref_lists[from_idx].head();
 839         oop move_tail = move_head;
 840         oop new_head  = move_head;
 841         // find an element to split the list on
 842         for (size_t j = 0; j < refs_to_move; ++j) {
 843           move_tail = new_head;
 844           new_head = java_lang_ref_Reference::discovered(new_head);
 845         }
 846 
 847         // Add the chain to the to list.
 848         if (ref_lists[to_idx].head() == NULL) {
 849           // to list is empty. Make a loop at the end.
 850           java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
 851         } else {
 852           java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
 853         }
 854         ref_lists[to_idx].set_head(move_head);
 855         ref_lists[to_idx].inc_length(refs_to_move);
 856 
 857         // Remove the chain from the from list.
 858         if (oopDesc::unsafe_equals(move_tail, new_head)) {
 859           // We found the end of the from list.
 860           ref_lists[from_idx].set_head(NULL);
 861         } else {
 862           ref_lists[from_idx].set_head(new_head);
 863         }
 864         ref_lists[from_idx].dec_length(refs_to_move);
 865         if (ref_lists[from_idx].length() == 0) {
 866           break;
 867         }
 868       } else {
 869         to_idx = (to_idx + 1) % _num_q;
 870       }
 871     }
 872   }
 873 #ifdef ASSERT
 874   size_t balanced_total_refs = 0;
 875   for (uint i = 0; i < _max_num_q; ++i) {
 876     balanced_total_refs += ref_lists[i].length();
 877     if (TraceReferenceGC && PrintGCDetails) {
 878       gclog_or_tty->print(SIZE_FORMAT " ", ref_lists[i].length());


1127 //     the referent is in the generation (span) being currently collected
1128 //     then we can discover the reference object, provided
1129 //     the object has not already been discovered by
1130 //     a different concurrently running collector (as may be the
1131 //     case, for instance, if the reference object is in CMS and
1132 //     the referent in DefNewGeneration), and provided the processing
1133 //     of this reference object by the current collector will
1134 //     appear atomic to every other collector in the system.
1135 //     (Thus, for instance, a concurrent collector may not
1136 //     discover references in other generations even if the
1137 //     referent is in its own generation). This policy may,
1138 //     in certain cases, enqueue references somewhat sooner than
1139 //     might Policy #0 above, but at marginally increased cost
1140 //     and complexity in processing these references.
 1141 //     We call this choice the "ReferentBasedDiscovery" policy.
1142 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
1143   // Make sure we are discovering refs (rather than processing discovered refs).
1144   if (!_discovering_refs || !RegisterReferences) {
1145     return false;
 1146   }
 1147   // We only discover active references.
1148   oop next = java_lang_ref_Reference::next(obj);
1149   if (next != NULL) {   // Ref is no longer active
1150     return false;
1151   }
1152 
1153   HeapWord* obj_addr = (HeapWord*)obj;
1154   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1155       !_span.contains(obj_addr)) {
1156     // Reference is not in the originating generation;
1157     // don't treat it specially (i.e. we want to scan it as a normal
1158     // object with strong references).
1159     return false;
1160   }
1161 
1162   // We only discover references whose referents are not (yet)
1163   // known to be strongly reachable.
1164   if (is_alive_non_header() != NULL) {
1165     verify_referent(obj);
1166     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {

