
src/share/vm/gc/shared/referenceProcessor.cpp

rev 8978 : imported patch remove_err_msg
rev 8979 : [mq]: vmerr_static
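
This webrev shows the file before the patch (first set of fragments, using err_msg) and after it (second set, without err_msg). The patch drops the err_msg() wrapper from assertion messages now that the assert macro accepts a printf-style format string and its arguments directly, as the patched fragments below demonstrate. A minimal standalone sketch of that idea, using hypothetical names rather than the real HotSpot vmassert machinery:

    #include <cstdio>
    #include <cstdlib>

    // Hypothetical variadic assert: the format string and its arguments are
    // handed straight to the macro, so no err_msg()-style helper is needed
    // to build the message up front.
    #define my_assert(cond, ...)                                    \
      do {                                                          \
        if (!(cond)) {                                              \
          std::fprintf(stderr, "assert(%s) failed: ", #cond);       \
          std::fprintf(stderr, __VA_ARGS__);                        \
          std::fprintf(stderr, "\n");                               \
          std::abort();                                             \
        }                                                           \
      } while (0)

    // Usage mirroring the change in this file:
    //   before: assert(p != NULL, err_msg("bad field at " PTR_FORMAT, p2i(p)));
    //   after:  assert(p != NULL, "bad field at " PTR_FORMAT, p2i(p));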


 420   AbstractRefProcTaskExecutor* task_executor) {
 421   if (_processing_is_mt && task_executor != NULL) {
 422     // Parallel code
 423     RefProcEnqueueTask tsk(*this, _discovered_refs,
 424                            pending_list_addr, _max_num_q);
 425     task_executor->execute(tsk);
 426   } else {
 427     // Serial code: call the parent class's implementation
 428     for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 429       enqueue_discovered_reflist(_discovered_refs[i], pending_list_addr);
 430       _discovered_refs[i].set_head(NULL);
 431       _discovered_refs[i].set_length(0);
 432     }
 433   }
 434 }
 435 
 436 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
 437   _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
 438   oop discovered = java_lang_ref_Reference::discovered(_ref);
 439   assert(_discovered_addr && discovered->is_oop_or_null(),
 440          err_msg("Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered)));
 441   _next = discovered;
 442   _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
 443   _referent = java_lang_ref_Reference::referent(_ref);
 444   assert(Universe::heap()->is_in_reserved_or_null(_referent),
 445          "Wrong oop found in java.lang.Reference object");
 446   assert(allow_null_referent ?
 447              _referent->is_oop_or_null()
 448            : _referent->is_oop(),
 449          err_msg("Expected an oop%s for referent field at " PTR_FORMAT,
 450                  (allow_null_referent ? " or NULL" : ""),
 451                  p2i(_referent)));
 452 }
 453 
 454 void DiscoveredListIterator::remove() {
 455   assert(_ref->is_oop(), "Dropping a bad reference");
 456   oop_store_raw(_discovered_addr, NULL);
 457 
 458   // First _prev_next ref actually points into DiscoveredList (gross).
 459   oop new_next;
 460   if (_next == _ref) {
 461     // At the end of the list, we should make _prev point to itself.
 462     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 463     // and _prev will be NULL.
 464     new_next = _prev;
 465   } else {
 466     new_next = _next;
 467   }
 468   // Remove Reference object from discovered list. Note that G1 does not need a
 469   // pre-barrier here because we know the Reference has already been found/marked,
 470   // that's how it ended up in the discovered list in the first place.
 471   oop_store_raw(_prev_next, new_next);
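
In remove() above, _prev_next holds the address of whichever slot currently points at _ref: the DiscoveredList's head field for the first element, or the previous Reference's discovered field otherwise. Storing through that address is what lets one store splice out the head and an interior element alike. A minimal standalone sketch of the same pointer-to-slot technique, with hypothetical names unrelated to the HotSpot types (the HotSpot list marks its tail with a self-pointer, which is what the _next == _ref test above handles, whereas this sketch uses a plain NULL-terminated list):

    struct Node {
      int   value;
      Node* next;
    };

    // Remove the first node whose value matches. 'link' always holds the
    // address of the slot that points at the current node: initially the
    // head pointer itself, afterwards the previous node's 'next' field.
    // Because the head and an interior link are updated through the same
    // kind of slot, no head special case is needed.
    static void remove_value(Node** head, int value) {
      Node** link = head;
      while (*link != NULL) {
        Node* cur = *link;
        if (cur->value == value) {
          *link = cur->next;   // splice 'cur' out of the list
          delete cur;
          return;
        }
        link = &cur->next;     // advance: 'link' now names cur's next slot
      }
    }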


 561       gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 562         " Refs in discovered list " INTPTR_FORMAT,
 563         iter.removed(), iter.processed(), p2i(refs_list.head()));
 564     }
 565   )
 566 }
 567 
 568 void
 569 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList&    refs_list,
 570                                                   BoolObjectClosure* is_alive,
 571                                                   OopClosure*        keep_alive,
 572                                                   VoidClosure*       complete_gc) {
 573   assert(!discovery_is_atomic(), "Error");
 574   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 575   while (iter.has_next()) {
 576     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 577     HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
 578     oop next = java_lang_ref_Reference::next(iter.obj());
 579     if ((iter.referent() == NULL || iter.is_referent_alive() ||
 580          next != NULL)) {
 581       assert(next->is_oop_or_null(), err_msg("Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next)));
 582       // Remove Reference object from list
 583       iter.remove();
 584       // Trace the cohorts
 585       iter.make_referent_alive();
 586       if (UseCompressedOops) {
 587         keep_alive->do_oop((narrowOop*)next_addr);
 588       } else {
 589         keep_alive->do_oop((oop*)next_addr);
 590       }
 591       iter.move_to_next();
 592     } else {
 593       iter.next();
 594     }
 595   }
 596   // Now close the newly reachable set
 597   complete_gc->do_void();
 598   NOT_PRODUCT(
 599     if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
 600       gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 601         " Refs in discovered list " INTPTR_FORMAT,


 976                              p2i(obj), obj->klass()->internal_name());
 977     }
 978   } else {
 979     // If retest was non-NULL, another thread beat us to it:
 980     // The reference has already been discovered...
 981     if (TraceReferenceGC) {
 982       gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
 983                              p2i(obj), obj->klass()->internal_name());
 984     }
 985   }
 986 }
 987 
 988 #ifndef PRODUCT
 989 // Non-atomic (i.e. concurrent) discovery might allow us
 990 // to observe j.l.References with NULL referents, being those
 991 // cleared concurrently by mutators during (or after) discovery.
 992 void ReferenceProcessor::verify_referent(oop obj) {
 993   bool da = discovery_is_atomic();
 994   oop referent = java_lang_ref_Reference::referent(obj);
 995   assert(da ? referent->is_oop() : referent->is_oop_or_null(),
 996          err_msg("Bad referent " INTPTR_FORMAT " found in Reference "
 997                  INTPTR_FORMAT " during %satomic discovery ",
 998                  p2i(referent), p2i(obj), da ? "" : "non-"));
 999 }
1000 #endif
1001 
1002 // We mention two of several possible choices here:
1003 // #0: if the reference object is not in the "originating generation"
1004 //     (or part of the heap being collected, indicated by our "span")
1005 //     we don't treat it specially (i.e. we scan it as we would
1006 //     a normal oop, treating its references as strong references).
1007 //     This means that references can't be discovered unless their
1008 //     referent is also in the same span. This is the simplest,
1009 //     most "local" and most conservative approach, albeit one
1010 //     that may cause weak references to be enqueued least promptly.
1011 //     We call this choice the "ReferenceBasedDiscovery" policy.
1012 // #1: the reference object may be in any generation (span), but if
1013 //     the referent is in the generation (span) being currently collected
1014 //     then we can discover the reference object, provided
1015 //     the object has not already been discovered by
1016 //     a different concurrently running collector (as may be the
1017 //     case, for instance, if the reference object is in CMS and
1018 //     the referent in DefNewGeneration), and provided the processing


1053       return false;  // referent is reachable
1054     }
1055   }
1056   if (rt == REF_SOFT) {
1057     // For soft refs we can decide now if these are not
1058     // current candidates for clearing, in which case we
1059     // can mark through them now, rather than delaying that
1060     // to the reference-processing phase. Since all current
1061     // time-stamp policies advance the soft-ref clock only
1062     // at a full collection cycle, this is always currently
1063     // accurate.
1064     if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
1065       return false;
1066     }
1067   }
1068 
1069   ResourceMark rm;      // Needed for tracing.
1070 
1071   HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1072   const oop  discovered = java_lang_ref_Reference::discovered(obj);
1073   assert(discovered->is_oop_or_null(), err_msg("Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered)));
1074   if (discovered != NULL) {
1075     // The reference has already been discovered...
1076     if (TraceReferenceGC) {
1077       gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1078                              p2i(obj), obj->klass()->internal_name());
1079     }
1080     if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1081       // assumes that an object is not processed twice;
1082       // if it's been already discovered it must be on another
1083       // generation's discovered list; so we won't discover it.
1084       return false;
1085     } else {
1086       assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1087              "Unrecognized policy");
1088       // Check assumption that an object is not potentially
1089       // discovered twice except by concurrent collectors that potentially
1090       // trace the same Reference object twice.
1091       assert(UseConcMarkSweepGC || UseG1GC,
1092              "Only possible with a concurrent marking collector");
1093       return true;
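
The soft-reference test near the top of this fragment works because, as the comment notes, _soft_ref_timestamp_clock only advances at a full collection, so should_clear_reference() gives a stable answer for the whole cycle and can be asked once, up front. As an illustration of the kind of LRU-style test such a policy can apply (hypothetical names and formula, not the actual SoftRefPolicy code): a soft reference is cleared only if it has been idle longer than a budget proportional to free heap, so plentiful memory keeps soft references alive longer.

    // Hypothetical LRU-style clearing test; 'clock' and 'stamp' are in ms.
    static bool should_clear_soft_ref(long clock, long stamp,
                                      size_t free_heap_mb, long ms_per_mb) {
      long idle   = clock - stamp;                   // time since last access
      long budget = (long)free_heap_mb * ms_per_mb;  // allowed idle time
      return idle > budget;                          // clear only when stale
    }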




 420   AbstractRefProcTaskExecutor* task_executor) {
 421   if (_processing_is_mt && task_executor != NULL) {
 422     // Parallel code
 423     RefProcEnqueueTask tsk(*this, _discovered_refs,
 424                            pending_list_addr, _max_num_q);
 425     task_executor->execute(tsk);
 426   } else {
 427     // Serial code: call the parent class's implementation
 428     for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 429       enqueue_discovered_reflist(_discovered_refs[i], pending_list_addr);
 430       _discovered_refs[i].set_head(NULL);
 431       _discovered_refs[i].set_length(0);
 432     }
 433   }
 434 }
 435 
 436 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
 437   _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
 438   oop discovered = java_lang_ref_Reference::discovered(_ref);
 439   assert(_discovered_addr && discovered->is_oop_or_null(),
 440          "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
 441   _next = discovered;
 442   _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
 443   _referent = java_lang_ref_Reference::referent(_ref);
 444   assert(Universe::heap()->is_in_reserved_or_null(_referent),
 445          "Wrong oop found in java.lang.Reference object");
 446   assert(allow_null_referent ?
 447              _referent->is_oop_or_null()
 448            : _referent->is_oop(),
 449          "Expected an oop%s for referent field at " PTR_FORMAT,
 450          (allow_null_referent ? " or NULL" : ""),
 451          p2i(_referent));
 452 }
 453 
 454 void DiscoveredListIterator::remove() {
 455   assert(_ref->is_oop(), "Dropping a bad reference");
 456   oop_store_raw(_discovered_addr, NULL);
 457 
 458   // First _prev_next ref actually points into DiscoveredList (gross).
 459   oop new_next;
 460   if (_next == _ref) {
 461     // At the end of the list, we should make _prev point to itself.
 462     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 463     // and _prev will be NULL.
 464     new_next = _prev;
 465   } else {
 466     new_next = _next;
 467   }
 468   // Remove Reference object from discovered list. Note that G1 does not need a
 469   // pre-barrier here because we know the Reference has already been found/marked,
 470   // that's how it ended up in the discovered list in the first place.
 471   oop_store_raw(_prev_next, new_next);
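
The serial branch at the top of this fragment walks _max_num_q * number_of_subclasses_of_ref() discovered lists as one flat array. A small hypothetical helper (not part of this file) showing one way such a flat layout can be indexed, under the assumption that the lists for each java.lang.ref.Reference subclass are stored back to back, _max_num_q lists per subclass:

    // Index of the discovered list for a given Reference subclass and queue,
    // assuming the layout [subclass 0: q0..q(N-1)][subclass 1: q0..q(N-1)]...
    static unsigned list_index(unsigned subclass, unsigned queue,
                               unsigned max_num_q) {
      return subclass * max_num_q + queue;
    }

    // With 4 subclasses (e.g. Soft, Weak, Final, Phantom -- assumed ordering)
    // the enqueue loop above visits indices 0 .. 4 * max_num_q - 1 once each.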


 561       gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 562         " Refs in discovered list " INTPTR_FORMAT,
 563         iter.removed(), iter.processed(), p2i(refs_list.head()));
 564     }
 565   )
 566 }
 567 
 568 void
 569 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList&    refs_list,
 570                                                   BoolObjectClosure* is_alive,
 571                                                   OopClosure*        keep_alive,
 572                                                   VoidClosure*       complete_gc) {
 573   assert(!discovery_is_atomic(), "Error");
 574   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 575   while (iter.has_next()) {
 576     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 577     HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
 578     oop next = java_lang_ref_Reference::next(iter.obj());
 579     if ((iter.referent() == NULL || iter.is_referent_alive() ||
 580          next != NULL)) {
 581       assert(next->is_oop_or_null(), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
 582       // Remove Reference object from list
 583       iter.remove();
 584       // Trace the cohorts
 585       iter.make_referent_alive();
 586       if (UseCompressedOops) {
 587         keep_alive->do_oop((narrowOop*)next_addr);
 588       } else {
 589         keep_alive->do_oop((oop*)next_addr);
 590       }
 591       iter.move_to_next();
 592     } else {
 593       iter.next();
 594     }
 595   }
 596   // Now close the newly reachable set
 597   complete_gc->do_void();
 598   NOT_PRODUCT(
 599     if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
 600       gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
 601         " Refs in discovered list " INTPTR_FORMAT,


 976                              p2i(obj), obj->klass()->internal_name());
 977     }
 978   } else {
 979     // If retest was non-NULL, another thread beat us to it:
 980     // The reference has already been discovered...
 981     if (TraceReferenceGC) {
 982       gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
 983                              p2i(obj), obj->klass()->internal_name());
 984     }
 985   }
 986 }
 987 
 988 #ifndef PRODUCT
 989 // Non-atomic (i.e. concurrent) discovery might allow us
 990 // to observe j.l.References with NULL referents, being those
 991 // cleared concurrently by mutators during (or after) discovery.
 992 void ReferenceProcessor::verify_referent(oop obj) {
 993   bool da = discovery_is_atomic();
 994   oop referent = java_lang_ref_Reference::referent(obj);
 995   assert(da ? referent->is_oop() : referent->is_oop_or_null(),
 996          "Bad referent " INTPTR_FORMAT " found in Reference "
 997          INTPTR_FORMAT " during %satomic discovery ",
 998          p2i(referent), p2i(obj), da ? "" : "non-");
 999 }
1000 #endif
1001 
1002 // We mention two of several possible choices here:
1003 // #0: if the reference object is not in the "originating generation"
1004 //     (or part of the heap being collected, indicated by our "span")
1005 //     we don't treat it specially (i.e. we scan it as we would
1006 //     a normal oop, treating its references as strong references).
1007 //     This means that references can't be discovered unless their
1008 //     referent is also in the same span. This is the simplest,
1009 //     most "local" and most conservative approach, albeit one
1010 //     that may cause weak references to be enqueued least promptly.
1011 //     We call this choice the "ReferenceBasedDiscovery" policy.
1012 // #1: the reference object may be in any generation (span), but if
1013 //     the referent is in the generation (span) being currently collected
1014 //     then we can discover the reference object, provided
1015 //     the object has not already been discovered by
1016 //     a different concurrently running collector (as may be the
1017 //     case, for instance, if the reference object is in CMS and
1018 //     the referent in DefNewGeneration), and provided the processing


1053       return false;  // referent is reachable
1054     }
1055   }
1056   if (rt == REF_SOFT) {
1057     // For soft refs we can decide now if these are not
1058     // current candidates for clearing, in which case we
1059     // can mark through them now, rather than delaying that
1060     // to the reference-processing phase. Since all current
1061     // time-stamp policies advance the soft-ref clock only
1062     // at a full collection cycle, this is always currently
1063     // accurate.
1064     if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
1065       return false;
1066     }
1067   }
1068 
1069   ResourceMark rm;      // Needed for tracing.
1070 
1071   HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1072   const oop  discovered = java_lang_ref_Reference::discovered(obj);
1073   assert(discovered->is_oop_or_null(), "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
1074   if (discovered != NULL) {
1075     // The reference has already been discovered...
1076     if (TraceReferenceGC) {
1077       gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1078                              p2i(obj), obj->klass()->internal_name());
1079     }
1080     if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1081       // assumes that an object is not processed twice;
1082       // if it's been already discovered it must be on another
1083       // generation's discovered list; so we won't discover it.
1084       return false;
1085     } else {
1086       assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1087              "Unrecognized policy");
1088       // Check assumption that an object is not potentially
1089       // discovered twice except by concurrent collectors that potentially
1090       // trace the same Reference object twice.
1091       assert(UseConcMarkSweepGC || UseG1GC,
1092              "Only possible with a concurrent marking collector");
1093       return true;
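
The policy checks above implement the distinction described in the long comment earlier in the file: under ReferenceBasedDiscovery (#0) only a Reference object that itself lies in the collected span is discovered, while under ReferentBasedDiscovery (#1) a Reference is discovered whenever its referent lies in the collected span, wherever the Reference object itself lives. A minimal standalone model of that distinction, with hypothetical types and names (the real code must additionally cope with concurrent collectors racing on discovery, as the asserts above show):

    enum DiscoveryPolicy { kReferenceBased, kReferentBased };

    struct Span {
      char* start;
      char* end;
      bool contains(const void* p) const {
        return (const char*)p >= start && (const char*)p < end;
      }
    };

    // Policy #0: only the Reference object's own location matters.
    // Policy #1: only the referent's location matters.
    static bool eligible_for_discovery(DiscoveryPolicy policy, const Span& span,
                                       const void* reference_obj,
                                       const void* referent) {
      return policy == kReferenceBased ? span.contains(reference_obj)
                                       : span.contains(referent);
    }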

