456 AbstractRefProcTaskExecutor* task_executor) {
457 if (_processing_is_mt && task_executor != NULL) {
458 // Parallel code
459 RefProcEnqueueTask tsk(*this, _discovered_refs,
460 pending_list_addr, _max_num_q);
461 task_executor->execute(tsk);
462 } else {
463 // Serial code: call the parent class's implementation
464 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
465 enqueue_discovered_reflist(_discovered_refs[i], pending_list_addr);
466 _discovered_refs[i].set_head(NULL);
467 _discovered_refs[i].set_length(0);
468 }
469 }
470 }
471
472 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
// Cache the discovered and referent fields (and their raw addresses) of the
// current Reference (_ref) so later iterator operations can reuse them.
473 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
474 oop discovered = java_lang_ref_Reference::discovered(_ref);
// Include the failing pointer value in the assert message (err_msg/p2i) so a
// bad-oop failure is diagnosable from the hs_err log alone.
475 assert(_discovered_addr && discovered->is_oop_or_null(),
476 err_msg("discovered field is bad: " PTR_FORMAT, p2i(discovered)));
477 _next = discovered;
478 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
479 _referent = java_lang_ref_Reference::referent(_ref);
480 assert(Universe::heap()->is_in_reserved_or_null(_referent),
481 "Wrong oop found in java.lang.Reference object");
// A NULL referent is only legal when the caller says so (e.g. concurrent
// discovery, where the mutator may clear the referent under us).
482 assert(allow_null_referent ?
483 _referent->is_oop_or_null()
484 : _referent->is_oop(),
485 err_msg("bad referent: " PTR_FORMAT, p2i(_referent)));
486 }
487
488 void DiscoveredListIterator::remove() {
489 assert(_ref->is_oop(), "Dropping a bad reference");
490 oop_store_raw(_discovered_addr, NULL);
491
492 // First _prev_next ref actually points into DiscoveredList (gross).
493 oop new_next;
494 if (_next == _ref) {
495 // At the end of the list, we should make _prev point to itself.
496 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
497 // and _prev will be NULL.
498 new_next = _prev;
499 } else {
500 new_next = _next;
501 }
502 // Remove Reference object from discovered list. Note that G1 does not need a
503 // pre-barrier here because we know the Reference has already been found/marked,
504 // that's how it ended up in the discovered list in the first place.
505 oop_store_raw(_prev_next, new_next);
613 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
614 "Refs in discovered list " INTPTR_FORMAT,
615 iter.removed(), iter.processed(), (address)refs_list.head());
616 }
617 )
618 }
619
620 void
621 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
622 BoolObjectClosure* is_alive,
623 OopClosure* keep_alive,
624 VoidClosure* complete_gc) {
625 assert(!discovery_is_atomic(), "Error");
626 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
627 while (iter.has_next()) {
628 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
629 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
630 oop next = java_lang_ref_Reference::next(iter.obj());
631 if ((iter.referent() == NULL || iter.is_referent_alive() ||
632 next != NULL)) {
633 assert(next->is_oop_or_null(), "bad next field");
634 // Remove Reference object from list
635 iter.remove();
636 // Trace the cohorts
637 iter.make_referent_alive();
638 if (UseCompressedOops) {
639 keep_alive->do_oop((narrowOop*)next_addr);
640 } else {
641 keep_alive->do_oop((oop*)next_addr);
642 }
643 iter.move_to_next();
644 } else {
645 iter.next();
646 }
647 }
648 // Now close the newly reachable set
649 complete_gc->do_void();
650 NOT_PRODUCT(
651 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
652 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
653 "Refs in discovered list " INTPTR_FORMAT,
962 }
963
// Scrub every discovered list of entries whose referent has been cleared or
// which are no longer active, by delegating each list to
// clean_up_discovered_reflist().
964 void ReferenceProcessor::clean_up_discovered_references() {
965 // loop over the lists
// There are _max_num_q lists per Reference subclass, hence the product bound.
966 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
// Log once per subclass group: (i % _max_num_q) == 0 marks the first queue
// of each subclass's run of lists.
967 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
968 gclog_or_tty->print_cr(
969 "\nScrubbing %s discovered list of Null referents",
970 list_name(i));
971 }
972 clean_up_discovered_reflist(_discovered_refs[i]);
973 }
974 }
975
976 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
977 assert(!discovery_is_atomic(), "Else why call this method?");
978 DiscoveredListIterator iter(refs_list, NULL, NULL);
979 while (iter.has_next()) {
980 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
981 oop next = java_lang_ref_Reference::next(iter.obj());
982 assert(next->is_oop_or_null(), "bad next field");
983 // If referent has been cleared or Reference is not active,
984 // drop it.
985 if (iter.referent() == NULL || next != NULL) {
986 debug_only(
987 if (PrintGCDetails && TraceReferenceGC) {
988 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
989 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
990 " and referent: " INTPTR_FORMAT,
991 (void *)iter.obj(), (void *)next, (void *)iter.referent());
992 }
993 )
994 // Remove Reference object from list
995 iter.remove();
996 iter.move_to_next();
997 } else {
998 iter.next();
999 }
1000 }
1001 NOT_PRODUCT(
1002 if (PrintGCDetails && TraceReferenceGC) {
1155 return false; // referent is reachable
1156 }
1157 }
1158 if (rt == REF_SOFT) {
1159 // For soft refs we can decide now if these are not
1160 // current candidates for clearing, in which case we
1161 // can mark through them now, rather than delaying that
1162 // to the reference-processing phase. Since all current
1163 // time-stamp policies advance the soft-ref clock only
1164 // at a major collection cycle, this is always currently
1165 // accurate.
1166 if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
1167 return false;
1168 }
1169 }
1170
1171 ResourceMark rm; // Needed for tracing.
1172
1173 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1174 const oop discovered = java_lang_ref_Reference::discovered(obj);
1175 assert(discovered->is_oop_or_null(), "bad discovered field");
1176 if (discovered != NULL) {
1177 // The reference has already been discovered...
1178 if (TraceReferenceGC) {
1179 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1180 (void *)obj, obj->klass()->internal_name());
1181 }
1182 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1183 // assumes that an object is not processed twice;
1184 // if it's been already discovered it must be on another
1185 // generation's discovered list; so we won't discover it.
1186 return false;
1187 } else {
1188 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1189 "Unrecognized policy");
1190 // Check assumption that an object is not potentially
1191 // discovered twice except by concurrent collectors that potentially
1192 // trace the same Reference object twice.
1193 assert(UseConcMarkSweepGC || UseG1GC,
1194 "Only possible with a concurrent marking collector");
1195 return true;
|
456 AbstractRefProcTaskExecutor* task_executor) {
457 if (_processing_is_mt && task_executor != NULL) {
458 // Parallel code
459 RefProcEnqueueTask tsk(*this, _discovered_refs,
460 pending_list_addr, _max_num_q);
461 task_executor->execute(tsk);
462 } else {
463 // Serial code: call the parent class's implementation
464 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
465 enqueue_discovered_reflist(_discovered_refs[i], pending_list_addr);
466 _discovered_refs[i].set_head(NULL);
467 _discovered_refs[i].set_length(0);
468 }
469 }
470 }
471
// Cache the discovered and referent fields (and their raw addresses) of the
// current Reference (_ref) so later iterator operations can reuse them.
472 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
473 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
474 oop discovered = java_lang_ref_Reference::discovered(_ref);
// err_msg/p2i embed the failing pointer value so a bad-oop failure is
// diagnosable from the hs_err log alone.
475 assert(_discovered_addr && discovered->is_oop_or_null(),
476 err_msg("discovered field is bad: " PTR_FORMAT, p2i(discovered)));
477 _next = discovered;
478 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
479 _referent = java_lang_ref_Reference::referent(_ref);
480 assert(Universe::heap()->is_in_reserved_or_null(_referent),
481 "Wrong oop found in java.lang.Reference object");
// A NULL referent is only legal when the caller says so (e.g. concurrent
// discovery, where the mutator may clear the referent under us).
482 assert(allow_null_referent ?
483 _referent->is_oop_or_null()
484 : _referent->is_oop(),
485 err_msg("bad referent: " PTR_FORMAT, p2i(_referent)));
486 }
487
488 void DiscoveredListIterator::remove() {
489 assert(_ref->is_oop(), "Dropping a bad reference");
490 oop_store_raw(_discovered_addr, NULL);
491
492 // First _prev_next ref actually points into DiscoveredList (gross).
493 oop new_next;
494 if (_next == _ref) {
495 // At the end of the list, we should make _prev point to itself.
496 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
497 // and _prev will be NULL.
498 new_next = _prev;
499 } else {
500 new_next = _next;
501 }
502 // Remove Reference object from discovered list. Note that G1 does not need a
503 // pre-barrier here because we know the Reference has already been found/marked,
504 // that's how it ended up in the discovered list in the first place.
505 oop_store_raw(_prev_next, new_next);
613 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
614 "Refs in discovered list " INTPTR_FORMAT,
615 iter.removed(), iter.processed(), (address)refs_list.head());
616 }
617 )
618 }
619
620 void
621 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
622 BoolObjectClosure* is_alive,
623 OopClosure* keep_alive,
624 VoidClosure* complete_gc) {
625 assert(!discovery_is_atomic(), "Error");
626 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
627 while (iter.has_next()) {
628 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
629 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
630 oop next = java_lang_ref_Reference::next(iter.obj());
631 if ((iter.referent() == NULL || iter.is_referent_alive() ||
632 next != NULL)) {
633 assert(next->is_oop_or_null(), err_msg("bad next field: " PTR_FORMAT, p2i(next)));
634 // Remove Reference object from list
635 iter.remove();
636 // Trace the cohorts
637 iter.make_referent_alive();
638 if (UseCompressedOops) {
639 keep_alive->do_oop((narrowOop*)next_addr);
640 } else {
641 keep_alive->do_oop((oop*)next_addr);
642 }
643 iter.move_to_next();
644 } else {
645 iter.next();
646 }
647 }
648 // Now close the newly reachable set
649 complete_gc->do_void();
650 NOT_PRODUCT(
651 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
652 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
653 "Refs in discovered list " INTPTR_FORMAT,
962 }
963
// Scrub every discovered list of entries whose referent has been cleared or
// which are no longer active, by delegating each list to
// clean_up_discovered_reflist().
964 void ReferenceProcessor::clean_up_discovered_references() {
965 // loop over the lists
// There are _max_num_q lists per Reference subclass, hence the product bound.
966 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
// Log once per subclass group: (i % _max_num_q) == 0 marks the first queue
// of each subclass's run of lists.
967 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
968 gclog_or_tty->print_cr(
969 "\nScrubbing %s discovered list of Null referents",
970 list_name(i));
971 }
972 clean_up_discovered_reflist(_discovered_refs[i]);
973 }
974 }
975
976 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
977 assert(!discovery_is_atomic(), "Else why call this method?");
978 DiscoveredListIterator iter(refs_list, NULL, NULL);
979 while (iter.has_next()) {
980 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
981 oop next = java_lang_ref_Reference::next(iter.obj());
982 assert(next->is_oop_or_null(), err_msg("bad next field: " PTR_FORMAT, p2i(next)));
983 // If referent has been cleared or Reference is not active,
984 // drop it.
985 if (iter.referent() == NULL || next != NULL) {
986 debug_only(
987 if (PrintGCDetails && TraceReferenceGC) {
988 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
989 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
990 " and referent: " INTPTR_FORMAT,
991 (void *)iter.obj(), (void *)next, (void *)iter.referent());
992 }
993 )
994 // Remove Reference object from list
995 iter.remove();
996 iter.move_to_next();
997 } else {
998 iter.next();
999 }
1000 }
1001 NOT_PRODUCT(
1002 if (PrintGCDetails && TraceReferenceGC) {
1155 return false; // referent is reachable
1156 }
1157 }
1158 if (rt == REF_SOFT) {
1159 // For soft refs we can decide now if these are not
1160 // current candidates for clearing, in which case we
1161 // can mark through them now, rather than delaying that
1162 // to the reference-processing phase. Since all current
1163 // time-stamp policies advance the soft-ref clock only
1164 // at a major collection cycle, this is always currently
1165 // accurate.
1166 if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
1167 return false;
1168 }
1169 }
1170
1171 ResourceMark rm; // Needed for tracing.
1172
1173 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1174 const oop discovered = java_lang_ref_Reference::discovered(obj);
1175 assert(discovered->is_oop_or_null(), err_msg("bad discovered field: " PTR_FORMAT, p2i(discovered)));
1176 if (discovered != NULL) {
1177 // The reference has already been discovered...
1178 if (TraceReferenceGC) {
1179 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1180 (void *)obj, obj->klass()->internal_name());
1181 }
1182 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1183 // assumes that an object is not processed twice;
1184 // if it's been already discovered it must be on another
1185 // generation's discovered list; so we won't discover it.
1186 return false;
1187 } else {
1188 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1189 "Unrecognized policy");
1190 // Check assumption that an object is not potentially
1191 // discovered twice except by concurrent collectors that potentially
1192 // trace the same Reference object twice.
1193 assert(UseConcMarkSweepGC || UseG1GC,
1194 "Only possible with a concurrent marking collector");
1195 return true;
|