384
385 RefProcEnqueueTimeTracker tt(phase_times, stats);
386
387 if (_processing_is_mt && task_executor != NULL) {
388 // Parallel code
389 RefProcEnqueueTask tsk(*this, _discovered_refs, _max_num_q, phase_times);
390 task_executor->execute(tsk);
391 } else {
392 // Serial code: call the parent class's implementation
393 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
394 enqueue_discovered_reflist(_discovered_refs[i]);
395 _discovered_refs[i].set_head(NULL);
396 _discovered_refs[i].set_length(0);
397 }
398 }
399 }
400
401 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
402 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
403 oop discovered = java_lang_ref_Reference::discovered(_ref);
404 assert(_discovered_addr && discovered->is_oop_or_null(),
405 "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
406 _next = discovered;
407 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
408 _referent = java_lang_ref_Reference::referent(_ref);
409 assert(Universe::heap()->is_in_reserved_or_null(_referent),
410 "Wrong oop found in java.lang.Reference object");
411 assert(allow_null_referent ?
412 _referent->is_oop_or_null()
413 : _referent->is_oop(),
414 "Expected an oop%s for referent field at " PTR_FORMAT,
415 (allow_null_referent ? " or NULL" : ""),
416 p2i(_referent));
417 }
418
419 void DiscoveredListIterator::remove() {
420 assert(_ref->is_oop(), "Dropping a bad reference");
421 oop_store_raw(_discovered_addr, NULL);
422
423 // First _prev_next ref actually points into DiscoveredList (gross).
424 oop new_next;
425 if (_next == _ref) {
426 // At the end of the list, we should make _prev point to itself.
427 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
428 // and _prev will be NULL.
429 new_next = _prev;
430 } else {
431 new_next = _next;
432 }
433 // Remove Reference object from discovered list. Note that G1 does not need a
434 // pre-barrier here because we know the Reference has already been found/marked,
435 // that's how it ended up in the discovered list in the first place.
436 oop_store_raw(_prev_next, new_next);
437 NOT_PRODUCT(_removed++);
438 _refs_list.dec_length(1);
439 }
440
517 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
518 " Refs in discovered list " INTPTR_FORMAT,
519 iter.removed(), iter.processed(), p2i(&refs_list));
520 }
521 )
522 }
523
524 void
525 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
526 BoolObjectClosure* is_alive,
527 OopClosure* keep_alive,
528 VoidClosure* complete_gc) {
529 assert(!discovery_is_atomic(), "Error");
530 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
531 while (iter.has_next()) {
532 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
533 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
534 oop next = java_lang_ref_Reference::next(iter.obj());
535 if ((iter.referent() == NULL || iter.is_referent_alive() ||
536 next != NULL)) {
537 assert(next->is_oop_or_null(), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
538 // Remove Reference object from list
539 iter.remove();
540 // Trace the cohorts
541 iter.make_referent_alive();
542 if (UseCompressedOops) {
543 keep_alive->do_oop((narrowOop*)next_addr);
544 } else {
545 keep_alive->do_oop((oop*)next_addr);
546 }
547 iter.move_to_next();
548 } else {
549 iter.next();
550 }
551 }
552 // Now close the newly reachable set
553 complete_gc->do_void();
554 NOT_PRODUCT(
555 if (iter.processed() > 0) {
556 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
557 " Refs in discovered list " INTPTR_FORMAT,
565 // closure) alive.
566 void
567 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
568 bool clear_referent,
569 BoolObjectClosure* is_alive,
570 OopClosure* keep_alive,
571 VoidClosure* complete_gc) {
572 ResourceMark rm;
573 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
574 while (iter.has_next()) {
575 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
576 if (clear_referent) {
577 // NULL out referent pointer
578 iter.clear_referent();
579 } else {
580 // keep the referent around
581 iter.make_referent_alive();
582 }
583 log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
584 clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
585 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
586 iter.next();
587 }
588 // Close the reachable set
589 complete_gc->do_void();
590 }
591
592 void
593 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
594 oop obj = NULL;
595 oop next = refs_list.head();
596 while (next != obj) {
597 obj = next;
598 next = java_lang_ref_Reference::discovered(obj);
599 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
600 }
601 refs_list.set_head(NULL);
602 refs_list.set_length(0);
603 }
604
605 void ReferenceProcessor::abandon_partial_discovery() {
962 refs_list.set_head(obj);
963 refs_list.inc_length(1);
964
965 log_develop_trace(gc, ref)("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
966 p2i(obj), obj->klass()->internal_name());
967 } else {
968 // If retest was non NULL, another thread beat us to it:
969 // The reference has already been discovered...
970 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
971 p2i(obj), obj->klass()->internal_name());
972 }
973 }
974
975 #ifndef PRODUCT
976 // Non-atomic (i.e. concurrent) discovery might allow us
977 // to observe j.l.References with NULL referents, being those
978 // cleared concurrently by mutators during (or after) discovery.
979 void ReferenceProcessor::verify_referent(oop obj) {
980 bool da = discovery_is_atomic();
981 oop referent = java_lang_ref_Reference::referent(obj);
982 assert(da ? referent->is_oop() : referent->is_oop_or_null(),
983 "Bad referent " INTPTR_FORMAT " found in Reference "
984 INTPTR_FORMAT " during %satomic discovery ",
985 p2i(referent), p2i(obj), da ? "" : "non-");
986 }
987 #endif
988
989 // We mention two of several possible choices here:
990 // #0: if the reference object is not in the "originating generation"
991 // (or part of the heap being collected, indicated by our "span"
992 // we don't treat it specially (i.e. we scan it as we would
993 // a normal oop, treating its references as strong references).
994 // This means that references can't be discovered unless their
995 // referent is also in the same span. This is the simplest,
996 // most "local" and most conservative approach, albeit one
997 // that may cause weak references to be enqueued least promptly.
998 // We call this choice the "ReferenceBasedDiscovery" policy.
999 // #1: the reference object may be in any generation (span), but if
1000 // the referent is in the generation (span) being currently collected
1001 // then we can discover the reference object, provided
1002 // the object has not already been discovered by
1040 return false; // referent is reachable
1041 }
1042 }
1043 if (rt == REF_SOFT) {
1044 // For soft refs we can decide now if these are not
1045 // current candidates for clearing, in which case we
1046 // can mark through them now, rather than delaying that
1047 // to the reference-processing phase. Since all current
1048 // time-stamp policies advance the soft-ref clock only
1049 // at a full collection cycle, this is always currently
1050 // accurate.
1051 if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
1052 return false;
1053 }
1054 }
1055
1056 ResourceMark rm; // Needed for tracing.
1057
1058 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1059 const oop discovered = java_lang_ref_Reference::discovered(obj);
1060 assert(discovered->is_oop_or_null(), "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
1061 if (discovered != NULL) {
1062 // The reference has already been discovered...
1063 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
1064 p2i(obj), obj->klass()->internal_name());
1065 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1066 // assumes that an object is not processed twice;
1067 // if it's been already discovered it must be on another
1068 // generation's discovered list; so we won't discover it.
1069 return false;
1070 } else {
1071 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1072 "Unrecognized policy");
1073 // Check assumption that an object is not potentially
1074 // discovered twice except by concurrent collectors that potentially
1075 // trace the same Reference object twice.
1076 assert(UseConcMarkSweepGC || UseG1GC,
1077 "Only possible with a concurrent marking collector");
1078 return true;
1079 }
1080 }
1101 if (list == NULL) {
1102 return false; // nothing special needs to be done
1103 }
1104
1105 if (_discovery_is_mt) {
1106 add_to_discovered_list_mt(*list, obj, discovered_addr);
1107 } else {
1108 // We do a raw store here: the field will be visited later when processing
1109 // the discovered references.
1110 oop current_head = list->head();
1111 // The last ref must have its discovered field pointing to itself.
1112 oop next_discovered = (current_head != NULL) ? current_head : obj;
1113
1114 assert(discovered == NULL, "control point invariant");
1115 oop_store_raw(discovered_addr, next_discovered);
1116 list->set_head(obj);
1117 list->inc_length(1);
1118
1119 log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1120 }
1121 assert(obj->is_oop(), "Discovered a bad reference");
1122 verify_referent(obj);
1123 return true;
1124 }
1125
1126 bool ReferenceProcessor::has_discovered_references() {
1127 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1128 if (!_discovered_refs[i].is_empty()) {
1129 return true;
1130 }
1131 }
1132 return false;
1133 }
1134
1135 // Preclean the discovered references by removing those
1136 // whose referents are alive, and by marking from those that
1137 // are not active. These lists can be handled here
1138 // in any order and, indeed, concurrently.
1139 void ReferenceProcessor::preclean_discovered_references(
1140 BoolObjectClosure* is_alive,
1141 OopClosure* keep_alive,
|
384
385 RefProcEnqueueTimeTracker tt(phase_times, stats);
386
387 if (_processing_is_mt && task_executor != NULL) {
388 // Parallel code
389 RefProcEnqueueTask tsk(*this, _discovered_refs, _max_num_q, phase_times);
390 task_executor->execute(tsk);
391 } else {
392 // Serial code: call the parent class's implementation
393 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
394 enqueue_discovered_reflist(_discovered_refs[i]);
395 _discovered_refs[i].set_head(NULL);
396 _discovered_refs[i].set_length(0);
397 }
398 }
399 }
400
// Load the discovered, referent and next-in-list pointers of the current
// Reference (_ref) into the iterator's cached fields. In debug builds,
// allow_null_referent relaxes the referent check for non-atomic (concurrent)
// discovery, where a mutator may have cleared the referent after discovery.
void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
  _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
  oop discovered = java_lang_ref_Reference::discovered(_ref);
  assert(_discovered_addr && oopDesc::is_oop_or_null(discovered),
         "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
  // The discovered field chains the elements of a DiscoveredList, so it is
  // the next element from this iterator's point of view.
  _next = discovered;
  _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
  _referent = java_lang_ref_Reference::referent(_ref);
  assert(Universe::heap()->is_in_reserved_or_null(_referent),
         "Wrong oop found in java.lang.Reference object");
  assert(allow_null_referent ?
             oopDesc::is_oop_or_null(_referent)
           : oopDesc::is_oop(_referent),
         "Expected an oop%s for referent field at " PTR_FORMAT,
         (allow_null_referent ? " or NULL" : ""),
         p2i(_referent));
}
418
// Unlink the current Reference (_ref) from the discovered list: clear its own
// discovered field, then splice the predecessor link (_prev_next) past it.
// Callers in this file follow remove() with move_to_next() rather than next().
void DiscoveredListIterator::remove() {
  assert(oopDesc::is_oop(_ref), "Dropping a bad reference");
  // This Reference no longer participates in a list; clear its link field.
  oop_store_raw(_discovered_addr, NULL);

  // First _prev_next ref actually points into DiscoveredList (gross).
  oop new_next;
  if (_next == _ref) {
    // At the end of the list, we should make _prev point to itself.
    // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
    // and _prev will be NULL.
    new_next = _prev;
  } else {
    new_next = _next;
  }
  // Remove Reference object from discovered list. Note that G1 does not need a
  // pre-barrier here because we know the Reference has already been found/marked,
  // that's how it ended up in the discovered list in the first place.
  oop_store_raw(_prev_next, new_next);
  NOT_PRODUCT(_removed++);   // debug-only statistics, reported via removed()
  _refs_list.dec_length(1);
}
440
517 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
518 " Refs in discovered list " INTPTR_FORMAT,
519 iter.removed(), iter.processed(), p2i(&refs_list));
520 }
521 )
522 }
523
524 void
525 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
526 BoolObjectClosure* is_alive,
527 OopClosure* keep_alive,
528 VoidClosure* complete_gc) {
529 assert(!discovery_is_atomic(), "Error");
530 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
531 while (iter.has_next()) {
532 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
533 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
534 oop next = java_lang_ref_Reference::next(iter.obj());
535 if ((iter.referent() == NULL || iter.is_referent_alive() ||
536 next != NULL)) {
537 assert(oopDesc::is_oop_or_null(next), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
538 // Remove Reference object from list
539 iter.remove();
540 // Trace the cohorts
541 iter.make_referent_alive();
542 if (UseCompressedOops) {
543 keep_alive->do_oop((narrowOop*)next_addr);
544 } else {
545 keep_alive->do_oop((oop*)next_addr);
546 }
547 iter.move_to_next();
548 } else {
549 iter.next();
550 }
551 }
552 // Now close the newly reachable set
553 complete_gc->do_void();
554 NOT_PRODUCT(
555 if (iter.processed() > 0) {
556 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
557 " Refs in discovered list " INTPTR_FORMAT,
565 // closure) alive.
// Phase 3: traverse the references remaining on refs_list and either NULL out
// the referent (clear_referent == true) or keep the referent (and whatever
// the keep_alive closure marks from it) alive; complete_gc then closes the
// newly reachable set.
void
ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
                                   bool clear_referent,
                                   BoolObjectClosure* is_alive,
                                   OopClosure* keep_alive,
                                   VoidClosure* complete_gc) {
  ResourceMark rm;  // needed for the klass-name tracing below
  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // keep the referent around
      iter.make_referent_alive();
    }
    log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                               clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
    assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
    iter.next();
  }
  // Close the reachable set
  complete_gc->do_void();
}
591
592 void
593 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
594 oop obj = NULL;
595 oop next = refs_list.head();
596 while (next != obj) {
597 obj = next;
598 next = java_lang_ref_Reference::discovered(obj);
599 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
600 }
601 refs_list.set_head(NULL);
602 refs_list.set_length(0);
603 }
604
605 void ReferenceProcessor::abandon_partial_discovery() {
962 refs_list.set_head(obj);
963 refs_list.inc_length(1);
964
965 log_develop_trace(gc, ref)("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
966 p2i(obj), obj->klass()->internal_name());
967 } else {
968 // If retest was non NULL, another thread beat us to it:
969 // The reference has already been discovered...
970 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
971 p2i(obj), obj->klass()->internal_name());
972 }
973 }
974
#ifndef PRODUCT
// Non-atomic (i.e. concurrent) discovery might allow us
// to observe j.l.References with NULL referents, being those
// cleared concurrently by mutators during (or after) discovery;
// under atomic discovery the referent must still be a real oop.
void ReferenceProcessor::verify_referent(oop obj) {
  bool da = discovery_is_atomic();
  oop referent = java_lang_ref_Reference::referent(obj);
  assert(da ? oopDesc::is_oop(referent) : oopDesc::is_oop_or_null(referent),
         "Bad referent " INTPTR_FORMAT " found in Reference "
         INTPTR_FORMAT " during %satomic discovery ",
         p2i(referent), p2i(obj), da ? "" : "non-");
}
#endif
988
989 // We mention two of several possible choices here:
990 // #0: if the reference object is not in the "originating generation"
991 // (or part of the heap being collected, indicated by our "span"
992 // we don't treat it specially (i.e. we scan it as we would
993 // a normal oop, treating its references as strong references).
994 // This means that references can't be discovered unless their
995 // referent is also in the same span. This is the simplest,
996 // most "local" and most conservative approach, albeit one
997 // that may cause weak references to be enqueued least promptly.
998 // We call this choice the "ReferenceBasedDiscovery" policy.
999 // #1: the reference object may be in any generation (span), but if
1000 // the referent is in the generation (span) being currently collected
1001 // then we can discover the reference object, provided
1002 // the object has not already been discovered by
1040 return false; // referent is reachable
1041 }
1042 }
1043 if (rt == REF_SOFT) {
1044 // For soft refs we can decide now if these are not
1045 // current candidates for clearing, in which case we
1046 // can mark through them now, rather than delaying that
1047 // to the reference-processing phase. Since all current
1048 // time-stamp policies advance the soft-ref clock only
1049 // at a full collection cycle, this is always currently
1050 // accurate.
1051 if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
1052 return false;
1053 }
1054 }
1055
1056 ResourceMark rm; // Needed for tracing.
1057
1058 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
1059 const oop discovered = java_lang_ref_Reference::discovered(obj);
1060 assert(oopDesc::is_oop_or_null(discovered), "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
1061 if (discovered != NULL) {
1062 // The reference has already been discovered...
1063 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
1064 p2i(obj), obj->klass()->internal_name());
1065 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
1066 // assumes that an object is not processed twice;
1067 // if it's been already discovered it must be on another
1068 // generation's discovered list; so we won't discover it.
1069 return false;
1070 } else {
1071 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
1072 "Unrecognized policy");
1073 // Check assumption that an object is not potentially
1074 // discovered twice except by concurrent collectors that potentially
1075 // trace the same Reference object twice.
1076 assert(UseConcMarkSweepGC || UseG1GC,
1077 "Only possible with a concurrent marking collector");
1078 return true;
1079 }
1080 }
1101 if (list == NULL) {
1102 return false; // nothing special needs to be done
1103 }
1104
1105 if (_discovery_is_mt) {
1106 add_to_discovered_list_mt(*list, obj, discovered_addr);
1107 } else {
1108 // We do a raw store here: the field will be visited later when processing
1109 // the discovered references.
1110 oop current_head = list->head();
1111 // The last ref must have its discovered field pointing to itself.
1112 oop next_discovered = (current_head != NULL) ? current_head : obj;
1113
1114 assert(discovered == NULL, "control point invariant");
1115 oop_store_raw(discovered_addr, next_discovered);
1116 list->set_head(obj);
1117 list->inc_length(1);
1118
1119 log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1120 }
1121 assert(oopDesc::is_oop(obj), "Discovered a bad reference");
1122 verify_referent(obj);
1123 return true;
1124 }
1125
1126 bool ReferenceProcessor::has_discovered_references() {
1127 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
1128 if (!_discovered_refs[i].is_empty()) {
1129 return true;
1130 }
1131 }
1132 return false;
1133 }
1134
1135 // Preclean the discovered references by removing those
1136 // whose referents are alive, and by marking from those that
1137 // are not active. These lists can be handled here
1138 // in any order and, indeed, concurrently.
1139 void ReferenceProcessor::preclean_discovered_references(
1140 BoolObjectClosure* is_alive,
1141 OopClosure* keep_alive,
|