267 _next_discovered = discovered;
268
269 _referent_addr = java_lang_ref_Reference::referent_addr_raw(_current_discovered);
270 _referent = java_lang_ref_Reference::referent(_current_discovered);
271 assert(Universe::heap()->is_in_reserved_or_null(_referent),
272 "Wrong oop found in java.lang.Reference object");
273 assert(allow_null_referent ?
274 oopDesc::is_oop_or_null(_referent)
275 : oopDesc::is_oop(_referent),
276 "Expected an oop%s for referent field at " PTR_FORMAT,
277 (allow_null_referent ? " or NULL" : ""),
278 p2i(_referent));
279 }
280
281 void DiscoveredListIterator::remove() {
282 assert(oopDesc::is_oop(_current_discovered), "Dropping a bad reference");
283 RawAccess<>::oop_store(_current_discovered_addr, oop(NULL));
284
285 // First _prev_next ref actually points into DiscoveredList (gross).
286 oop new_next;
287 if (_next_discovered == _current_discovered) {
288 // At the end of the list, we should make _prev point to itself.
289 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
290 // and _prev will be NULL.
291 new_next = _prev_discovered;
292 } else {
293 new_next = _next_discovered;
294 }
295 // Remove Reference object from discovered list. Note that G1 does not need a
296 // pre-barrier here because we know the Reference has already been found/marked,
297 // that's how it ended up in the discovered list in the first place.
298 RawAccess<>::oop_store(_prev_discovered_addr, new_next);
299 _removed++;
300 _refs_list.dec_length(1);
301 }
302
// Clear the referent field of the current Reference object.  A raw store is
// used here; the code performs no barrier bookkeeping for the dropped value.
void DiscoveredListIterator::clear_referent() {
  RawAccess<>::oop_store(_referent_addr, oop(NULL));
}
306
307 void DiscoveredListIterator::enqueue() {
457 } else {
458 iter.clear_referent();
459 iter.enqueue();
460 log_enqueued_ref(iter, "cleared Phantom");
461 iter.next();
462 }
463 }
464 iter.complete_enqueue();
465 // Close the reachable set; needed for collectors which keep_alive_closure do
466 // not immediately complete their work.
467 complete_gc->do_void();
468 refs_list.clear();
469
470 return iter.removed();
471 }
472
473 void
474 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
475 oop obj = NULL;
476 oop next = refs_list.head();
477 while (next != obj) {
478 obj = next;
479 next = java_lang_ref_Reference::discovered(obj);
480 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
481 }
482 refs_list.clear();
483 }
484
485 void ReferenceProcessor::abandon_partial_discovery() {
486 // loop over the lists
487 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
488 if ((i % _max_num_queues) == 0) {
489 log_develop_trace(gc, ref)("Abandoning %s discovered list", list_name(i));
490 }
491 clear_discovered_references(_discovered_refs[i]);
492 }
493 }
494
495 size_t ReferenceProcessor::total_reference_count(ReferenceType type) const {
496 DiscoveredList* list = NULL;
497
729 oop move_head = ref_lists[from_idx].head();
730 oop move_tail = move_head;
731 oop new_head = move_head;
732 // find an element to split the list on
733 for (size_t j = 0; j < refs_to_move; ++j) {
734 move_tail = new_head;
735 new_head = java_lang_ref_Reference::discovered(new_head);
736 }
737
738 // Add the chain to the to list.
739 if (ref_lists[to_idx].head() == NULL) {
740 // to list is empty. Make a loop at the end.
741 java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
742 } else {
743 java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
744 }
745 ref_lists[to_idx].set_head(move_head);
746 ref_lists[to_idx].inc_length(refs_to_move);
747
748 // Remove the chain from the from list.
749 if (move_tail == new_head) {
750 // We found the end of the from list.
751 ref_lists[from_idx].set_head(NULL);
752 } else {
753 ref_lists[from_idx].set_head(new_head);
754 }
755 ref_lists[from_idx].dec_length(refs_to_move);
756 if (ref_lists[from_idx].length() == 0) {
757 break;
758 }
759 } else {
760 to_idx = (to_idx + 1) % _num_queues;
761 }
762 }
763 }
764 #ifdef ASSERT
765 log_reflist_counts(ref_lists, _num_queues);
766 size_t balanced_total_refs = 0;
767 for (uint i = 0; i < _num_queues; ++i) {
768 balanced_total_refs += ref_lists[i].length();
769 }
|
267 _next_discovered = discovered;
268
269 _referent_addr = java_lang_ref_Reference::referent_addr_raw(_current_discovered);
270 _referent = java_lang_ref_Reference::referent(_current_discovered);
271 assert(Universe::heap()->is_in_reserved_or_null(_referent),
272 "Wrong oop found in java.lang.Reference object");
273 assert(allow_null_referent ?
274 oopDesc::is_oop_or_null(_referent)
275 : oopDesc::is_oop(_referent),
276 "Expected an oop%s for referent field at " PTR_FORMAT,
277 (allow_null_referent ? " or NULL" : ""),
278 p2i(_referent));
279 }
280
// Unlink the current Reference from the discovered list and clear its
// discovered field.  Updates the removal count and the list's length.
void DiscoveredListIterator::remove() {
  assert(oopDesc::is_oop(_current_discovered), "Dropping a bad reference");
  // NULL out the discovered field of the Reference being removed.
  RawAccess<>::oop_store(_current_discovered_addr, oop(NULL));

  // First _prev_next ref actually points into DiscoveredList (gross).
  oop new_next;
  // unsafe_equals: oop identity compare that stays correct for collectors
  // which may keep two copies of the same object alive at once.
  if (oopDesc::unsafe_equals(_next_discovered, _current_discovered)) {
    // At the end of the list, we should make _prev point to itself.
    // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
    // and _prev will be NULL.
    new_next = _prev_discovered;
  } else {
    new_next = _next_discovered;
  }
  // Remove Reference object from discovered list. Note that G1 does not need a
  // pre-barrier here because we know the Reference has already been found/marked,
  // that's how it ended up in the discovered list in the first place.
  RawAccess<>::oop_store(_prev_discovered_addr, new_next);
  _removed++;
  _refs_list.dec_length(1);
}
302
// Clear the referent field of the current Reference object.  A raw store is
// used here; the code performs no barrier bookkeeping for the dropped value.
void DiscoveredListIterator::clear_referent() {
  RawAccess<>::oop_store(_referent_addr, oop(NULL));
}
306
307 void DiscoveredListIterator::enqueue() {
457 } else {
458 iter.clear_referent();
459 iter.enqueue();
460 log_enqueued_ref(iter, "cleared Phantom");
461 iter.next();
462 }
463 }
464 iter.complete_enqueue();
465 // Close the reachable set; needed for collectors which keep_alive_closure do
466 // not immediately complete their work.
467 complete_gc->do_void();
468 refs_list.clear();
469
470 return iter.removed();
471 }
472
// Walk refs_list, clearing the discovered field of every Reference on it,
// then empty the list itself.  The list is terminated by a self-loop (the
// last element's discovered field points back to itself), so iteration
// stops once next and obj compare equal.
void
ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
  oop obj = NULL;
  oop next = refs_list.head();
  // unsafe_equals: oop identity compare that stays correct for collectors
  // which may keep two copies of the same object alive at once.
  while (! oopDesc::unsafe_equals(next, obj)) {
    obj = next;
    next = java_lang_ref_Reference::discovered(obj);
    java_lang_ref_Reference::set_discovered_raw(obj, NULL);
  }
  refs_list.clear();
}
484
485 void ReferenceProcessor::abandon_partial_discovery() {
486 // loop over the lists
487 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
488 if ((i % _max_num_queues) == 0) {
489 log_develop_trace(gc, ref)("Abandoning %s discovered list", list_name(i));
490 }
491 clear_discovered_references(_discovered_refs[i]);
492 }
493 }
494
495 size_t ReferenceProcessor::total_reference_count(ReferenceType type) const {
496 DiscoveredList* list = NULL;
497
729 oop move_head = ref_lists[from_idx].head();
730 oop move_tail = move_head;
731 oop new_head = move_head;
732 // find an element to split the list on
733 for (size_t j = 0; j < refs_to_move; ++j) {
734 move_tail = new_head;
735 new_head = java_lang_ref_Reference::discovered(new_head);
736 }
737
738 // Add the chain to the to list.
739 if (ref_lists[to_idx].head() == NULL) {
740 // to list is empty. Make a loop at the end.
741 java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
742 } else {
743 java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
744 }
745 ref_lists[to_idx].set_head(move_head);
746 ref_lists[to_idx].inc_length(refs_to_move);
747
748 // Remove the chain from the from list.
749 if (oopDesc::unsafe_equals(move_tail, new_head)) {
750 // We found the end of the from list.
751 ref_lists[from_idx].set_head(NULL);
752 } else {
753 ref_lists[from_idx].set_head(new_head);
754 }
755 ref_lists[from_idx].dec_length(refs_to_move);
756 if (ref_lists[from_idx].length() == 0) {
757 break;
758 }
759 } else {
760 to_idx = (to_idx + 1) % _num_queues;
761 }
762 }
763 }
764 #ifdef ASSERT
765 log_reflist_counts(ref_lists, _num_queues);
766 size_t balanced_total_refs = 0;
767 for (uint i = 0; i < _num_queues; ++i) {
768 balanced_total_refs += ref_lists[i].length();
769 }
|