258 is_alive, keep_alive, complete_gc, task_executor, phase_times);
259 }
260
261 // Phantom references
262 {
263 RefProcPhaseTimesTracker tt(REF_PHANTOM, phase_times, this);
264 process_discovered_reflist(_discoveredPhantomRefs, NULL, true,
265 is_alive, keep_alive, complete_gc, task_executor, phase_times);
266 }
267
268 if (task_executor != NULL) {
269 // Record the work done by the parallel workers.
270 task_executor->set_single_threaded_mode();
271 }
272
273 phase_times->set_total_time_ms((os::elapsedTime() - start_time) * 1000);
274
275 return stats;
276 }
277
278 void ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor,
279 ReferenceProcessorPhaseTimes* phase_times) {
280 // Enqueue references that are not made active again, and
281 // clear the decks for the next collection (cycle).
282 enqueue_discovered_reflists(task_executor, phase_times);
283
284 // Stop treating discovered references specially.
285 disable_discovery();
286 }
287
288 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list) {
289 // Given a list of refs linked through the "discovered" field
290 // (java.lang.ref.Reference.discovered), self-loop their "next" field
291 // thus distinguishing them from active References, then
292 // prepend them to the pending list.
293 //
294 // The Java threads will see the Reference objects linked together through
295 // the discovered field. Instead of trying to do the write barrier updates
296 // in all places in the reference processor where we manipulate the discovered
297 // field we make sure to do the barrier here where we anyway iterate through
298 // all linked Reference objects. Note that it is important to not dirty any
299 // cards during reference processing since this will cause card table
300 // verification to fail for G1.
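// For illustration (hypothetical References R1 -> R2 -> R3 linked through their
// discovered fields, with R3 the self-looped tail of refs_list): after this pass
// every Ri has next == Ri (inactive), R1 and R2 keep their discovered links,
// R3.discovered holds the previous head of the global pending list, and
// refs_list.head() (R1) has become the new pending-list head via
// Universe::swap_reference_pending_list().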
301 log_develop_trace(gc, ref)("ReferenceProcessor::enqueue_discovered_reflist list " INTPTR_FORMAT, p2i(&refs_list));
302
303 oop obj = NULL;
304 oop next_discovered = refs_list.head();
305 // Walk down the list, self-looping the next field
306 // so that the References are not considered active.
307 while (obj != next_discovered) {
308 obj = next_discovered;
309 assert(obj->is_instance(), "should be an instance object");
310 assert(InstanceKlass::cast(obj->klass())->is_reference_instance_klass(), "should be reference object");
311 next_discovered = java_lang_ref_Reference::discovered(obj);
312 log_develop_trace(gc, ref)(" obj " INTPTR_FORMAT "/next_discovered " INTPTR_FORMAT, p2i(obj), p2i(next_discovered));
313 assert(java_lang_ref_Reference::next(obj) == NULL,
314 "Reference not active; should not be discovered");
315 // Self-loop next, so as to make Ref not active.
316 java_lang_ref_Reference::set_next_raw(obj, obj);
317 if (next_discovered != obj) {
318 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, next_discovered);
319 } else {
320 // This is the last object.
321 // Swap refs_list into pending list and set obj's
322 // discovered to what we read from the pending list.
323 oop old = Universe::swap_reference_pending_list(refs_list.head());
324 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, old);
325 }
326 }
327 }
328
329 // Parallel enqueue task
330 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
331 public:
332 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
333 DiscoveredList discovered_refs[],
334 int n_queues,
335 ReferenceProcessorPhaseTimes* phase_times)
336 : EnqueueTask(ref_processor, discovered_refs, n_queues, phase_times)
337 { }
338
339 virtual void work(unsigned int work_id) {
340 RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefEnqueue, _phase_times, work_id);
341
342 assert(work_id < (unsigned int)_ref_processor.max_num_queues(), "Index out-of-bounds");
343 // Simplest first cut: static partitioning.
344 int index = work_id;
345 // The increment on "index" must correspond to the maximum number of queues
346 // (n_queues) with which that ReferenceProcessor was created. That
347 // is because of the "clever" way the discovered references lists were
348 // allocated and are indexed into.
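// Concretely (layout as set up by the ReferenceProcessor constructor, not shown
// here): the lists are arranged as [Soft 0..n-1 | Weak 0..n-1 | Final 0..n-1 |
// Phantom 0..n-1] with n == _n_queues, so worker "work_id" handles indices
// work_id, work_id + n, work_id + 2n and work_id + 3n.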
349 assert(_n_queues == (int) _ref_processor.max_num_queues(), "Different number not expected");
350 for (int j = 0;
351 j < ReferenceProcessor::number_of_subclasses_of_ref();
352 j++, index += _n_queues) {
353 _ref_processor.enqueue_discovered_reflist(_refs_lists[index]);
354 _refs_lists[index].set_head(NULL);
355 _refs_lists[index].set_length(0);
356 }
357 }
358 };
359
360 // Enqueue references that are not made active again
361 void ReferenceProcessor::enqueue_discovered_reflists(AbstractRefProcTaskExecutor* task_executor,
362 ReferenceProcessorPhaseTimes* phase_times) {
363
364 ReferenceProcessorStats stats(total_count(_discoveredSoftRefs),
365 total_count(_discoveredWeakRefs),
366 total_count(_discoveredFinalRefs),
367 total_count(_discoveredPhantomRefs));
368
369 RefProcEnqueueTimeTracker tt(phase_times, stats);
370
371 if (_processing_is_mt && task_executor != NULL) {
372 // Parallel code
373 RefProcEnqueueTask tsk(*this, _discovered_refs, _max_num_queues, phase_times);
374 task_executor->execute(tsk);
375 } else {
376 // Serial code: enqueue each discovered list in turn.
377 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
378 enqueue_discovered_reflist(_discovered_refs[i]);
379 _discovered_refs[i].set_head(NULL);
380 _discovered_refs[i].set_length(0);
381 }
382 }
383 }
384
385 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
386 _discovered_addr = java_lang_ref_Reference::discovered_addr_raw(_current_discovered);
387 oop discovered = java_lang_ref_Reference::discovered(_current_discovered);
388 assert(_discovered_addr && oopDesc::is_oop_or_null(discovered),
389 "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
390 _next_discovered = discovered;
391
392 _referent_addr = java_lang_ref_Reference::referent_addr_raw(_current_discovered);
393 _referent = java_lang_ref_Reference::referent(_current_discovered);
394 assert(Universe::heap()->is_in_reserved_or_null(_referent),
395 "Wrong oop found in java.lang.Reference object");
396 assert(allow_null_referent ?
397 oopDesc::is_oop_or_null(_referent)
398 : oopDesc::is_oop(_referent),
399 "Expected an oop%s for referent field at " PTR_FORMAT,
400 (allow_null_referent ? " or NULL" : ""),
401 p2i(_referent));
402 }
403
404 void DiscoveredListIterator::remove() {
410 if (_next_discovered == _current_discovered) {
411 // At the end of the list: make _prev_discovered the new self-looped tail.
412 // If _current_discovered is also the first ref, _prev_discovered_addr points
413 // at the DiscoveredList's head field and _prev_discovered is NULL.
414 new_next = _prev_discovered;
415 } else {
416 new_next = _next_discovered;
417 }
418 // Remove Reference object from discovered list. Note that G1 does not need a
419 // pre-barrier here because we know the Reference has already been found/marked,
420 // that's how it ended up in the discovered list in the first place.
421 RawAccess<>::oop_store(_prev_discovered_addr, new_next);
422 NOT_PRODUCT(_removed++);
423 _refs_list.dec_length(1);
424 }
425
426 void DiscoveredListIterator::clear_referent() {
427 RawAccess<>::oop_store(_referent_addr, oop(NULL));
428 }
429
430 // NOTE: process_phase*() are largely similar, and at a high level
431 // merely iterate over the extant list applying a predicate to
432 // each of its elements and possibly removing that element from the
433 // list and applying some further closures to that element.
434 // We should consider the possibility of replacing these
435 // process_phase*() methods by abstracting them into
436 // a single general iterator invocation that receives appropriate
437 // closures that accomplish this work.
438
439 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
440 // referents are not alive, but that should be kept alive for policy reasons.
441 // Keep alive the transitive closure of all such referents.
442 void
443 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
444 ReferencePolicy* policy,
445 BoolObjectClosure* is_alive,
446 OopClosure* keep_alive,
447 VoidClosure* complete_gc) {
448 assert(policy != NULL, "Must have a non-NULL policy");
449 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
557 }
558 )
559 }
560
561 void ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
562 bool clear_referent,
563 BoolObjectClosure* is_alive,
564 OopClosure* keep_alive,
565 VoidClosure* complete_gc) {
566 ResourceMark rm;
567 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
568 while (iter.has_next()) {
569 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
570 if (clear_referent) {
571 // NULL out referent pointer
572 iter.clear_referent();
573 } else {
574 // keep the referent around
575 iter.make_referent_alive();
576 }
577 log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
578 clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
579 assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
580 iter.next();
581 }
582 // Close the reachable set
583 complete_gc->do_void();
584 }
585
586 void
587 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
588 oop obj = NULL;
589 oop next = refs_list.head();
590 while (next != obj) {
591 obj = next;
592 next = java_lang_ref_Reference::discovered(obj);
593 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
594 }
595 refs_list.set_head(NULL);
596 refs_list.set_length(0);
597 }
598
599 void ReferenceProcessor::abandon_partial_discovery() {
600 // loop over the lists
601 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
673 };
674
675 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
676 public:
677 RefProcPhase3Task(ReferenceProcessor& ref_processor,
678 DiscoveredList refs_lists[],
679 bool clear_referent,
680 bool marks_oops_alive,
681 ReferenceProcessorPhaseTimes* phase_times)
682 : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times),
683 _clear_referent(clear_referent)
684 { }
685 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
686 OopClosure& keep_alive,
687 VoidClosure& complete_gc)
688 {
689 RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, _phase_times, i);
690
691 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
692 &is_alive, &keep_alive, &complete_gc);
693 }
694 private:
695 bool _clear_referent;
696 };
697
698 #ifndef PRODUCT
699 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_refs) {
700 if (!log_is_enabled(Trace, gc, ref)) {
701 return;
702 }
703
704 stringStream st;
705 for (uint i = 0; i < active_length; ++i) {
706 st.print(SIZE_FORMAT " ", ref_lists[i].length());
707 }
708 log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
709 #ifdef ASSERT
710 for (uint i = active_length; i < _max_num_queues; i++) {
711 assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
712 ref_lists[i].length(), i);
786 }
787 ref_lists[from_idx].dec_length(refs_to_move);
788 if (ref_lists[from_idx].length() == 0) {
789 break;
790 }
791 } else {
792 to_idx = (to_idx + 1) % _num_queues;
793 }
794 }
795 }
796 #ifdef ASSERT
797 size_t balanced_total_refs = 0;
798 for (uint i = 0; i < _num_queues; ++i) {
799 balanced_total_refs += ref_lists[i].length();
800 }
801 log_reflist_counts(ref_lists, _num_queues, balanced_total_refs);
802 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
803 #endif
804 }
805
806 void ReferenceProcessor::balance_all_queues() {
807 balance_queues(_discoveredSoftRefs);
808 balance_queues(_discoveredWeakRefs);
809 balance_queues(_discoveredFinalRefs);
810 balance_queues(_discoveredPhantomRefs);
811 }
812
813 void ReferenceProcessor::process_discovered_reflist(
814 DiscoveredList refs_lists[],
815 ReferencePolicy* policy,
816 bool clear_referent,
817 BoolObjectClosure* is_alive,
818 OopClosure* keep_alive,
819 VoidClosure* complete_gc,
820 AbstractRefProcTaskExecutor* task_executor,
821 ReferenceProcessorPhaseTimes* phase_times)
822 {
823 bool mt_processing = task_executor != NULL && _processing_is_mt;
824
825 phase_times->set_processing_is_mt(mt_processing);
826
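// Balancing (see balance_queues above) redistributes the discovered References so
// that each of the _num_queues worker lists gets a roughly equal share of the work.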
827 if (mt_processing && ParallelRefProcBalancingEnabled) {
828 RefProcBalanceQueuesTimeTracker tt(phase_times);
829 balance_queues(refs_lists);
830 }
831
832 // Phase 1 (soft refs only):
861 task_executor->execute(phase2);
862 } else {
863 for (uint i = 0; i < _max_num_queues; i++) {
864 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
865 }
866 }
867 }
868
869 // Phase 3:
870 // . Traverse the list and process referents as appropriate.
871 {
872 RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, phase_times);
873
874 if (mt_processing) {
875 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/, phase_times);
876 task_executor->execute(phase3);
877 } else {
878 for (uint i = 0; i < _max_num_queues; i++) {
879 process_phase3(refs_lists[i], clear_referent,
880 is_alive, keep_alive, complete_gc);
881 }
882 }
883 }
884 }
885
886 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
887 uint id = 0;
888 // Determine the queue index to use for this object.
889 if (_discovery_is_mt) {
890 // During a multi-threaded discovery phase,
891 // each thread saves to its "own" list.
892 Thread* thr = Thread::current();
893 id = thr->as_Worker_thread()->id();
894 } else {
895 // Single-threaded discovery: save in round-robin
896 // fashion to each of the lists.
897 if (_processing_is_mt) {
898 id = next_id();
899 }
900 }
|
258 is_alive, keep_alive, complete_gc, task_executor, phase_times);
259 }
260
261 // Phantom references
262 {
263 RefProcPhaseTimesTracker tt(REF_PHANTOM, phase_times, this);
264 process_discovered_reflist(_discoveredPhantomRefs, NULL, true,
265 is_alive, keep_alive, complete_gc, task_executor, phase_times);
266 }
267
268 if (task_executor != NULL) {
269 // Record the work done by the parallel workers.
270 task_executor->set_single_threaded_mode();
271 }
272
273 phase_times->set_total_time_ms((os::elapsedTime() - start_time) * 1000);
274
275 return stats;
276 }
277
278 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
279 _discovered_addr = java_lang_ref_Reference::discovered_addr_raw(_current_discovered);
280 oop discovered = java_lang_ref_Reference::discovered(_current_discovered);
281 assert(_discovered_addr && oopDesc::is_oop_or_null(discovered),
282 "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
283 _next_discovered = discovered;
284
285 _referent_addr = java_lang_ref_Reference::referent_addr_raw(_current_discovered);
286 _referent = java_lang_ref_Reference::referent(_current_discovered);
287 assert(Universe::heap()->is_in_reserved_or_null(_referent),
288 "Wrong oop found in java.lang.Reference object");
289 assert(allow_null_referent ?
290 oopDesc::is_oop_or_null(_referent)
291 : oopDesc::is_oop(_referent),
292 "Expected an oop%s for referent field at " PTR_FORMAT,
293 (allow_null_referent ? " or NULL" : ""),
294 p2i(_referent));
295 }
296
297 void DiscoveredListIterator::remove() {
303 if (_next_discovered == _current_discovered) {
304 // At the end of the list: make _prev_discovered the new self-looped tail.
305 // If _current_discovered is also the first ref, _prev_discovered_addr points
306 // at the DiscoveredList's head field and _prev_discovered is NULL.
307 new_next = _prev_discovered;
308 } else {
309 new_next = _next_discovered;
310 }
311 // Remove Reference object from discovered list. Note that G1 does not need a
312 // pre-barrier here because we know the Reference has already been found/marked,
313 // that's how it ended up in the discovered list in the first place.
314 RawAccess<>::oop_store(_prev_discovered_addr, new_next);
315 NOT_PRODUCT(_removed++);
316 _refs_list.dec_length(1);
317 }
318
319 void DiscoveredListIterator::clear_referent() {
320 RawAccess<>::oop_store(_referent_addr, oop(NULL));
321 }
322
323 void DiscoveredListIterator::enqueue() {
324 // Self-loop next, so as to make Ref not active.
325 java_lang_ref_Reference::set_next_raw(_current_discovered, _current_discovered);
326
327 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_current_discovered,
328 java_lang_ref_Reference::discovered_offset,
329 _next_discovered);
330 }
331
332 void DiscoveredListIterator::complete_enqueue() {
333 if (_prev_discovered != NULL) {
334 // _prev_discovered is the last Reference that was enqueued.
335 // Swap refs_list into the pending list and set its
336 // discovered field to what we read from the pending list.
337 oop old = Universe::swap_reference_pending_list(_refs_list.head());
338 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_prev_discovered, java_lang_ref_Reference::discovered_offset, old);
339 }
340 }
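// Taken together, a sketch of the enqueue protocol as used by process_phase3 below:
// enqueue() makes each retained Reference inactive (self-looped next) and chains its
// discovered field to the following Reference on the list; complete_enqueue() then
// publishes the whole batch by swapping refs_list.head() into the global pending list
// and hanging the old pending-list head off the last Reference (_prev_discovered).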
341
342 // NOTE: process_phase*() are largely similar, and at a high level
343 // merely iterate over the extant list applying a predicate to
344 // each of its elements and possibly removing that element from the
345 // list and applying some further closures to that element.
346 // We should consider the possibility of replacing these
347 // process_phase*() methods by abstracting them into
348 // a single general iterator invocation that receives appropriate
349 // closures that accomplish this work.
350
351 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
352 // referents are not alive, but that should be kept alive for policy reasons.
353 // Keep alive the transitive closure of all such referents.
354 void
355 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
356 ReferencePolicy* policy,
357 BoolObjectClosure* is_alive,
358 OopClosure* keep_alive,
359 VoidClosure* complete_gc) {
360 assert(policy != NULL, "Must have a non-NULL policy");
361 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
469 }
470 )
471 }
472
473 void ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
474 bool clear_referent,
475 BoolObjectClosure* is_alive,
476 OopClosure* keep_alive,
477 VoidClosure* complete_gc) {
478 ResourceMark rm;
479 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
480 while (iter.has_next()) {
481 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
482 if (clear_referent) {
483 // NULL out referent pointer
484 iter.clear_referent();
485 } else {
486 // keep the referent around
487 iter.make_referent_alive();
488 }
489 iter.enqueue();
490 log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
491 clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
492 assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
493 iter.next();
494 }
495 iter.complete_enqueue();
496 // Close the reachable set
497 complete_gc->do_void();
498 }
499
500 void
501 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
502 oop obj = NULL;
503 oop next = refs_list.head();
504 while (next != obj) {
505 obj = next;
506 next = java_lang_ref_Reference::discovered(obj);
507 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
508 }
509 refs_list.set_head(NULL);
510 refs_list.set_length(0);
511 }
512
513 void ReferenceProcessor::abandon_partial_discovery() {
514 // loop over the lists
515 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
587 };
588
589 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
590 public:
591 RefProcPhase3Task(ReferenceProcessor& ref_processor,
592 DiscoveredList refs_lists[],
593 bool clear_referent,
594 bool marks_oops_alive,
595 ReferenceProcessorPhaseTimes* phase_times)
596 : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times),
597 _clear_referent(clear_referent)
598 { }
599 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
600 OopClosure& keep_alive,
601 VoidClosure& complete_gc)
602 {
603 RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, _phase_times, i);
604
605 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
606 &is_alive, &keep_alive, &complete_gc);
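// The References left on this list have just been made inactive and linked onto the
// pending list by process_phase3, so the per-worker discovered list is simply reset.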
607 _refs_lists[i].set_head(NULL);
608 _refs_lists[i].set_length(0);
609 }
610 private:
611 bool _clear_referent;
612 };
613
614 #ifndef PRODUCT
615 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_refs) {
616 if (!log_is_enabled(Trace, gc, ref)) {
617 return;
618 }
619
620 stringStream st;
621 for (uint i = 0; i < active_length; ++i) {
622 st.print(SIZE_FORMAT " ", ref_lists[i].length());
623 }
624 log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
625 #ifdef ASSERT
626 for (uint i = active_length; i < _max_num_queues; i++) {
627 assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
628 ref_lists[i].length(), i);
702 }
703 ref_lists[from_idx].dec_length(refs_to_move);
704 if (ref_lists[from_idx].length() == 0) {
705 break;
706 }
707 } else {
708 to_idx = (to_idx + 1) % _num_queues;
709 }
710 }
711 }
712 #ifdef ASSERT
713 size_t balanced_total_refs = 0;
714 for (uint i = 0; i < _num_queues; ++i) {
715 balanced_total_refs += ref_lists[i].length();
716 }
717 log_reflist_counts(ref_lists, _num_queues, balanced_total_refs);
718 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
719 #endif
720 }
721
722 void ReferenceProcessor::process_discovered_reflist(
723 DiscoveredList refs_lists[],
724 ReferencePolicy* policy,
725 bool clear_referent,
726 BoolObjectClosure* is_alive,
727 OopClosure* keep_alive,
728 VoidClosure* complete_gc,
729 AbstractRefProcTaskExecutor* task_executor,
730 ReferenceProcessorPhaseTimes* phase_times)
731 {
732 bool mt_processing = task_executor != NULL && _processing_is_mt;
733
734 phase_times->set_processing_is_mt(mt_processing);
735
736 if (mt_processing && ParallelRefProcBalancingEnabled) {
737 RefProcBalanceQueuesTimeTracker tt(phase_times);
738 balance_queues(refs_lists);
739 }
740
741 // Phase 1 (soft refs only):
770 task_executor->execute(phase2);
771 } else {
772 for (uint i = 0; i < _max_num_queues; i++) {
773 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
774 }
775 }
776 }
777
778 // Phase 3:
779 // . Traverse the list and process referents as appropriate.
780 {
781 RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, phase_times);
782
783 if (mt_processing) {
784 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/, phase_times);
785 task_executor->execute(phase3);
786 } else {
787 for (uint i = 0; i < _max_num_queues; i++) {
788 process_phase3(refs_lists[i], clear_referent,
789 is_alive, keep_alive, complete_gc);
790 refs_lists[i].set_head(NULL);
791 refs_lists[i].set_length(0);
792 }
793 }
794 }
795 }
796
797 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
798 uint id = 0;
799 // Determine the queue index to use for this object.
800 if (_discovery_is_mt) {
801 // During a multi-threaded discovery phase,
802 // each thread saves to its "own" list.
803 Thread* thr = Thread::current();
804 id = thr->as_Worker_thread()->id();
805 } else {
806 // Single-threaded discovery: save in round-robin
807 // fashion to each of the lists.
808 if (_processing_is_mt) {
809 id = next_id();
810 }
811 }
|