src/share/vm/memory/referenceProcessor.cpp

Print this page

        

*** 100,153 **** guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery || RefDiscoveryPolicy == ReferentBasedDiscovery, "Unrecongnized RefDiscoveryPolicy"); } - ReferenceProcessor* - ReferenceProcessor::create_ref_processor(MemRegion span, - bool atomic_discovery, - bool mt_discovery, - BoolObjectClosure* is_alive_non_header, - int parallel_gc_threads, - bool mt_processing, - bool dl_needs_barrier) { - int mt_degree = 1; - if (parallel_gc_threads > 1) { - mt_degree = parallel_gc_threads; - } - ReferenceProcessor* rp = - new ReferenceProcessor(span, atomic_discovery, - mt_discovery, mt_degree, - mt_processing && (parallel_gc_threads > 0), - dl_needs_barrier); - if (rp == NULL) { - vm_exit_during_initialization("Could not allocate ReferenceProcessor object"); - } - rp->set_is_alive_non_header(is_alive_non_header); - rp->setup_policy(false /* default soft ref policy */); - return rp; - } - ReferenceProcessor::ReferenceProcessor(MemRegion span, - bool atomic_discovery, - bool mt_discovery, - int mt_degree, bool mt_processing, bool discovered_list_needs_barrier) : _discovering_refs(false), _enqueuing_is_done(false), ! _is_alive_non_header(NULL), _discovered_list_needs_barrier(discovered_list_needs_barrier), _bs(NULL), _processing_is_mt(mt_processing), _next_id(0) { _span = span; _discovery_is_atomic = atomic_discovery; _discovery_is_mt = mt_discovery; ! _num_q = mt_degree; ! 
_max_num_q = mt_degree; _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref); if (_discoveredSoftRefs == NULL) { vm_exit_during_initialization("Could not allocated RefProc Array"); } _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q]; --- 100,130 ---- guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery || RefDiscoveryPolicy == ReferentBasedDiscovery, "Unrecongnized RefDiscoveryPolicy"); } ReferenceProcessor::ReferenceProcessor(MemRegion span, bool mt_processing, + int mt_processing_degree, + bool mt_discovery, + int mt_discovery_degree, + bool atomic_discovery, + BoolObjectClosure* is_alive_non_header, bool discovered_list_needs_barrier) : _discovering_refs(false), _enqueuing_is_done(false), ! _is_alive_non_header(is_alive_non_header), _discovered_list_needs_barrier(discovered_list_needs_barrier), _bs(NULL), _processing_is_mt(mt_processing), _next_id(0) { _span = span; _discovery_is_atomic = atomic_discovery; _discovery_is_mt = mt_discovery; ! _num_q = mt_processing_degree; ! _max_num_q = MAX2(mt_processing_degree, mt_discovery_degree); _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref); if (_discoveredSoftRefs == NULL) { vm_exit_during_initialization("Could not allocated RefProc Array"); } _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
*** 161,170 **** --- 138,148 ---- } // If we do barriers, cache a copy of the barrier set. if (discovered_list_needs_barrier) { _bs = Universe::heap()->barrier_set(); } + setup_policy(false /* default soft ref policy */); } #ifndef PRODUCT void ReferenceProcessor::verify_no_references_recorded() { guarantee(!_discovering_refs, "Discovering refs?");
*** 949,959 **** must_balance) { balance_queues(refs_lists); } if (PrintReferenceGC && PrintGCDetails) { size_t total = 0; ! for (int i = 0; i < _num_q; ++i) { total += refs_lists[i].length(); } gclog_or_tty->print(", %u refs", total); } --- 927,937 ---- must_balance) { balance_queues(refs_lists); } if (PrintReferenceGC && PrintGCDetails) { size_t total = 0; ! for (int i = 0; i < _max_num_q; ++i) { total += refs_lists[i].length(); } gclog_or_tty->print(", %u refs", total); }
*** 965,975 **** if (policy != NULL) { if (mt_processing) { RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/); task_executor->execute(phase1); } else { ! for (int i = 0; i < _num_q; i++) { process_phase1(refs_lists[i], policy, is_alive, keep_alive, complete_gc); } } } else { // policy == NULL --- 943,953 ---- if (policy != NULL) { if (mt_processing) { RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/); task_executor->execute(phase1); } else { ! for (int i = 0; i < _max_num_q; i++) { process_phase1(refs_lists[i], policy, is_alive, keep_alive, complete_gc); } } } else { // policy == NULL
*** 981,991 **** // . Traverse the list and remove any refs whose referents are alive. if (mt_processing) { RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/); task_executor->execute(phase2); } else { ! for (int i = 0; i < _num_q; i++) { process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc); } } // Phase 3: --- 959,969 ---- // . Traverse the list and remove any refs whose referents are alive. if (mt_processing) { RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/); task_executor->execute(phase2); } else { ! for (int i = 0; i < _max_num_q; i++) { process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc); } } // Phase 3:
*** 992,1002 **** // . Traverse the list and process referents as appropriate. if (mt_processing) { RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/); task_executor->execute(phase3); } else { ! for (int i = 0; i < _num_q; i++) { process_phase3(refs_lists[i], clear_referent, is_alive, keep_alive, complete_gc); } } } --- 970,980 ---- // . Traverse the list and process referents as appropriate. if (mt_processing) { RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/); task_executor->execute(phase3); } else { ! for (int i = 0; i < _max_num_q; i++) { process_phase3(refs_lists[i], clear_referent, is_alive, keep_alive, complete_gc); } } }