src/share/vm/gc/g1/g1CollectedHeap.cpp

rev 8816 : [mq]: 8133470-fix-plab-inline
rev 8817 : imported patch 8073013-add-detailed-information-about-plab-memory-usage
rev 8818 : imported patch jon-review-statistics
rev 8819 : imported patch dlindholm-changes
rev 8821 : imported patch move-jfr-event-to-extra-cr
rev 8822 : imported patch 8133530-add-jfr-event-for-evacuation
rev 8823 : imported patch 8040162-avoid-reallocating-plab-allocators
rev 8824 : imported patch parallel-ref-proc-fixes
rev 8825 : imported patch mikael-suggestions-pss-alloc

*** 4467,4480 ****
  G1RootProcessor* _root_processor;
  ParallelTaskTerminator _terminator;
  uint _n_workers;

public:
! G1ParTask(G1CollectedHeap* g1h, G1ParScanThreadState** pss, RefToScanQueueSet *task_queues, G1RootProcessor* root_processor, uint n_workers)
    : AbstractGangTask("G1 collection"),
      _g1h(g1h),
!     _pss(pss),
      _queues(task_queues),
      _root_processor(root_processor),
      _terminator(n_workers, _queues),
      _n_workers(n_workers)
  {}
--- 4467,4480 ----
  G1RootProcessor* _root_processor;
  ParallelTaskTerminator _terminator;
  uint _n_workers;

public:
! G1ParTask(G1CollectedHeap* g1h, G1ParScanThreadState** per_thread_states, RefToScanQueueSet *task_queues, G1RootProcessor* root_processor, uint n_workers)
    : AbstractGangTask("G1 collection"),
      _g1h(g1h),
!     _pss(per_thread_states),
      _queues(task_queues),
      _root_processor(root_processor),
      _terminator(n_workers, _queues),
      _n_workers(n_workers)
  {}
*** 4574,4592 ****
      weak_root_cl = &scan_only_root_cl;
      strong_cld_cl = &scan_only_cld_cl;
      weak_cld_cl = &scan_only_cld_cl;
    }

    _root_processor->evacuate_roots(strong_root_cl,
                                    weak_root_cl,
                                    strong_cld_cl,
                                    weak_cld_cl,
                                    trace_metadata,
                                    worker_id);

    G1ParPushHeapRSClosure push_heap_rs_cl(_g1h, pss);
-   double start_strong_roots_sec = os::elapsedTime();
    _root_processor->scan_remembered_sets(&push_heap_rs_cl,
                                          weak_root_cl,
                                          worker_id);
    double strong_roots_sec = os::elapsedTime() - start_strong_roots_sec;
--- 4574,4592 ----
      weak_root_cl = &scan_only_root_cl;
      strong_cld_cl = &scan_only_cld_cl;
      weak_cld_cl = &scan_only_cld_cl;
    }

+   double start_strong_roots_sec = os::elapsedTime();
    _root_processor->evacuate_roots(strong_root_cl,
                                    weak_root_cl,
                                    strong_cld_cl,
                                    weak_cld_cl,
                                    trace_metadata,
                                    worker_id);

    G1ParPushHeapRSClosure push_heap_rs_cl(_g1h, pss);
    _root_processor->scan_remembered_sets(&push_heap_rs_cl,
                                          weak_root_cl,
                                          worker_id);
    double strong_roots_sec = os::elapsedTime() - start_strong_roots_sec;
*** 5194,5209 ****
  FlexibleWorkGang* _workers;
  uint _active_workers;

public:
  G1STWRefProcTaskExecutor(G1CollectedHeap* g1h,
!                          G1ParScanThreadState** pss,
                           FlexibleWorkGang* workers,
                           RefToScanQueueSet *task_queues,
                           uint n_workers) :
    _g1h(g1h),
!   _pss(pss),
    _queues(task_queues),
    _workers(workers),
    _active_workers(n_workers)
  {
    assert(n_workers > 0, "shouldn't call this otherwise");
--- 5194,5209 ----
  FlexibleWorkGang* _workers;
  uint _active_workers;

public:
  G1STWRefProcTaskExecutor(G1CollectedHeap* g1h,
!                          G1ParScanThreadState** per_thread_states,
                           FlexibleWorkGang* workers,
                           RefToScanQueueSet *task_queues,
                           uint n_workers) :
    _g1h(g1h),
!   _pss(per_thread_states),
    _queues(task_queues),
    _workers(workers),
    _active_workers(n_workers)
  {
    assert(n_workers > 0, "shouldn't call this otherwise");
*** 5225,5241 ****
  ParallelTaskTerminator* _terminator;

public:
  G1STWRefProcTaskProxy(ProcessTask& proc_task,
                        G1CollectedHeap* g1h,
!                       G1ParScanThreadState** pss,
                        RefToScanQueueSet *task_queues,
                        ParallelTaskTerminator* terminator) :
    AbstractGangTask("Process reference objects in parallel"),
    _proc_task(proc_task),
    _g1h(g1h),
!   _pss(pss),
    _task_queues(task_queues),
    _terminator(terminator)
  {}

  virtual void work(uint worker_id) {
--- 5225,5241 ----
  ParallelTaskTerminator* _terminator;

public:
  G1STWRefProcTaskProxy(ProcessTask& proc_task,
                        G1CollectedHeap* g1h,
!                       G1ParScanThreadState** per_thread_states,
                        RefToScanQueueSet *task_queues,
                        ParallelTaskTerminator* terminator) :
    AbstractGangTask("Process reference objects in parallel"),
    _proc_task(proc_task),
    _g1h(g1h),
!   _pss(per_thread_states),
    _task_queues(task_queues),
    _terminator(terminator)
  {}

  virtual void work(uint worker_id) {
*** 5329,5342 ****
  RefToScanQueueSet* _queues;
  ParallelTaskTerminator _terminator;
  uint _n_workers;

public:
! G1ParPreserveCMReferentsTask(G1CollectedHeap* g1h, G1ParScanThreadState** pss, int workers, RefToScanQueueSet *task_queues) :
    AbstractGangTask("ParPreserveCMReferents"),
    _g1h(g1h),
!   _pss(pss),
    _queues(task_queues),
    _terminator(workers, _queues),
    _n_workers(workers)
  { }
--- 5329,5342 ----
  RefToScanQueueSet* _queues;
  ParallelTaskTerminator _terminator;
  uint _n_workers;

public:
! G1ParPreserveCMReferentsTask(G1CollectedHeap* g1h, G1ParScanThreadState** per_thread_states, int workers, RefToScanQueueSet *task_queues) :
    AbstractGangTask("ParPreserveCMReferents"),
    _g1h(g1h),
!   _pss(per_thread_states),
    _queues(task_queues),
    _terminator(workers, _queues),
    _n_workers(workers)
  { }
*** 5403,5413 ****
    assert(pss->queue_is_empty(), "should be");
  }
};

// Weak Reference processing during an evacuation pause (part 1).
! void G1CollectedHeap::process_discovered_references(G1ParScanThreadState** pss_) {
  double ref_proc_start = os::elapsedTime();

  ReferenceProcessor* rp = _ref_processor_stw;
  assert(rp->discovery_enabled(), "should have been enabled");
--- 5403,5413 ----
    assert(pss->queue_is_empty(), "should be");
  }
};

// Weak Reference processing during an evacuation pause (part 1).
! void G1CollectedHeap::process_discovered_references(G1ParScanThreadState** per_thread_states) {
  double ref_proc_start = os::elapsedTime();

  ReferenceProcessor* rp = _ref_processor_stw;
  assert(rp->discovery_enabled(), "should have been enabled");
*** 5433,5443 ****
  // object discovered by the STW ref processor.
  uint no_of_gc_workers = workers()->active_workers();

  G1ParPreserveCMReferentsTask keep_cm_referents(this,
!                                                pss_,
                                                 no_of_gc_workers,
                                                 _task_queues);

  workers()->run_task(&keep_cm_referents);
--- 5433,5443 ----
  // object discovered by the STW ref processor.
  uint no_of_gc_workers = workers()->active_workers();

  G1ParPreserveCMReferentsTask keep_cm_referents(this,
!                                                per_thread_states,
                                                 no_of_gc_workers,
                                                 _task_queues);

  workers()->run_task(&keep_cm_referents);
*** 5448,5458 ****
  // of JNI refs is serial and performed serially by the current thread
  // rather than by a worker. The following PSS will be used for processing
  // JNI refs.

  // Use only a single queue for this PSS.
! G1ParScanThreadState* pss = pss_[0];
  pss->set_ref_processor(NULL);
  assert(pss->queue_is_empty(), "pre-condition");

  // We do not embed a reference processor in the copying/scanning
  // closures while we're actually processing the discovered
--- 5448,5458 ----
  // of JNI refs is serial and performed serially by the current thread
  // rather than by a worker. The following PSS will be used for processing
  // JNI refs.

  // Use only a single queue for this PSS.
! G1ParScanThreadState* pss = per_thread_states[0];
  pss->set_ref_processor(NULL);
  assert(pss->queue_is_empty(), "pre-condition");

  // We do not embed a reference processor in the copying/scanning
  // closures while we're actually processing the discovered
*** 5490,5500 ****
  } else {
    // Parallel reference processing
    assert(rp->num_q() == no_of_gc_workers, "sanity");
    assert(no_of_gc_workers <= rp->max_num_q(), "sanity");

!   G1STWRefProcTaskExecutor par_task_executor(this, pss_, workers(), _task_queues, no_of_gc_workers);
    stats = rp->process_discovered_references(&is_alive,
                                              &keep_alive,
                                              &drain_queue,
                                              &par_task_executor,
                                              _gc_timer_stw,
--- 5490,5500 ----
  } else {
    // Parallel reference processing
    assert(rp->num_q() == no_of_gc_workers, "sanity");
    assert(no_of_gc_workers <= rp->max_num_q(), "sanity");

!   G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, no_of_gc_workers);
    stats = rp->process_discovered_references(&is_alive,
                                              &keep_alive,
                                              &drain_queue,
                                              &par_task_executor,
                                              _gc_timer_stw,
*** 5509,5519 ****
  double ref_proc_time = os::elapsedTime() - ref_proc_start;
  g1_policy()->phase_times()->record_ref_proc_time(ref_proc_time * 1000.0);
}

// Weak Reference processing during an evacuation pause (part 2).
! void G1CollectedHeap::enqueue_discovered_references(G1ParScanThreadState** pss) {
  double ref_enq_start = os::elapsedTime();
  ReferenceProcessor* rp = _ref_processor_stw;

  assert(!rp->discovery_enabled(), "should have been disabled as part of processing");
--- 5509,5519 ----
  double ref_proc_time = os::elapsedTime() - ref_proc_start;
  g1_policy()->phase_times()->record_ref_proc_time(ref_proc_time * 1000.0);
}

// Weak Reference processing during an evacuation pause (part 2).
! void G1CollectedHeap::enqueue_discovered_references(G1ParScanThreadState** per_thread_states) {
  double ref_enq_start = os::elapsedTime();
  ReferenceProcessor* rp = _ref_processor_stw;

  assert(!rp->discovery_enabled(), "should have been disabled as part of processing");
*** 5528,5538 ****
    uint n_workers = workers()->active_workers();

    assert(rp->num_q() == n_workers, "sanity");
    assert(n_workers <= rp->max_num_q(), "sanity");

!   G1STWRefProcTaskExecutor par_task_executor(this, pss, workers(), _task_queues, n_workers);
    rp->enqueue_discovered_references(&par_task_executor);
  }

  rp->verify_no_references_recorded();
  assert(!rp->discovery_enabled(), "should have been disabled");
--- 5528,5538 ----
    uint n_workers = workers()->active_workers();

    assert(rp->num_q() == n_workers, "sanity");
    assert(n_workers <= rp->max_num_q(), "sanity");

!   G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, n_workers);
    rp->enqueue_discovered_references(&par_task_executor);
  }

  rp->verify_no_references_recorded();
  assert(!rp->discovery_enabled(), "should have been disabled");
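The hunks above make the calling convention explicit: every parallel task is handed the full array of per-thread scan states and each worker indexes it by its worker id, while serial phases (such as the JNI reference processing above) reuse element 0 instead of allocating a fresh state. Below is a minimal standalone sketch of that pattern, assuming hypothetical PerThreadState/ParallelTask stand-ins and an assumed worker count; it is not HotSpot code.

```cpp
// Illustration only -- PerThreadState and ParallelTask are hypothetical
// stand-ins, not the HotSpot G1ParScanThreadState/G1ParTask classes.
#include <vector>

struct PerThreadState {
  unsigned worker_id;   // stand-in for the per-worker queues/PLAB state
};

// Mirrors the shape of the patched tasks: the constructor takes the whole
// array of per-thread states, and each worker picks its own slot.
struct ParallelTask {
  PerThreadState** _states;
  explicit ParallelTask(PerThreadState** per_thread_states) : _states(per_thread_states) {}
  void work(unsigned worker_id) {
    PerThreadState* pss = _states[worker_id];  // this worker's private state
    (void)pss;                                 // ...do the per-worker work here
  }
};

int main() {
  const unsigned num_workers = 4;              // assumed worker count
  std::vector<PerThreadState*> states(num_workers);
  for (unsigned i = 0; i < num_workers; i++) {
    states[i] = new PerThreadState{i};         // allocated once, reused by every phase
  }

  ParallelTask task(states.data());
  for (unsigned i = 0; i < num_workers; i++) {
    task.work(i);                              // normally dispatched by a work gang
  }

  // A serial phase reuses slot 0 rather than allocating a fresh state.
  PerThreadState* serial_pss = states[0];
  (void)serial_pss;

  for (unsigned i = 0; i < num_workers; i++) {
    delete states[i];
  }
  return 0;
}
```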