
src/share/vm/gc/g1/g1CollectedHeap.cpp

rev 10513 : 8149343: assert(rp->num_q() == no_of_gc_workers) failed: sanity
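
For context: the listings below show the affected region of g1CollectedHeap.cpp twice, first as it was before this change and then as patched. The patch removes the strict assert that the reference processor's queue count equals the number of active GC workers (the assert named in the bug title) and instead has the G1STWRefProcTaskExecutor and G1ParPreserveCMReferentsTask constructors call set_active_mt_degree() with the worker count they are handed, keeping only the weaker check that the worker count does not exceed max_num_q(). The following minimal, standalone sketch uses an invented MockRefProcessor class (not the real HotSpot ReferenceProcessor) to illustrate why an exact-equality check is fragile when the active worker count can differ from the previously configured degree, and how setting the degree up front restores consistency:

#include <cassert>
#include <cstdio>

// Invented stand-in for a reference processor: it owns a fixed maximum
// number of discovered-reference queues and an active MT degree that
// says how many of them the next parallel phase will use.
class MockRefProcessor {
  unsigned _max_num_q;  // sized once for the maximum worker count
  unsigned _num_q;      // degree currently in effect
public:
  explicit MockRefProcessor(unsigned max_q)
    : _max_num_q(max_q), _num_q(max_q) {}
  unsigned num_q() const     { return _num_q; }
  unsigned max_num_q() const { return _max_num_q; }
  void set_active_mt_degree(unsigned v) {
    assert(v >= 1 && v <= _max_num_q && "degree out of range");
    _num_q = v;
  }
};

int main() {
  MockRefProcessor rp(8);       // queues sized for the maximum gang size
  unsigned active_workers = 5;  // this pause may run with fewer threads

  // Old shape of the check (HotSpot style): demand exact agreement.
  //   assert(rp->num_q() == no_of_gc_workers, "sanity");
  // With active_workers == 5 and rp.num_q() == 8 this would fire.

  // New shape: configure the degree for this phase, and assert only
  // the upper bound against the fixed number of queues.
  assert(active_workers <= rp.max_num_q() && "more workers than reference queues");
  rp.set_active_mt_degree(active_workers);
  assert(rp.num_q() == active_workers && "consistent by construction");

  printf("using %u of %u reference queues\n", rp.num_q(), rp.max_num_q());
  return 0;
}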

--- old/src/share/vm/gc/g1/g1CollectedHeap.cpp

4274 class G1STWRefProcTaskExecutor: public AbstractRefProcTaskExecutor {
4275 private:
4276   G1CollectedHeap*          _g1h;
4277   G1ParScanThreadStateSet*  _pss;
4278   RefToScanQueueSet*        _queues;
4279   WorkGang*                 _workers;
4280   uint                      _active_workers;
4281 
4282 public:
4283   G1STWRefProcTaskExecutor(G1CollectedHeap* g1h,
4284                            G1ParScanThreadStateSet* per_thread_states,
4285                            WorkGang* workers,
4286                            RefToScanQueueSet *task_queues,
4287                            uint n_workers) :
4288     _g1h(g1h),
4289     _pss(per_thread_states),
4290     _queues(task_queues),
4291     _workers(workers),
4292     _active_workers(n_workers)
4293   {
4294     assert(n_workers > 0, "shouldn't call this otherwise");
4295   }
4296 
4297   // Executes the given task using concurrent marking worker threads.
4298   virtual void execute(ProcessTask& task);
4299   virtual void execute(EnqueueTask& task);
4300 };
4301 
4302 // Gang task for possibly parallel reference processing
4303 
4304 class G1STWRefProcTaskProxy: public AbstractGangTask {
4305   typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask;
4306   ProcessTask&     _proc_task;
4307   G1CollectedHeap* _g1h;
4308   G1ParScanThreadStateSet* _pss;
4309   RefToScanQueueSet* _task_queues;
4310   ParallelTaskTerminator* _terminator;
4311 
4312 public:
4313   G1STWRefProcTaskProxy(ProcessTask& proc_task,
4314                         G1CollectedHeap* g1h,


4395 // Abstract task used to preserve (i.e. copy) any referent objects
4396 // that are in the collection set and are pointed to by reference
4397 // objects discovered by the CM ref processor.
4398 
4399 class G1ParPreserveCMReferentsTask: public AbstractGangTask {
4400 protected:
4401   G1CollectedHeap*         _g1h;
4402   G1ParScanThreadStateSet* _pss;
4403   RefToScanQueueSet*       _queues;
4404   ParallelTaskTerminator   _terminator;
4405   uint                     _n_workers;
4406 
4407 public:
4408   G1ParPreserveCMReferentsTask(G1CollectedHeap* g1h, G1ParScanThreadStateSet* per_thread_states, int workers, RefToScanQueueSet *task_queues) :
4409     AbstractGangTask("ParPreserveCMReferents"),
4410     _g1h(g1h),
4411     _pss(per_thread_states),
4412     _queues(task_queues),
4413     _terminator(workers, _queues),
4414     _n_workers(workers)
4415   { }


4416 
4417   void work(uint worker_id) {
4418     G1GCParPhaseTimesTracker x(_g1h->g1_policy()->phase_times(), G1GCPhaseTimes::PreserveCMReferents, worker_id);
4419 
4420     ResourceMark rm;
4421     HandleMark   hm;
4422 
4423     G1ParScanThreadState*          pss = _pss->state_for_worker(worker_id);
4424     pss->set_ref_processor(NULL);
4425     assert(pss->queue_is_empty(), "both queue and overflow should be empty");
4426 
4427     // Is alive closure
4428     G1AlwaysAliveClosure always_alive(_g1h);
4429 
4430     // Copying keep alive closure. Applied to referent objects that need
4431     // to be copied.
4432     G1CopyingKeepAliveClosure keep_alive(_g1h, pss->closures()->raw_strong_oops(), pss);
4433 
4434     ReferenceProcessor* rp = _g1h->ref_processor_cm();
4435 


4538   G1CopyingKeepAliveClosure keep_alive(this, pss->closures()->raw_strong_oops(), pss);
4539 
4540   // Serial Complete GC closure
4541   G1STWDrainQueueClosure drain_queue(this, pss);
4542 
4543   // Setup the soft refs policy...
4544   rp->setup_policy(false);
4545 
4546   ReferenceProcessorStats stats;
4547   if (!rp->processing_is_mt()) {
4548     // Serial reference processing...
4549     stats = rp->process_discovered_references(&is_alive,
4550                                               &keep_alive,
4551                                               &drain_queue,
4552                                               NULL,
4553                                               _gc_timer_stw);
4554   } else {
4555     uint no_of_gc_workers = workers()->active_workers();
4556 
4557     // Parallel reference processing
4558     assert(rp->num_q() == no_of_gc_workers, "sanity");
4559     assert(no_of_gc_workers <= rp->max_num_q(), "sanity");

4560 
4561     G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, no_of_gc_workers);
4562     stats = rp->process_discovered_references(&is_alive,
4563                                               &keep_alive,
4564                                               &drain_queue,
4565                                               &par_task_executor,
4566                                               _gc_timer_stw);
4567   }
4568 
4569   _gc_tracer_stw->report_gc_reference_stats(stats);
4570 
4571   // We have completed copying any necessary live referent objects.
4572   assert(pss->queue_is_empty(), "both queue and overflow should be empty");
4573 
4574   double ref_proc_time = os::elapsedTime() - ref_proc_start;
4575   g1_policy()->phase_times()->record_ref_proc_time(ref_proc_time * 1000.0);
4576 }
4577 
4578 // Weak Reference processing during an evacuation pause (part 2).
4579 void G1CollectedHeap::enqueue_discovered_references(G1ParScanThreadStateSet* per_thread_states) {
4580   double ref_enq_start = os::elapsedTime();
4581 
4582   ReferenceProcessor* rp = _ref_processor_stw;
4583   assert(!rp->discovery_enabled(), "should have been disabled as part of processing");
4584 
4585   // Now enqueue any remaining on the discovered lists on to
4586   // the pending list.
4587   if (!rp->processing_is_mt()) {
4588     // Serial reference processing...
4589     rp->enqueue_discovered_references();
4590   } else {
4591     // Parallel reference enqueueing
4592 
4593     uint n_workers = workers()->active_workers();
4594 
4595     assert(rp->num_q() == n_workers, "sanity");
4596     assert(n_workers <= rp->max_num_q(), "sanity");

4597 
4598     G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, n_workers);
4599     rp->enqueue_discovered_references(&par_task_executor);
4600   }
4601 
4602   rp->verify_no_references_recorded();
4603   assert(!rp->discovery_enabled(), "should have been disabled");
4604 
4605   // FIXME
4606   // CM's reference processing also cleans up the string and symbol tables.
4607   // Should we do that here also? We could, but it is a serial operation
4608   // and could significantly increase the pause time.
4609 
4610   double ref_enq_time = os::elapsedTime() - ref_enq_start;
4611   g1_policy()->phase_times()->record_ref_enq_time(ref_enq_time * 1000.0);
4612 }
4613 
4614 void G1CollectedHeap::merge_per_thread_state_info(G1ParScanThreadStateSet* per_thread_states) {
4615   double merge_pss_time_start = os::elapsedTime();
4616   per_thread_states->flush();

+++ new/src/share/vm/gc/g1/g1CollectedHeap.cpp

4274 class G1STWRefProcTaskExecutor: public AbstractRefProcTaskExecutor {
4275 private:
4276   G1CollectedHeap*          _g1h;
4277   G1ParScanThreadStateSet*  _pss;
4278   RefToScanQueueSet*        _queues;
4279   WorkGang*                 _workers;
4280   uint                      _active_workers;
4281 
4282 public:
4283   G1STWRefProcTaskExecutor(G1CollectedHeap* g1h,
4284                            G1ParScanThreadStateSet* per_thread_states,
4285                            WorkGang* workers,
4286                            RefToScanQueueSet *task_queues,
4287                            uint n_workers) :
4288     _g1h(g1h),
4289     _pss(per_thread_states),
4290     _queues(task_queues),
4291     _workers(workers),
4292     _active_workers(n_workers)
4293   {
4294     g1h->ref_processor_stw()->set_active_mt_degree(n_workers);
4295   }
4296 
4297   // Executes the given task using concurrent marking worker threads.
4298   virtual void execute(ProcessTask& task);
4299   virtual void execute(EnqueueTask& task);
4300 };
4301 
4302 // Gang task for possibly parallel reference processing
4303 
4304 class G1STWRefProcTaskProxy: public AbstractGangTask {
4305   typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask;
4306   ProcessTask&     _proc_task;
4307   G1CollectedHeap* _g1h;
4308   G1ParScanThreadStateSet* _pss;
4309   RefToScanQueueSet* _task_queues;
4310   ParallelTaskTerminator* _terminator;
4311 
4312 public:
4313   G1STWRefProcTaskProxy(ProcessTask& proc_task,
4314                         G1CollectedHeap* g1h,


4395 // Abstract task used to preserve (i.e. copy) any referent objects
4396 // that are in the collection set and are pointed to by reference
4397 // objects discovered by the CM ref processor.
4398 
4399 class G1ParPreserveCMReferentsTask: public AbstractGangTask {
4400 protected:
4401   G1CollectedHeap*         _g1h;
4402   G1ParScanThreadStateSet* _pss;
4403   RefToScanQueueSet*       _queues;
4404   ParallelTaskTerminator   _terminator;
4405   uint                     _n_workers;
4406 
4407 public:
4408   G1ParPreserveCMReferentsTask(G1CollectedHeap* g1h, G1ParScanThreadStateSet* per_thread_states, int workers, RefToScanQueueSet *task_queues) :
4409     AbstractGangTask("ParPreserveCMReferents"),
4410     _g1h(g1h),
4411     _pss(per_thread_states),
4412     _queues(task_queues),
4413     _terminator(workers, _queues),
4414     _n_workers(workers)
4415   { 
4416     g1h->ref_processor_cm()->set_active_mt_degree(workers);
4417   }
4418 
4419   void work(uint worker_id) {
4420     G1GCParPhaseTimesTracker x(_g1h->g1_policy()->phase_times(), G1GCPhaseTimes::PreserveCMReferents, worker_id);
4421 
4422     ResourceMark rm;
4423     HandleMark   hm;
4424 
4425     G1ParScanThreadState*          pss = _pss->state_for_worker(worker_id);
4426     pss->set_ref_processor(NULL);
4427     assert(pss->queue_is_empty(), "both queue and overflow should be empty");
4428 
4429     // Is alive closure
4430     G1AlwaysAliveClosure always_alive(_g1h);
4431 
4432     // Copying keep alive closure. Applied to referent objects that need
4433     // to be copied.
4434     G1CopyingKeepAliveClosure keep_alive(_g1h, pss->closures()->raw_strong_oops(), pss);
4435 
4436     ReferenceProcessor* rp = _g1h->ref_processor_cm();
4437 


4540   G1CopyingKeepAliveClosure keep_alive(this, pss->closures()->raw_strong_oops(), pss);
4541 
4542   // Serial Complete GC closure
4543   G1STWDrainQueueClosure drain_queue(this, pss);
4544 
4545   // Setup the soft refs policy...
4546   rp->setup_policy(false);
4547 
4548   ReferenceProcessorStats stats;
4549   if (!rp->processing_is_mt()) {
4550     // Serial reference processing...
4551     stats = rp->process_discovered_references(&is_alive,
4552                                               &keep_alive,
4553                                               &drain_queue,
4554                                               NULL,
4555                                               _gc_timer_stw);
4556   } else {
4557     uint no_of_gc_workers = workers()->active_workers();
4558 
4559     // Parallel reference processing
4560     assert(no_of_gc_workers <= rp->max_num_q(),
4561            "Mismatch between the number of GC workers %u and the maximum number of Reference process queues %u",
4562            no_of_gc_workers,  rp->max_num_q());
4563 
4564     G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, no_of_gc_workers);
4565     stats = rp->process_discovered_references(&is_alive,
4566                                               &keep_alive,
4567                                               &drain_queue,
4568                                               &par_task_executor,
4569                                               _gc_timer_stw);
4570   }
4571 
4572   _gc_tracer_stw->report_gc_reference_stats(stats);
4573 
4574   // We have completed copying any necessary live referent objects.
4575   assert(pss->queue_is_empty(), "both queue and overflow should be empty");
4576 
4577   double ref_proc_time = os::elapsedTime() - ref_proc_start;
4578   g1_policy()->phase_times()->record_ref_proc_time(ref_proc_time * 1000.0);
4579 }
4580 
4581 // Weak Reference processing during an evacuation pause (part 2).
4582 void G1CollectedHeap::enqueue_discovered_references(G1ParScanThreadStateSet* per_thread_states) {
4583   double ref_enq_start = os::elapsedTime();
4584 
4585   ReferenceProcessor* rp = _ref_processor_stw;
4586   assert(!rp->discovery_enabled(), "should have been disabled as part of processing");
4587 
4588   // Now enqueue any remaining on the discovered lists on to
4589   // the pending list.
4590   if (!rp->processing_is_mt()) {
4591     // Serial reference processing...
4592     rp->enqueue_discovered_references();
4593   } else {
4594     // Parallel reference enqueueing
4595 
4596     uint n_workers = workers()->active_workers();
4597 
4598     assert(n_workers <= rp->max_num_q(),
4599            "Mismatch between the number of GC workers %u and the maximum number of Reference process queues %u",
4600            n_workers,  rp->max_num_q());
4601 
4602     G1STWRefProcTaskExecutor par_task_executor(this, per_thread_states, workers(), _task_queues, n_workers);
4603     rp->enqueue_discovered_references(&par_task_executor);
4604   }
4605 
4606   rp->verify_no_references_recorded();
4607   assert(!rp->discovery_enabled(), "should have been disabled");
4608 
4609   // FIXME
4610   // CM's reference processing also cleans up the string and symbol tables.
4611   // Should we do that here also? We could, but it is a serial operation
4612   // and could significantly increase the pause time.
4613 
4614   double ref_enq_time = os::elapsedTime() - ref_enq_start;
4615   g1_policy()->phase_times()->record_ref_enq_time(ref_enq_time * 1000.0);
4616 }
4617 
4618 void G1CollectedHeap::merge_per_thread_state_info(G1ParScanThreadStateSet* per_thread_states) {
4619   double merge_pss_time_start = os::elapsedTime();
4620   per_thread_states->flush();


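A note on where the degree is set in the patched version: G1STWRefProcTaskExecutor configures the STW reference processor and G1ParPreserveCMReferentsTask configures the CM reference processor in their constructors, i.e. at the point where the parallel work is created with a known worker count. That way process_discovered_references() and enqueue_discovered_references() only need the max_num_q() upper-bound asserts shown above, and their formatted messages report both values if that bound is ever violated.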