< prev index next >

src/hotspot/share/gc/cms/parNewGeneration.cpp

Print this page




  34 #include "gc/shared/ageTable.inline.hpp"
  35 #include "gc/shared/copyFailedInfo.hpp"
  36 #include "gc/shared/gcHeapSummary.hpp"
  37 #include "gc/shared/gcTimer.hpp"
  38 #include "gc/shared/gcTrace.hpp"
  39 #include "gc/shared/gcTraceTime.inline.hpp"
  40 #include "gc/shared/genOopClosures.inline.hpp"
  41 #include "gc/shared/generation.hpp"
  42 #include "gc/shared/plab.inline.hpp"
  43 #include "gc/shared/preservedMarks.inline.hpp"
  44 #include "gc/shared/referencePolicy.hpp"
  45 #include "gc/shared/referenceProcessorPhaseTimes.hpp"
  46 #include "gc/shared/space.hpp"
  47 #include "gc/shared/spaceDecorator.hpp"
  48 #include "gc/shared/strongRootsScope.hpp"
  49 #include "gc/shared/taskqueue.inline.hpp"
  50 #include "gc/shared/weakProcessor.hpp"
  51 #include "gc/shared/workgroup.hpp"
  52 #include "logging/log.hpp"
  53 #include "logging/logStream.hpp"

  54 #include "memory/resourceArea.hpp"
  55 #include "oops/access.inline.hpp"
  56 #include "oops/compressedOops.inline.hpp"
  57 #include "oops/objArrayOop.hpp"
  58 #include "oops/oop.inline.hpp"
  59 #include "runtime/atomic.hpp"
  60 #include "runtime/handles.hpp"
  61 #include "runtime/handles.inline.hpp"
  62 #include "runtime/java.hpp"
  63 #include "runtime/thread.inline.hpp"
  64 #include "utilities/copy.hpp"
  65 #include "utilities/globalDefinitions.hpp"
  66 #include "utilities/stack.inline.hpp"
  67 
  68 ParScanThreadState::ParScanThreadState(Space* to_space_,
  69                                        ParNewGeneration* young_gen_,
  70                                        Generation* old_gen_,
  71                                        int thread_num_,
  72                                        ObjToScanQueueSet* work_queue_set_,
  73                                        Stack<oop, mtGC>* overflow_stacks_,


 485     // Inform old gen that we're done.
 486     _old_gen.par_promote_alloc_done(i);
 487   }
 488 
 489   if (UseConcMarkSweepGC) {
 490     // We need to call this even when ResizeOldPLAB is disabled
 491     // so as to avoid breaking some asserts. While we may be able
 492     // to avoid this by reorganizing the code a bit, I am loathe
 493     // to do that unless we find cases where ergo leads to bad
 494     // performance.
 495     CompactibleFreeListSpaceLAB::compute_desired_plab_size();
 496   }
 497 }
 498 
// Scanning closure over the young (ParNew) generation: records the
// worker thread's scan state and caches the generation's reserved-end
// boundary, used by do_oop_work to classify referenced objects.
ParScanClosure::ParScanClosure(ParNewGeneration* g,
                               ParScanThreadState* par_scan_state) :
  OopsInClassLoaderDataOrGenClosure(g), _par_scan_state(par_scan_state), _g(g) {
  _boundary = _g->reserved().end();
}
 504 
// Virtual do_oop entry points for the four scan-closure variants; each
// forwards to ParScanClosure::do_oop_work(p, flag1, flag2). Judging by
// the closure names, the first flag requests a barrier update and the
// second marks a root scan — TODO confirm against do_oop_work's
// parameter names (not visible here).
void ParScanWithBarrierClosure::do_oop(oop* p)       { ParScanClosure::do_oop_work(p, true, false); }
void ParScanWithBarrierClosure::do_oop(narrowOop* p) { ParScanClosure::do_oop_work(p, true, false); }

void ParScanWithoutBarrierClosure::do_oop(oop* p)       { ParScanClosure::do_oop_work(p, false, false); }
void ParScanWithoutBarrierClosure::do_oop(narrowOop* p) { ParScanClosure::do_oop_work(p, false, false); }

void ParRootScanWithBarrierTwoGensClosure::do_oop(oop* p)       { ParScanClosure::do_oop_work(p, true, true); }
void ParRootScanWithBarrierTwoGensClosure::do_oop(narrowOop* p) { ParScanClosure::do_oop_work(p, true, true); }

void ParRootScanWithoutBarrierClosure::do_oop(oop* p)       { ParScanClosure::do_oop_work(p, false, true); }
void ParRootScanWithoutBarrierClosure::do_oop(narrowOop* p) { ParScanClosure::do_oop_work(p, false, true); }
 516 
// Weak-reference scanning closure for ParNew; carries the per-thread
// scan state alongside the base ScanWeakRefClosure.
ParScanWeakRefClosure::ParScanWeakRefClosure(ParNewGeneration* g,
                                             ParScanThreadState* par_scan_state)
  : ScanWeakRefClosure(g), _par_scan_state(par_scan_state)
{}

// Virtual entry points forward to the non-virtual worker do_oop_work.
void ParScanWeakRefClosure::do_oop(oop* p)       { ParScanWeakRefClosure::do_oop_work(p); }
void ParScanWeakRefClosure::do_oop(narrowOop* p) { ParScanWeakRefClosure::do_oop_work(p); }
 524 
 525 #ifdef WIN32
 526 #pragma warning(disable: 4786) /* identifier was truncated to '255' characters in the browser information */
 527 #endif
 528 
 529 ParEvacuateFollowersClosure::ParEvacuateFollowersClosure(
 530     ParScanThreadState* par_scan_state_,
 531     ParScanWithoutBarrierClosure* to_space_closure_,
 532     ParScanWithBarrierClosure* old_gen_closure_,
 533     ParRootScanWithoutBarrierClosure* to_space_root_closure_,
 534     ParNewGeneration* par_gen_,
 535     ParRootScanWithBarrierTwoGensClosure* old_gen_root_closure_,
 536     ObjToScanQueueSet* task_queues_,
 537     ParallelTaskTerminator* terminator_) :
 538 
 539     _par_scan_state(par_scan_state_),
 540     _to_space_closure(to_space_closure_),
 541     _old_gen_closure(old_gen_closure_),
 542     _to_space_root_closure(to_space_root_closure_),
 543     _old_gen_root_closure(old_gen_root_closure_),
 544     _par_gen(par_gen_),


 674     PerfDataManager::create_constant(SUN_GC, cname, PerfData::U_None,
 675                                      ParallelGCThreads, CHECK);
 676   }
 677 }
 678 
 679 // ParNewGeneration::
// Keep-alive closure for parallel reference processing. Stores the
// wrapped closure in _par_cl (in addition to passing it to the base
// class) so do_oop_work can call it directly via do_oop_nv.
ParKeepAliveClosure::ParKeepAliveClosure(ParScanWeakRefClosure* cl) :
  DefNewGeneration::KeepAliveClosure(cl), _par_cl(cl) {}
 682 
 683 template <class T>
 684 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop_work(T* p) {
 685 #ifdef ASSERT
 686   {
 687     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 688     // We never expect to see a null reference being processed
 689     // as a weak reference.
 690     assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs");
 691   }
 692 #endif // ASSERT
 693 
 694   _par_cl->do_oop_nv(p);
 695 
 696   if (CMSHeap::heap()->is_in_reserved(p)) {
 697     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);;
 698     _rs->write_ref_field_gc_par(p, obj);
 699   }
 700 }
 701 
// Virtual do_oop entry points; both forward to the template worker.
void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(oop* p)       { ParKeepAliveClosure::do_oop_work(p); }
void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(narrowOop* p) { ParKeepAliveClosure::do_oop_work(p); }
 704 
 705 // ParNewGeneration::
// Non-parallel keep-alive wrapper: defers construction entirely to
// DefNewGeneration::KeepAliveClosure; do_oop_work reaches the wrapped
// closure through the _cl member (presumably inherited from the base).
KeepAliveClosure::KeepAliveClosure(ScanWeakRefClosure* cl) :
  DefNewGeneration::KeepAliveClosure(cl) {}
 708 
 709 template <class T>
 710 void /*ParNewGeneration::*/KeepAliveClosure::do_oop_work(T* p) {
 711 #ifdef ASSERT
 712   {
 713     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 714     // We never expect to see a null reference being processed
 715     // as a weak reference.
 716     assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs");
 717   }
 718 #endif // ASSERT
 719 
 720   _cl->do_oop_nv(p);
 721 
 722   if (CMSHeap::heap()->is_in_reserved(p)) {
 723     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 724     _rs->write_ref_field_gc_par(p, obj);
 725   }
 726 }
 727 
// Virtual do_oop entry points; both forward to the template worker.
void /*ParNewGeneration::*/KeepAliveClosure::do_oop(oop* p)       { KeepAliveClosure::do_oop_work(p); }
void /*ParNewGeneration::*/KeepAliveClosure::do_oop(narrowOop* p) { KeepAliveClosure::do_oop_work(p); }
 730 
 731 template <class T> void ScanClosureWithParBarrier::do_oop_work(T* p) {
 732   T heap_oop = RawAccess<>::oop_load(p);
 733   if (!CompressedOops::is_null(heap_oop)) {
 734     oop obj = CompressedOops::decode_not_null(heap_oop);
 735     if ((HeapWord*)obj < _boundary) {
 736       assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?");
 737       oop new_obj = obj->is_forwarded()
 738                       ? obj->forwardee()
 739                       : _g->DefNewGeneration::copy_to_survivor_space(obj);
 740       RawAccess<OOP_NOT_NULL>::oop_store(p, new_obj);




  34 #include "gc/shared/ageTable.inline.hpp"
  35 #include "gc/shared/copyFailedInfo.hpp"
  36 #include "gc/shared/gcHeapSummary.hpp"
  37 #include "gc/shared/gcTimer.hpp"
  38 #include "gc/shared/gcTrace.hpp"
  39 #include "gc/shared/gcTraceTime.inline.hpp"
  40 #include "gc/shared/genOopClosures.inline.hpp"
  41 #include "gc/shared/generation.hpp"
  42 #include "gc/shared/plab.inline.hpp"
  43 #include "gc/shared/preservedMarks.inline.hpp"
  44 #include "gc/shared/referencePolicy.hpp"
  45 #include "gc/shared/referenceProcessorPhaseTimes.hpp"
  46 #include "gc/shared/space.hpp"
  47 #include "gc/shared/spaceDecorator.hpp"
  48 #include "gc/shared/strongRootsScope.hpp"
  49 #include "gc/shared/taskqueue.inline.hpp"
  50 #include "gc/shared/weakProcessor.hpp"
  51 #include "gc/shared/workgroup.hpp"
  52 #include "logging/log.hpp"
  53 #include "logging/logStream.hpp"
  54 #include "memory/iterator.inline.hpp"
  55 #include "memory/resourceArea.hpp"
  56 #include "oops/access.inline.hpp"
  57 #include "oops/compressedOops.inline.hpp"
  58 #include "oops/objArrayOop.hpp"
  59 #include "oops/oop.inline.hpp"
  60 #include "runtime/atomic.hpp"
  61 #include "runtime/handles.hpp"
  62 #include "runtime/handles.inline.hpp"
  63 #include "runtime/java.hpp"
  64 #include "runtime/thread.inline.hpp"
  65 #include "utilities/copy.hpp"
  66 #include "utilities/globalDefinitions.hpp"
  67 #include "utilities/stack.inline.hpp"
  68 
  69 ParScanThreadState::ParScanThreadState(Space* to_space_,
  70                                        ParNewGeneration* young_gen_,
  71                                        Generation* old_gen_,
  72                                        int thread_num_,
  73                                        ObjToScanQueueSet* work_queue_set_,
  74                                        Stack<oop, mtGC>* overflow_stacks_,


 486     // Inform old gen that we're done.
 487     _old_gen.par_promote_alloc_done(i);
 488   }
 489 
 490   if (UseConcMarkSweepGC) {
 491     // We need to call this even when ResizeOldPLAB is disabled
 492     // so as to avoid breaking some asserts. While we may be able
 493     // to avoid this by reorganizing the code a bit, I am loathe
 494     // to do that unless we find cases where ergo leads to bad
 495     // performance.
 496     CompactibleFreeListSpaceLAB::compute_desired_plab_size();
 497   }
 498 }
 499 
// Scanning closure over the young (ParNew) generation: records the
// worker thread's scan state and caches the generation's reserved-end
// boundary, used by do_oop_work to classify referenced objects.
ParScanClosure::ParScanClosure(ParNewGeneration* g,
                               ParScanThreadState* par_scan_state) :
  OopsInClassLoaderDataOrGenClosure(g), _par_scan_state(par_scan_state), _g(g) {
  _boundary = _g->reserved().end();
}
 505 






// Root-scan do_oop entry points; each forwards to
// ParScanClosure::do_oop_work(p, flag1, flag2). Judging by the closure
// names, the first flag requests a barrier update and the second marks a
// root scan — TODO confirm against do_oop_work's parameter names.
void ParRootScanWithBarrierTwoGensClosure::do_oop(oop* p)       { ParScanClosure::do_oop_work(p, true, true); }
void ParRootScanWithBarrierTwoGensClosure::do_oop(narrowOop* p) { ParScanClosure::do_oop_work(p, true, true); }

void ParRootScanWithoutBarrierClosure::do_oop(oop* p)       { ParScanClosure::do_oop_work(p, false, true); }
void ParRootScanWithoutBarrierClosure::do_oop(narrowOop* p) { ParScanClosure::do_oop_work(p, false, true); }
 511 
// Weak-reference scanning closure for ParNew; carries the per-thread
// scan state alongside the base ScanWeakRefClosure.
ParScanWeakRefClosure::ParScanWeakRefClosure(ParNewGeneration* g,
                                             ParScanThreadState* par_scan_state)
  : ScanWeakRefClosure(g), _par_scan_state(par_scan_state)
{}
 516 



 517 #ifdef WIN32
 518 #pragma warning(disable: 4786) /* identifier was truncated to '255' characters in the browser information */
 519 #endif
 520 
 521 ParEvacuateFollowersClosure::ParEvacuateFollowersClosure(
 522     ParScanThreadState* par_scan_state_,
 523     ParScanWithoutBarrierClosure* to_space_closure_,
 524     ParScanWithBarrierClosure* old_gen_closure_,
 525     ParRootScanWithoutBarrierClosure* to_space_root_closure_,
 526     ParNewGeneration* par_gen_,
 527     ParRootScanWithBarrierTwoGensClosure* old_gen_root_closure_,
 528     ObjToScanQueueSet* task_queues_,
 529     ParallelTaskTerminator* terminator_) :
 530 
 531     _par_scan_state(par_scan_state_),
 532     _to_space_closure(to_space_closure_),
 533     _old_gen_closure(old_gen_closure_),
 534     _to_space_root_closure(to_space_root_closure_),
 535     _old_gen_root_closure(old_gen_root_closure_),
 536     _par_gen(par_gen_),


 666     PerfDataManager::create_constant(SUN_GC, cname, PerfData::U_None,
 667                                      ParallelGCThreads, CHECK);
 668   }
 669 }
 670 
 671 // ParNewGeneration::
// Keep-alive closure for parallel reference processing. Stores the
// wrapped closure in _par_cl (in addition to passing it to the base
// class) so do_oop_work can dispatch to it without a virtual call.
ParKeepAliveClosure::ParKeepAliveClosure(ParScanWeakRefClosure* cl) :
  DefNewGeneration::KeepAliveClosure(cl), _par_cl(cl) {}
 674 
 675 template <class T>
 676 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop_work(T* p) {
 677 #ifdef ASSERT
 678   {
 679     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 680     // We never expect to see a null reference being processed
 681     // as a weak reference.
 682     assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs");
 683   }
 684 #endif // ASSERT
 685 
 686   Devirtualizer::do_oop_no_verify(_par_cl, p);
 687 
 688   if (CMSHeap::heap()->is_in_reserved(p)) {
 689     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);;
 690     _rs->write_ref_field_gc_par(p, obj);
 691   }
 692 }
 693 
// Virtual do_oop entry points; both forward to the template worker.
void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(oop* p)       { ParKeepAliveClosure::do_oop_work(p); }
void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(narrowOop* p) { ParKeepAliveClosure::do_oop_work(p); }
 696 
 697 // ParNewGeneration::
// Non-parallel keep-alive wrapper: defers construction entirely to
// DefNewGeneration::KeepAliveClosure; do_oop_work reaches the wrapped
// closure through the _cl member (presumably inherited from the base).
KeepAliveClosure::KeepAliveClosure(ScanWeakRefClosure* cl) :
  DefNewGeneration::KeepAliveClosure(cl) {}
 700 
 701 template <class T>
 702 void /*ParNewGeneration::*/KeepAliveClosure::do_oop_work(T* p) {
 703 #ifdef ASSERT
 704   {
 705     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 706     // We never expect to see a null reference being processed
 707     // as a weak reference.
 708     assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs");
 709   }
 710 #endif // ASSERT
 711 
 712   Devirtualizer::do_oop_no_verify(_cl, p);
 713 
 714   if (CMSHeap::heap()->is_in_reserved(p)) {
 715     oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 716     _rs->write_ref_field_gc_par(p, obj);
 717   }
 718 }
 719 
// Virtual do_oop entry points; both forward to the template worker.
void /*ParNewGeneration::*/KeepAliveClosure::do_oop(oop* p)       { KeepAliveClosure::do_oop_work(p); }
void /*ParNewGeneration::*/KeepAliveClosure::do_oop(narrowOop* p) { KeepAliveClosure::do_oop_work(p); }
 722 
 723 template <class T> void ScanClosureWithParBarrier::do_oop_work(T* p) {
 724   T heap_oop = RawAccess<>::oop_load(p);
 725   if (!CompressedOops::is_null(heap_oop)) {
 726     oop obj = CompressedOops::decode_not_null(heap_oop);
 727     if ((HeapWord*)obj < _boundary) {
 728       assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?");
 729       oop new_obj = obj->is_forwarded()
 730                       ? obj->forwardee()
 731                       : _g->DefNewGeneration::copy_to_survivor_space(obj);
 732       RawAccess<OOP_NOT_NULL>::oop_store(p, new_obj);


< prev index next >