34 #include "gc/shared/copyFailedInfo.hpp" 35 #include "gc/shared/gcHeapSummary.hpp" 36 #include "gc/shared/gcTimer.hpp" 37 #include "gc/shared/gcTrace.hpp" 38 #include "gc/shared/gcTraceTime.inline.hpp" 39 #include "gc/shared/genCollectedHeap.hpp" 40 #include "gc/shared/genOopClosures.inline.hpp" 41 #include "gc/shared/generation.hpp" 42 #include "gc/shared/plab.inline.hpp" 43 #include "gc/shared/preservedMarks.inline.hpp" 44 #include "gc/shared/referencePolicy.hpp" 45 #include "gc/shared/space.hpp" 46 #include "gc/shared/spaceDecorator.hpp" 47 #include "gc/shared/strongRootsScope.hpp" 48 #include "gc/shared/taskqueue.inline.hpp" 49 #include "gc/shared/weakProcessor.hpp" 50 #include "gc/shared/workgroup.hpp" 51 #include "logging/log.hpp" 52 #include "logging/logStream.hpp" 53 #include "memory/resourceArea.hpp" 54 #include "oops/objArrayOop.hpp" 55 #include "oops/oop.inline.hpp" 56 #include "runtime/atomic.hpp" 57 #include "runtime/handles.hpp" 58 #include "runtime/handles.inline.hpp" 59 #include "runtime/java.hpp" 60 #include "runtime/thread.inline.hpp" 61 #include "utilities/copy.hpp" 62 #include "utilities/globalDefinitions.hpp" 63 #include "utilities/stack.inline.hpp" 64 65 ParScanThreadState::ParScanThreadState(Space* to_space_, 66 ParNewGeneration* young_gen_, 67 Generation* old_gen_, 68 int thread_num_, 69 ObjToScanQueueSet* work_queue_set_, 70 Stack<oop, mtGC>* overflow_stacks_, 71 PreservedMarks* preserved_marks_, 72 size_t desired_plab_sz_, 73 ParallelTaskTerminator& term_) : 662 663 if (UsePerfData) { 664 EXCEPTION_MARK; 665 ResourceMark rm; 666 667 const char* cname = 668 PerfDataManager::counter_name(_gen_counters->name_space(), "threads"); 669 PerfDataManager::create_constant(SUN_GC, cname, PerfData::U_None, 670 ParallelGCThreads, CHECK); 671 } 672 } 673 674 // ParNewGeneration:: 675 ParKeepAliveClosure::ParKeepAliveClosure(ParScanWeakRefClosure* cl) : 676 DefNewGeneration::KeepAliveClosure(cl), _par_cl(cl) {} 677 678 template <class T> 679 void 
// NOTE(review): this span is diff residue — the PRE-migration revision, using the
// old oop access API (oopDesc::is_null / load_decode_heap_oop_not_null). The
// post-migration RawAccess/CompressedOops revision of these same closures appears
// after the '|' separator later in this paste. Original source line numbers
// (680-712) are fused into the text by the extraction; do not treat them as code.
// Contents, in order:
//   - body of ParKeepAliveClosure::do_oop_work(T*): under ASSERT, checks the weak
//     ref slot is non-null and holds a valid oop; delegates to _par_cl->do_oop_nv(p);
//     then, if the slot p itself lies in the reserved heap, records the updated
//     field in the remembered set via _rs->write_ref_field_gc_par(p, obj).
//   - ParKeepAliveClosure::do_oop(oop*) / do_oop(narrowOop*): dispatch stubs into
//     the template above.
//   - KeepAliveClosure ctor: forwards cl to DefNewGeneration::KeepAliveClosure.
//   - head of KeepAliveClosure::do_oop_work(T*) (same ASSERT preamble; tail is on
//     the next physical line).
/*ParNewGeneration::*/ParKeepAliveClosure::do_oop_work(T* p) { 680 #ifdef ASSERT 681 { 682 assert(!oopDesc::is_null(*p), "expected non-null ref"); 683 oop obj = oopDesc::load_decode_heap_oop_not_null(p); 684 // We never expect to see a null reference being processed 685 // as a weak reference. 686 assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); 687 } 688 #endif // ASSERT 689 690 _par_cl->do_oop_nv(p); 691 692 if (CMSHeap::heap()->is_in_reserved(p)) { 693 oop obj = oopDesc::load_decode_heap_oop_not_null(p); 694 _rs->write_ref_field_gc_par(p, obj); 695 } 696 } 697 698 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(oop* p) { ParKeepAliveClosure::do_oop_work(p); } 699 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(narrowOop* p) { ParKeepAliveClosure::do_oop_work(p); } 700 701 // ParNewGeneration:: 702 KeepAliveClosure::KeepAliveClosure(ScanWeakRefClosure* cl) : 703 DefNewGeneration::KeepAliveClosure(cl) {} 704 705 template <class T> 706 void /*ParNewGeneration::*/KeepAliveClosure::do_oop_work(T* p) { 707 #ifdef ASSERT 708 { 709 assert(!oopDesc::is_null(*p), "expected non-null ref"); 710 oop obj = oopDesc::load_decode_heap_oop_not_null(p); 711 // We never expect to see a null reference being processed 712 // as a weak reference. 
// NOTE(review): continuation of the PRE-migration (old access API) revision;
// fused original line numbers 713-740. Contents, in order:
//   - tail of KeepAliveClosure::do_oop_work(T*): delegates to _cl->do_oop_nv(p),
//     then marks the remembered set for slot p when p is inside the reserved heap.
//   - KeepAliveClosure::do_oop(oop*/narrowOop*) dispatch stubs.
//   - head of ScanClosureWithParBarrier::do_oop_work(T*): loads the field; if
//     non-null and the object is below _boundary (i.e. in young gen), installs the
//     forwardee — copying to survivor space via DefNewGeneration::
//     copy_to_survivor_space when not yet forwarded — and stores the new oop back.
//     The _gc_barrier card-marking branch continues on the next physical line.
713 assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); 714 } 715 #endif // ASSERT 716 717 _cl->do_oop_nv(p); 718 719 if (CMSHeap::heap()->is_in_reserved(p)) { 720 oop obj = oopDesc::load_decode_heap_oop_not_null(p); 721 _rs->write_ref_field_gc_par(p, obj); 722 } 723 } 724 725 void /*ParNewGeneration::*/KeepAliveClosure::do_oop(oop* p) { KeepAliveClosure::do_oop_work(p); } 726 void /*ParNewGeneration::*/KeepAliveClosure::do_oop(narrowOop* p) { KeepAliveClosure::do_oop_work(p); } 727 728 template <class T> void ScanClosureWithParBarrier::do_oop_work(T* p) { 729 T heap_oop = oopDesc::load_heap_oop(p); 730 if (!oopDesc::is_null(heap_oop)) { 731 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); 732 if ((HeapWord*)obj < _boundary) { 733 assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?"); 734 oop new_obj = obj->is_forwarded() 735 ? obj->forwardee() 736 : _g->DefNewGeneration::copy_to_survivor_space(obj); 737 oopDesc::encode_store_heap_oop_not_null(p, new_obj); 738 } 739 if (_gc_barrier) { 740 // If p points to a younger generation, mark the card. 
741 if ((HeapWord*)obj < _gen_boundary) { 742 _rs->write_ref_field_gc_par(p, obj); 743 } 744 } 745 } 746 } 747 748 void ScanClosureWithParBarrier::do_oop(oop* p) { ScanClosureWithParBarrier::do_oop_work(p); } 749 void ScanClosureWithParBarrier::do_oop(narrowOop* p) { ScanClosureWithParBarrier::do_oop_work(p); } 750 751 class ParNewRefProcTaskProxy: public AbstractGangTask { 752 typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask; 753 public: 754 ParNewRefProcTaskProxy(ProcessTask& task, 755 ParNewGeneration& young_gen, 756 Generation& old_gen, 757 HeapWord* young_old_boundary, | 34 #include "gc/shared/copyFailedInfo.hpp" 35 #include "gc/shared/gcHeapSummary.hpp" 36 #include "gc/shared/gcTimer.hpp" 37 #include "gc/shared/gcTrace.hpp" 38 #include "gc/shared/gcTraceTime.inline.hpp" 39 #include "gc/shared/genCollectedHeap.hpp" 40 #include "gc/shared/genOopClosures.inline.hpp" 41 #include "gc/shared/generation.hpp" 42 #include "gc/shared/plab.inline.hpp" 43 #include "gc/shared/preservedMarks.inline.hpp" 44 #include "gc/shared/referencePolicy.hpp" 45 #include "gc/shared/space.hpp" 46 #include "gc/shared/spaceDecorator.hpp" 47 #include "gc/shared/strongRootsScope.hpp" 48 #include "gc/shared/taskqueue.inline.hpp" 49 #include "gc/shared/weakProcessor.hpp" 50 #include "gc/shared/workgroup.hpp" 51 #include "logging/log.hpp" 52 #include "logging/logStream.hpp" 53 #include "memory/resourceArea.hpp" 54 #include "oops/access.inline.hpp" 55 #include "oops/compressedOops.inline.hpp" 56 #include "oops/objArrayOop.hpp" 57 #include "oops/oop.inline.hpp" 58 #include "runtime/atomic.hpp" 59 #include "runtime/handles.hpp" 60 #include "runtime/handles.inline.hpp" 61 #include "runtime/java.hpp" 62 #include "runtime/thread.inline.hpp" 63 #include "utilities/copy.hpp" 64 #include "utilities/globalDefinitions.hpp" 65 #include "utilities/stack.inline.hpp" 66 67 ParScanThreadState::ParScanThreadState(Space* to_space_, 68 ParNewGeneration* young_gen_, 69 Generation* old_gen_, 70 int 
// NOTE(review): this span is the POST-migration revision (RawAccess /
// CompressedOops access API); fused original line numbers 71-712, with the
// ParScanThreadState ctor initializer list and body elided between fused lines
// 75 and 664 — do not read those fragments as contiguous code. Contents:
//   - remaining ParScanThreadState ctor parameters (work queue set, overflow
//     stacks, preserved marks, desired PLAB size, terminator).
//   - tail of an elided function: publishes the "threads" PerfData constant
//     (ParallelGCThreads) under the generation's counter name space when
//     UsePerfData is set.
//   - ParKeepAliveClosure ctor: forwards cl to DefNewGeneration::KeepAliveClosure
//     and caches it as _par_cl.
//   - ParKeepAliveClosure::do_oop_work(T*): ASSERT-checks the slot holds a valid
//     non-null oop (RawAccess<OOP_NOT_NULL> asserts non-null on load), applies
//     _par_cl->do_oop_nv(p), then dirties the remembered set for in-heap slots.
//     FIX: removed a stray double semicolon after the oop_load at fused line 694
//     ("oop_load(p);;" -> "oop_load(p);") — an empty statement left over from the
//     access-API migration.
//   - ParKeepAliveClosure::do_oop(oop*/narrowOop*) dispatch stubs, KeepAliveClosure
//     ctor, and the head of KeepAliveClosure::do_oop_work(T*) (tail on next line).
thread_num_, 71 ObjToScanQueueSet* work_queue_set_, 72 Stack<oop, mtGC>* overflow_stacks_, 73 PreservedMarks* preserved_marks_, 74 size_t desired_plab_sz_, 75 ParallelTaskTerminator& term_) : 664 665 if (UsePerfData) { 666 EXCEPTION_MARK; 667 ResourceMark rm; 668 669 const char* cname = 670 PerfDataManager::counter_name(_gen_counters->name_space(), "threads"); 671 PerfDataManager::create_constant(SUN_GC, cname, PerfData::U_None, 672 ParallelGCThreads, CHECK); 673 } 674 } 675 676 // ParNewGeneration:: 677 ParKeepAliveClosure::ParKeepAliveClosure(ParScanWeakRefClosure* cl) : 678 DefNewGeneration::KeepAliveClosure(cl), _par_cl(cl) {} 679 680 template <class T> 681 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop_work(T* p) { 682 #ifdef ASSERT 683 { 684 oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p); 685 // We never expect to see a null reference being processed 686 // as a weak reference. 687 assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); 688 } 689 #endif // ASSERT 690 691 _par_cl->do_oop_nv(p); 692 693 if (CMSHeap::heap()->is_in_reserved(p)) { 694 oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p); 695 _rs->write_ref_field_gc_par(p, obj); 696 } 697 } 698 699 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(oop* p) { ParKeepAliveClosure::do_oop_work(p); } 700 void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop(narrowOop* p) { ParKeepAliveClosure::do_oop_work(p); } 701 702 // ParNewGeneration:: 703 KeepAliveClosure::KeepAliveClosure(ScanWeakRefClosure* cl) : 704 DefNewGeneration::KeepAliveClosure(cl) {} 705 706 template <class T> 707 void /*ParNewGeneration::*/KeepAliveClosure::do_oop_work(T* p) { 708 #ifdef ASSERT 709 { 710 oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p); 711 // We never expect to see a null reference being processed 712 // as a weak reference. 
// NOTE(review): continuation of the POST-migration (RawAccess/CompressedOops)
// revision; fused original line numbers 713-757, ending at the '|' paste
// separator. Contents, in order:
//   - tail of KeepAliveClosure::do_oop_work(T*): applies _cl->do_oop_nv(p), then
//     updates the remembered set via _rs->write_ref_field_gc_par for slots inside
//     the reserved heap.
//   - KeepAliveClosure::do_oop(oop*/narrowOop*) dispatch stubs.
//   - ScanClosureWithParBarrier::do_oop_work(T*): loads the field with
//     RawAccess<>::oop_load; for non-null oops below _boundary, installs the
//     forwardee (copying to survivor space when not yet forwarded) with
//     RawAccess<>::oop_store, then, when _gc_barrier is set and the object is
//     below _gen_boundary, marks the card. Note the barrier tests the OLD obj
//     value even after new_obj was stored — preserved as-is; presumably
//     intentional since old and new copies are in the same generation, but worth
//     confirming against the upstream file.
//   - ScanClosureWithParBarrier::do_oop(oop*/narrowOop*) dispatch stubs.
//   - head of class ParNewRefProcTaskProxy (truncated at the paste boundary).
713 assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); 714 } 715 #endif // ASSERT 716 717 _cl->do_oop_nv(p); 718 719 if (CMSHeap::heap()->is_in_reserved(p)) { 720 oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p); 721 _rs->write_ref_field_gc_par(p, obj); 722 } 723 } 724 725 void /*ParNewGeneration::*/KeepAliveClosure::do_oop(oop* p) { KeepAliveClosure::do_oop_work(p); } 726 void /*ParNewGeneration::*/KeepAliveClosure::do_oop(narrowOop* p) { KeepAliveClosure::do_oop_work(p); } 727 728 template <class T> void ScanClosureWithParBarrier::do_oop_work(T* p) { 729 T heap_oop = RawAccess<>::oop_load(p); 730 if (!CompressedOops::is_null(heap_oop)) { 731 oop obj = CompressedOops::decode_not_null(heap_oop); 732 if ((HeapWord*)obj < _boundary) { 733 assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?"); 734 oop new_obj = obj->is_forwarded() 735 ? obj->forwardee() 736 : _g->DefNewGeneration::copy_to_survivor_space(obj); 737 RawAccess<>::oop_store(p, new_obj); 738 } 739 if (_gc_barrier) { 740 // If p points to a younger generation, mark the card. 741 if ((HeapWord*)obj < _gen_boundary) { 742 _rs->write_ref_field_gc_par(p, obj); 743 } 744 } 745 } 746 } 747 748 void ScanClosureWithParBarrier::do_oop(oop* p) { ScanClosureWithParBarrier::do_oop_work(p); } 749 void ScanClosureWithParBarrier::do_oop(narrowOop* p) { ScanClosureWithParBarrier::do_oop_work(p); } 750 751 class ParNewRefProcTaskProxy: public AbstractGangTask { 752 typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask; 753 public: 754 ParNewRefProcTaskProxy(ProcessTask& task, 755 ParNewGeneration& young_gen, 756 Generation& old_gen, 757 HeapWord* young_old_boundary, |