// NOTE(review): the enclosing function header is outside the visible range
// (this reads like the per-worker evacuation task body that wires up root
// scanning closures — confirm against the full file), and the chunk ends
// mid-argument-list in the g1_process_roots() call below. Code left
// byte-identical; comments only.
//
// Closure pair used for WEAK roots during an initial-mark pause: copies the
// object and marks it as promoted-from-root (G1MarkPromotedFromRoot).
4905 G1ParCopyClosure<G1BarrierNone, G1MarkPromotedFromRoot> scan_mark_weak_root_cl(_g1h, &pss, rp);
4906 G1CLDClosure<G1MarkPromotedFromRoot> scan_mark_weak_cld_cl(&scan_mark_weak_root_cl,
4907 false, // Process all klasses.
4908 true); // Need to claim CLDs.
4909
// Code-blob (nmethod) closures wrapping the plain and the marking oop
// closures declared earlier in this function (scan_only_root_cl /
// scan_mark_root_cl are not visible in this chunk).
4910 G1CodeBlobClosure scan_only_code_cl(&scan_only_root_cl);
4911 G1CodeBlobClosure scan_mark_code_cl(&scan_mark_root_cl);
4912 // IM Weak code roots are handled later.
4913
// The concrete closures used for this pause are selected below and passed
// to g1_process_roots() through these pointers.
4914 OopClosure* strong_root_cl;
4915 OopClosure* weak_root_cl;
4916 CLDClosure* strong_cld_cl;
4917 CLDClosure* weak_cld_cl;
4918 CodeBlobClosure* strong_code_cl;
4919
// Initial-mark pause: evacuated objects must also be marked, so the
// marking variants are chosen for the strong roots.
4920 if (_g1h->g1_policy()->during_initial_mark_pause()) {
4921 // We also need to mark copied objects.
4922 strong_root_cl = &scan_mark_root_cl;
4923 strong_cld_cl = &scan_mark_cld_cl;
4924 strong_code_cl = &scan_mark_code_cl;
// NOTE(review): this guard uses G1ClassUnloadingEnabled, but a later
// duplicate of this same chunk in this file (and its trace_metadata
// computation) uses ClassUnloadingWithConcurrentMark — these look like
// pre-/post-rename copies of the same code. Verify which flag name this
// codebase actually declares; only one of the two copies can compile.
4925 if (G1ClassUnloadingEnabled) {
// Class unloading: weak roots get the dedicated promoted-from-root
// marking closures so weakly reachable metadata is traced separately.
4926 weak_root_cl = &scan_mark_weak_root_cl;
4927 weak_cld_cl = &scan_mark_weak_cld_cl;
4928 } else {
// No class unloading: weak roots are treated like strong roots.
4929 weak_root_cl = &scan_mark_root_cl;
4930 weak_cld_cl = &scan_mark_cld_cl;
4931 }
4932 } else {
// Ordinary (non initial-mark) pause: plain copy closures everywhere,
// no marking needed.
4933 strong_root_cl = &scan_only_root_cl;
4934 weak_root_cl = &scan_only_root_cl;
4935 strong_cld_cl = &scan_only_cld_cl;
4936 weak_cld_cl = &scan_only_cld_cl;
4937 strong_code_cl = &scan_only_code_cl;
4938 }
4939
4940
// Closure that pushes remembered-set entries onto the per-thread scan state.
4941 G1ParPushHeapRSClosure push_heap_rs_cl(_g1h, &pss);
4942
// Start the strong-root phase (presumably starts per-worker timing on the
// scan state — confirm in G1ParScanThreadState) and hand the selected
// closures to the shared root-processing entry point. The call's remaining
// arguments are outside the visible range.
4943 pss.start_strong_roots();
4944 _g1h->g1_process_roots(strong_root_cl,
4945 weak_root_cl,
4982
4983 // *** Common G1 Evacuation Stuff
4984
4985 // This method is run in a GC worker.
4986
// Scans the shared (non-heap) roots for one GC worker and feeds them to the
// supplied closures.
//
// NOTE(review): the body is truncated at the edge of this chunk (it continues
// past the visible range); comments below cover only what is visible. Code
// left byte-identical.
//
// Parameters, as used in the visible portion:
//   scan_non_heap_roots      - applied (via a BufferingOopClosure) to strong
//                              non-heap roots and to the CM ref processor's
//                              discovered lists.
//   scan_non_heap_weak_roots - applied (via a BufferingOopClosure) to weak
//                              non-heap roots.
//   scan_rs                  - not used in the visible portion; presumably
//                              drives remembered-set scanning later in the
//                              body — confirm against the full function.
//   scan_strong_clds         - applied to strongly reachable class loader data.
//   scan_weak_clds           - applied to weak CLDs, but suppressed (NULL)
//                              when metadata is being traced (see below).
//   scan_strong_code         - applied to strongly reachable code blobs.
//   worker_i                 - not used in the visible portion; presumably a
//                              worker id for phase timing — confirm.
4987 void
4988 G1CollectedHeap::
4989 g1_process_roots(OopClosure* scan_non_heap_roots,
4990 OopClosure* scan_non_heap_weak_roots,
4991 OopsInHeapRegionClosure* scan_rs,
4992 CLDClosure* scan_strong_clds,
4993 CLDClosure* scan_weak_clds,
4994 CodeBlobClosure* scan_strong_code,
4995 uint worker_i) {
4996
4997 // First scan the shared roots.
4998 double ext_roots_start = os::elapsedTime();
4999 double closure_app_time_sec = 0.0;
5000
// Metadata is traced (weak CLDs deferred) only during an initial-mark pause
// with class unloading enabled.
// NOTE(review): this copy of the chunk reads G1ClassUnloadingEnabled while a
// later duplicate in this file reads ClassUnloadingWithConcurrentMark —
// pre-/post-rename copies; verify which flag this codebase declares.
5001 bool during_im = _g1h->g1_policy()->during_initial_mark_pause();
5002 bool trace_metadata = during_im && G1ClassUnloadingEnabled;
5003
// Buffer the oop closures to batch root applications (reduces per-oop
// virtual-call overhead).
5004 BufferingOopClosure buf_scan_non_heap_roots(scan_non_heap_roots);
5005 BufferingOopClosure buf_scan_non_heap_weak_roots(scan_non_heap_weak_roots);
5006
5007 process_roots(false, // no scoping; this is parallel code
5008 SharedHeap::SO_None,
5009 &buf_scan_non_heap_roots,
5010 &buf_scan_non_heap_weak_roots,
5011 scan_strong_clds,
5012 // Unloading Initial Marks handle the weak CLDs separately.
5013 (trace_metadata ? NULL : scan_weak_clds),
5014 scan_strong_code);
5015
5016 // Now the CM ref_processor roots.
// Claimed by exactly one worker; the visible body ends inside this branch.
5017 if (!_process_strong_tasks->is_task_claimed(G1H_PS_refProcessor_oops_do)) {
5018 // We need to treat the discovered reference lists of the
5019 // concurrent mark ref processor as roots and keep entries
5020 // (which are added by the marking threads) on them live
5021 // until they can be processed at the end of marking.
5022 ref_processor_cm()->weak_oops_do(&buf_scan_non_heap_roots);
|
// NOTE(review): this is a second, near-identical copy of the closure-setup
// chunk that also appears earlier in this file; the only difference is that
// this copy tests ClassUnloadingWithConcurrentMark where the earlier copy
// tests G1ClassUnloadingEnabled (pre-/post-rename versions of the same code
// — only one flag name can exist in a given build; confirm and drop the
// stale copy). Enclosing function header is outside the visible range and
// the chunk ends mid-call. Code left byte-identical; comments only.
//
// Closure pair used for WEAK roots during an initial-mark pause: copies the
// object and marks it as promoted-from-root.
4905 G1ParCopyClosure<G1BarrierNone, G1MarkPromotedFromRoot> scan_mark_weak_root_cl(_g1h, &pss, rp);
4906 G1CLDClosure<G1MarkPromotedFromRoot> scan_mark_weak_cld_cl(&scan_mark_weak_root_cl,
4907 false, // Process all klasses.
4908 true); // Need to claim CLDs.
4909
// Code-blob closures wrapping the plain and marking oop closures declared
// earlier in the (unseen) function body.
4910 G1CodeBlobClosure scan_only_code_cl(&scan_only_root_cl);
4911 G1CodeBlobClosure scan_mark_code_cl(&scan_mark_root_cl);
4912 // IM Weak code roots are handled later.
4913
// Closures for this pause are selected below and passed to g1_process_roots().
4914 OopClosure* strong_root_cl;
4915 OopClosure* weak_root_cl;
4916 CLDClosure* strong_cld_cl;
4917 CLDClosure* weak_cld_cl;
4918 CodeBlobClosure* strong_code_cl;
4919
// Initial-mark pause: evacuated objects must also be marked.
4920 if (_g1h->g1_policy()->during_initial_mark_pause()) {
4921 // We also need to mark copied objects.
4922 strong_root_cl = &scan_mark_root_cl;
4923 strong_cld_cl = &scan_mark_cld_cl;
4924 strong_code_cl = &scan_mark_code_cl;
// Class unloading with concurrent mark: weak roots get dedicated
// promoted-from-root marking closures.
4925 if (ClassUnloadingWithConcurrentMark) {
4926 weak_root_cl = &scan_mark_weak_root_cl;
4927 weak_cld_cl = &scan_mark_weak_cld_cl;
4928 } else {
// No class unloading: weak roots treated like strong roots.
4929 weak_root_cl = &scan_mark_root_cl;
4930 weak_cld_cl = &scan_mark_cld_cl;
4931 }
4932 } else {
// Ordinary pause: plain copy closures everywhere, no marking.
4933 strong_root_cl = &scan_only_root_cl;
4934 weak_root_cl = &scan_only_root_cl;
4935 strong_cld_cl = &scan_only_cld_cl;
4936 weak_cld_cl = &scan_only_cld_cl;
4937 strong_code_cl = &scan_only_code_cl;
4938 }
4939
4940
// Closure that pushes remembered-set entries onto the per-thread scan state.
4941 G1ParPushHeapRSClosure push_heap_rs_cl(_g1h, &pss);
4942
// Begin strong-root phase; remaining g1_process_roots() arguments are
// outside the visible range.
4943 pss.start_strong_roots();
4944 _g1h->g1_process_roots(strong_root_cl,
4945 weak_root_cl,
4982
4983 // *** Common G1 Evacuation Stuff
4984
4985 // This method is run in a GC worker.
4986
// Scans the shared (non-heap) roots for one GC worker and feeds them to the
// supplied closures.
//
// NOTE(review): second, near-identical copy of g1_process_roots in this
// file; it differs from the earlier copy only in reading
// ClassUnloadingWithConcurrentMark instead of G1ClassUnloadingEnabled
// (pre-/post-rename versions — confirm which flag this codebase declares
// and remove the stale copy). The body is truncated at the edge of this
// chunk; code left byte-identical.
//
// Parameters, as used in the visible portion:
//   scan_non_heap_roots      - applied (buffered) to strong non-heap roots
//                              and to the CM ref processor's discovered lists.
//   scan_non_heap_weak_roots - applied (buffered) to weak non-heap roots.
//   scan_rs                  - unused here; presumably drives remembered-set
//                              scanning later in the body — confirm.
//   scan_strong_clds         - applied to strongly reachable CLDs.
//   scan_weak_clds           - suppressed (NULL) when metadata is traced.
//   scan_strong_code         - applied to strongly reachable code blobs.
//   worker_i                 - unused here; presumably for phase timing.
4987 void
4988 G1CollectedHeap::
4989 g1_process_roots(OopClosure* scan_non_heap_roots,
4990 OopClosure* scan_non_heap_weak_roots,
4991 OopsInHeapRegionClosure* scan_rs,
4992 CLDClosure* scan_strong_clds,
4993 CLDClosure* scan_weak_clds,
4994 CodeBlobClosure* scan_strong_code,
4995 uint worker_i) {
4996
4997 // First scan the shared roots.
4998 double ext_roots_start = os::elapsedTime();
4999 double closure_app_time_sec = 0.0;
5000
// Metadata is traced (weak CLDs deferred) only during an initial-mark pause
// with class unloading via concurrent mark enabled.
5001 bool during_im = _g1h->g1_policy()->during_initial_mark_pause();
5002 bool trace_metadata = during_im && ClassUnloadingWithConcurrentMark;
5003
// Buffer the oop closures to batch root applications.
5004 BufferingOopClosure buf_scan_non_heap_roots(scan_non_heap_roots);
5005 BufferingOopClosure buf_scan_non_heap_weak_roots(scan_non_heap_weak_roots);
5006
5007 process_roots(false, // no scoping; this is parallel code
5008 SharedHeap::SO_None,
5009 &buf_scan_non_heap_roots,
5010 &buf_scan_non_heap_weak_roots,
5011 scan_strong_clds,
5012 // Unloading Initial Marks handle the weak CLDs separately.
5013 (trace_metadata ? NULL : scan_weak_clds),
5014 scan_strong_code);
5015
5016 // Now the CM ref_processor roots.
// Claimed by exactly one worker; the visible body ends inside this branch.
5017 if (!_process_strong_tasks->is_task_claimed(G1H_PS_refProcessor_oops_do)) {
5018 // We need to treat the discovered reference lists of the
5019 // concurrent mark ref processor as roots and keep entries
5020 // (which are added by the marking threads) on them live
5021 // until they can be processed at the end of marking.
5022 ref_processor_cm()->weak_oops_do(&buf_scan_non_heap_roots);
|