27 #include "classfile/symbolTable.hpp"
28 #include "classfile/stringTable.hpp"
29 #include "classfile/systemDictionary.hpp"
30 #include "classfile/vmSymbols.hpp"
31 #include "code/codeCache.hpp"
32 #include "code/icBuffer.hpp"
33 #include "gc/serial/defNewGeneration.hpp"
34 #include "gc/shared/adaptiveSizePolicy.hpp"
35 #include "gc/shared/cardTableBarrierSet.hpp"
36 #include "gc/shared/cardTableRS.hpp"
37 #include "gc/shared/collectedHeap.inline.hpp"
38 #include "gc/shared/collectorCounters.hpp"
39 #include "gc/shared/gcId.hpp"
40 #include "gc/shared/gcLocker.hpp"
41 #include "gc/shared/gcPolicyCounters.hpp"
42 #include "gc/shared/gcTrace.hpp"
43 #include "gc/shared/gcTraceTime.inline.hpp"
44 #include "gc/shared/genCollectedHeap.hpp"
45 #include "gc/shared/genOopClosures.inline.hpp"
46 #include "gc/shared/generationSpec.hpp"
47 #include "gc/shared/space.hpp"
48 #include "gc/shared/strongRootsScope.hpp"
49 #include "gc/shared/vmGCOperations.hpp"
50 #include "gc/shared/weakProcessor.hpp"
51 #include "gc/shared/workgroup.hpp"
52 #include "memory/filemap.hpp"
53 #include "memory/metaspaceCounters.hpp"
54 #include "memory/resourceArea.hpp"
55 #include "oops/oop.inline.hpp"
56 #include "runtime/biasedLocking.hpp"
57 #include "runtime/flags/flagSetting.hpp"
58 #include "runtime/handles.hpp"
59 #include "runtime/handles.inline.hpp"
60 #include "runtime/java.hpp"
61 #include "runtime/vmThread.hpp"
62 #include "services/management.hpp"
63 #include "services/memoryService.hpp"
64 #include "utilities/debug.hpp"
65 #include "utilities/formatBuffer.hpp"
66 #include "utilities/macros.hpp"
835 assert(code_roots != NULL, "must supply closure for code cache");
836
837 // We only visit parts of the CodeCache when scavenging.
838 CodeCache::scavenge_root_nmethods_do(code_roots);
839 }
840 if (so & SO_AllCodeCache) {
841 assert(code_roots != NULL, "must supply closure for code cache");
842
843 // CMSCollector uses this to do intermediate-strength collections.
844 // We scan the entire code cache, since CodeCache::do_unloading is not called.
845 CodeCache::blobs_do(code_roots);
846 }
847 // Verify that the code cache contents are not subject to
848 // movement by a scavenging collection.
849 DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
850 DEBUG_ONLY(CodeCache::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
851 }
852 }
853
854 void GenCollectedHeap::process_string_table_roots(StrongRootsScope* scope,
855 OopClosure* root_closure) {
856 assert(root_closure != NULL, "Must be set");
857 // All threads execute the following. A specific chunk of buckets
858 // from the StringTable are the individual tasks.
859 if (scope->n_threads() > 1) {
860 StringTable::possibly_parallel_oops_do(root_closure);
861 } else {
862 StringTable::oops_do(root_closure);
863 }
864 }
865
// Process all roots relevant to a young (scavenging) collection.
//
// root_closure    - applied to strong roots and weak roots alike (passed for
//                   both strong and weak slots of process_roots below).
// old_gen_closure - applied to old-gen oops found via the remembered set.
// cld_closure     - applied to ClassLoaderData roots (both strong and weak
//                   slots).
void GenCollectedHeap::young_process_roots(StrongRootsScope* scope,
                                           OopsInGenClosure* root_closure,
                                           OopsInGenClosure* old_gen_closure,
                                           CLDClosure* cld_closure) {
  // Scavenges fix nmethod oops, hence FixRelocations.
  MarkingCodeBlobClosure mark_code_closure(root_closure, CodeBlobToOopClosure::FixRelocations);

  // Only the scavengable portion of the code cache is visited (SO_ScavengeCodeCache).
  process_roots(scope, SO_ScavengeCodeCache, root_closure, root_closure,
                cld_closure, cld_closure, &mark_code_closure);
  process_string_table_roots(scope, root_closure);

  // Only the worker that claims the GCH_PS_younger_gens sub-task resets the
  // closure's generation bookkeeping.
  if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
    root_closure->reset_generation();
  }

  // When collection is parallel, all threads get to cooperate to do
  // old generation scanning.
  old_gen_closure->set_generation(_old_gen);
  rem_set()->younger_refs_iterate(_old_gen, old_gen_closure, scope->n_threads());
  old_gen_closure->reset_generation();

  _process_strong_tasks->all_tasks_completed(scope->n_threads());
}
888
889 void GenCollectedHeap::full_process_roots(StrongRootsScope* scope,
890 bool is_adjust_phase,
891 ScanningOption so,
892 bool only_strong_roots,
893 OopsInGenClosure* root_closure,
894 CLDClosure* cld_closure) {
895 MarkingCodeBlobClosure mark_code_closure(root_closure, is_adjust_phase);
896 OopsInGenClosure* weak_roots = only_strong_roots ? NULL : root_closure;
897 CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
898
899 process_roots(scope, so, root_closure, weak_roots, cld_closure, weak_cld_closure, &mark_code_closure);
900 if (is_adjust_phase) {
901 // We never treat the string table as roots during marking
902 // for the full gc, so we only need to process it during
903 // the adjust phase.
904 process_string_table_roots(scope, root_closure);
905 }
906
907 _process_strong_tasks->all_tasks_completed(scope->n_threads());
908 }
909
// Apply root_closure to the VM's weak oop roots: the shared weak-oop
// storages (via WeakProcessor) and the discovered-reference lists held by
// each generation's reference processor.
void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
  WeakProcessor::oops_do(root_closure);
  _young_gen->ref_processor()->weak_oops_do(root_closure);
  _old_gen->ref_processor()->weak_oops_do(root_closure);
}
915
916 bool GenCollectedHeap::no_allocs_since_save_marks() {
917 return _young_gen->no_allocs_since_save_marks() &&
918 _old_gen->no_allocs_since_save_marks();
919 }
920
921 bool GenCollectedHeap::supports_inline_contig_alloc() const {
922 return _young_gen->supports_inline_contig_alloc();
923 }
924
|
27 #include "classfile/symbolTable.hpp"
28 #include "classfile/stringTable.hpp"
29 #include "classfile/systemDictionary.hpp"
30 #include "classfile/vmSymbols.hpp"
31 #include "code/codeCache.hpp"
32 #include "code/icBuffer.hpp"
33 #include "gc/serial/defNewGeneration.hpp"
34 #include "gc/shared/adaptiveSizePolicy.hpp"
35 #include "gc/shared/cardTableBarrierSet.hpp"
36 #include "gc/shared/cardTableRS.hpp"
37 #include "gc/shared/collectedHeap.inline.hpp"
38 #include "gc/shared/collectorCounters.hpp"
39 #include "gc/shared/gcId.hpp"
40 #include "gc/shared/gcLocker.hpp"
41 #include "gc/shared/gcPolicyCounters.hpp"
42 #include "gc/shared/gcTrace.hpp"
43 #include "gc/shared/gcTraceTime.inline.hpp"
44 #include "gc/shared/genCollectedHeap.hpp"
45 #include "gc/shared/genOopClosures.inline.hpp"
46 #include "gc/shared/generationSpec.hpp"
47 #include "gc/shared/oopStorageParState.inline.hpp"
48 #include "gc/shared/space.hpp"
49 #include "gc/shared/strongRootsScope.hpp"
50 #include "gc/shared/vmGCOperations.hpp"
51 #include "gc/shared/weakProcessor.hpp"
52 #include "gc/shared/workgroup.hpp"
53 #include "memory/filemap.hpp"
54 #include "memory/metaspaceCounters.hpp"
55 #include "memory/resourceArea.hpp"
56 #include "oops/oop.inline.hpp"
57 #include "runtime/biasedLocking.hpp"
58 #include "runtime/flags/flagSetting.hpp"
59 #include "runtime/handles.hpp"
60 #include "runtime/handles.inline.hpp"
61 #include "runtime/java.hpp"
62 #include "runtime/vmThread.hpp"
63 #include "services/management.hpp"
64 #include "services/memoryService.hpp"
65 #include "utilities/debug.hpp"
66 #include "utilities/formatBuffer.hpp"
67 #include "utilities/macros.hpp"
836 assert(code_roots != NULL, "must supply closure for code cache");
837
838 // We only visit parts of the CodeCache when scavenging.
839 CodeCache::scavenge_root_nmethods_do(code_roots);
840 }
841 if (so & SO_AllCodeCache) {
842 assert(code_roots != NULL, "must supply closure for code cache");
843
844 // CMSCollector uses this to do intermediate-strength collections.
845 // We scan the entire code cache, since CodeCache::do_unloading is not called.
846 CodeCache::blobs_do(code_roots);
847 }
848 // Verify that the code cache contents are not subject to
849 // movement by a scavenging collection.
850 DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
851 DEBUG_ONLY(CodeCache::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
852 }
853 }
854
855 void GenCollectedHeap::process_string_table_roots(StrongRootsScope* scope,
856 OopClosure* root_closure,
857 OopStorage::ParState<false, false>* par_state_string) {
858 assert(root_closure != NULL, "Must be set");
859 // All threads execute the following. A specific chunk of buckets
860 // from the StringTable are the individual tasks.
861
862 // Either we should be single threaded or have a ParState
863 assert((scope->n_threads() <= 1) || par_state_string != NULL, "Parallel but not ParState");
864
865 if (scope->n_threads() > 1 && par_state_string != NULL) {
866 StringTable::possibly_parallel_oops_do(par_state_string, root_closure);
867 } else {
868 StringTable::oops_do(root_closure);
869 }
870 }
871
// Process all roots relevant to a young (scavenging) collection.
//
// root_closure     - applied to strong roots and weak roots alike (passed
//                    for both strong and weak slots of process_roots below).
// old_gen_closure  - applied to old-gen oops found via the remembered set.
// cld_closure      - applied to ClassLoaderData roots (both strong and weak
//                    slots).
// par_state_string - OopStorage iteration state for parallel StringTable
//                    scanning; forwarded to process_string_table_roots.
void GenCollectedHeap::young_process_roots(StrongRootsScope* scope,
                                           OopsInGenClosure* root_closure,
                                           OopsInGenClosure* old_gen_closure,
                                           CLDClosure* cld_closure,
                                           OopStorage::ParState<false, false>* par_state_string) {
  // Scavenges fix nmethod oops, hence FixRelocations.
  MarkingCodeBlobClosure mark_code_closure(root_closure, CodeBlobToOopClosure::FixRelocations);

  // Only the scavengable portion of the code cache is visited (SO_ScavengeCodeCache).
  process_roots(scope, SO_ScavengeCodeCache, root_closure, root_closure,
                cld_closure, cld_closure, &mark_code_closure);

  process_string_table_roots(scope, root_closure, par_state_string);

  // Only the worker that claims the GCH_PS_younger_gens sub-task resets the
  // closure's generation bookkeeping.
  if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
    root_closure->reset_generation();
  }

  // When collection is parallel, all threads get to cooperate to do
  // old generation scanning.
  old_gen_closure->set_generation(_old_gen);
  rem_set()->younger_refs_iterate(_old_gen, old_gen_closure, scope->n_threads());
  old_gen_closure->reset_generation();

  _process_strong_tasks->all_tasks_completed(scope->n_threads());
}
896
897 void GenCollectedHeap::full_process_roots(StrongRootsScope* scope,
898 bool is_adjust_phase,
899 ScanningOption so,
900 bool only_strong_roots,
901 OopsInGenClosure* root_closure,
902 CLDClosure* cld_closure,
903 OopStorage::ParState<false, false>* par_state_string) {
904 MarkingCodeBlobClosure mark_code_closure(root_closure, is_adjust_phase);
905 OopsInGenClosure* weak_roots = only_strong_roots ? NULL : root_closure;
906 CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
907
908 process_roots(scope, so, root_closure, weak_roots, cld_closure, weak_cld_closure, &mark_code_closure);
909 if (is_adjust_phase) {
910 // We never treat the string table as roots during marking
911 // for the full gc, so we only need to process it during
912 // the adjust phase.
913 process_string_table_roots(scope, root_closure, par_state_string);
914 }
915
916 _process_strong_tasks->all_tasks_completed(scope->n_threads());
917 }
918
// Apply root_closure to the VM's weak oop roots: the shared weak-oop
// storages (via WeakProcessor) and the discovered-reference lists held by
// each generation's reference processor.
void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
  WeakProcessor::oops_do(root_closure);
  _young_gen->ref_processor()->weak_oops_do(root_closure);
  _old_gen->ref_processor()->weak_oops_do(root_closure);
}
924
925 bool GenCollectedHeap::no_allocs_since_save_marks() {
926 return _young_gen->no_allocs_since_save_marks() &&
927 _old_gen->no_allocs_since_save_marks();
928 }
929
930 bool GenCollectedHeap::supports_inline_contig_alloc() const {
931 return _young_gen->supports_inline_contig_alloc();
932 }
933
|