src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp

--- old version

 931   SplitInfo& split_info = _space_info[id].split_info();
 932   if (split_info.is_valid()) {
 933     split_info.clear();
 934   }
 935   DEBUG_ONLY(split_info.verify_clear();)
 936 }
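
The DEBUG_ONLY, NOT_PRODUCT, and (later in this file) COMPILER2_PRESENT wrappers compile their argument to nothing unless the build matches: ASSERT builds for DEBUG_ONLY, non-PRODUCT builds for NOT_PRODUCT, COMPILER2 builds for COMPILER2_PRESENT. A minimal sketch of the pattern, mirroring the definitions HotSpot keeps in utilities/macros.hpp:

    #ifdef ASSERT
    #define DEBUG_ONLY(code) code
    #else
    #define DEBUG_ONLY(code)
    #endif

    #ifndef PRODUCT
    #define NOT_PRODUCT(code) code
    #else
    #define NOT_PRODUCT(code)
    #endif

    #ifdef COMPILER2
    #define COMPILER2_PRESENT(code) code
    #else
    #define COMPILER2_PRESENT(code)
    #endif
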
 937 
 938 void PSParallelCompact::pre_compact(PreGCValues* pre_gc_values)
 939 {
 940   // Update the from & to space pointers in space_info, since they are swapped
 941   // at each young gen gc.  Do the update unconditionally (even though a
 942   // promotion failure does not swap spaces) because an unknown number of minor
 943   // collections will have swapped the spaces an unknown number of times.
 944   TraceTime tm("pre compact", print_phases(), true, gclog_or_tty);
 945   ParallelScavengeHeap* heap = gc_heap();
 946   _space_info[from_space_id].set_space(heap->young_gen()->from_space());
 947   _space_info[to_space_id].set_space(heap->young_gen()->to_space());
 948 
 949   pre_gc_values->fill(heap);
 950 
 951   NOT_PRODUCT(_mark_bitmap.reset_counters());
 952   DEBUG_ONLY(add_obj_count = add_obj_size = 0;)
 953   DEBUG_ONLY(mark_bitmap_count = mark_bitmap_size = 0;)
 954 
 955   // Increment the invocation count
 956   heap->increment_total_collections(true);
 957 
 958   // We need to track unique mark sweep invocations as well.
 959   _total_invocations++;
 960 
 961   heap->print_heap_before_gc();
 962 
 963   // Fill in TLABs
 964   heap->accumulate_statistics_all_tlabs();
 965   heap->ensure_parsability(true);  // retire TLABs
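
ensure_parsability(true) retires every thread's TLAB: the unused tail of each buffer is plugged with a filler object so that a linear walk of the heap never lands in unparsable free space. A schematic sketch of the idea; the struct and the fill_with_dummy() helper are illustrative stand-ins, not HotSpot's API:

    #include <cstddef>

    struct Tlab {
      char* top;   // next free byte in the buffer
      char* end;   // end of the buffer
    };

    // Hypothetical helper: writes a skippable dummy object over [start, start+len).
    void fill_with_dummy(char* start, std::size_t len);

    void retire_tlab(Tlab& t) {
      std::size_t waste = static_cast<std::size_t>(t.end - t.top);
      if (waste > 0) {
        fill_with_dummy(t.top, waste);  // gap stays parsable for heap walkers
      }
      t.top = t.end;                    // buffer can no longer satisfy allocations
    }
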
 966 
 967   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
 968     HandleMark hm;  // Discard invalid handles created during verification
 969     Universe::verify(" VerifyBeforeGC:");
 970   }
 971 


2025     TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);
2026 
2027     if (TraceGen1Time) accumulated_time()->start();
2028 
2029     // Let the size policy know we're starting
2030     size_policy->major_collection_begin();
2031 
2032     CodeCache::gc_prologue();
2033     Threads::gc_prologue();
2034 
2035     COMPILER2_PRESENT(DerivedPointerTable::clear());
2036 
2037     ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
2038     ref_processor()->setup_policy(maximum_heap_compaction);
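
setup_policy() selects how SoftReferences are treated for this cycle: on a maximal compaction every soft reference is cleared, otherwise an LRU-style heuristic based on reference age and free heap decides. A simplified sketch of that switch (field names follow HotSpot's ReferenceProcessor, but the wiring here is abbreviated):

    // Simplified sketch: pick and snapshot the soft-reference policy.
    void ReferenceProcessor::setup_policy(bool always_clear) {
      _current_soft_ref_policy = always_clear
          ? _always_clear_soft_ref_policy   // maximal compaction: clear all
          : _default_soft_ref_policy;       // normal full GC: LRU heuristic
      _current_soft_ref_policy->setup();    // capture clock/heap state now
    }
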
2039 
2040     bool marked_for_unloading = false;
2041 
2042     marking_start.update();
2043     marking_phase(vmthread_cm, maximum_heap_compaction);
2044 
2045 #ifndef PRODUCT
2046     if (TraceParallelOldGCMarkingPhase) {
2047       gclog_or_tty->print_cr("marking_phase: cas_tries %d  cas_retries %d "
2048         "cas_by_another %d",
2049         mark_bitmap()->cas_tries(), mark_bitmap()->cas_retries(),
2050         mark_bitmap()->cas_by_another());
2051     }
2052 #endif  // #ifndef PRODUCT
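
The block above prints counters that the new version removes. They tally outcomes of the parallel mark-bitmap CAS loop: total attempts (cas_tries), attempts that failed and looped (cas_retries), and races lost because another worker had already set the bit (cas_by_another). A minimal sketch of such a loop, written with std::atomic instead of HotSpot's Atomic wrappers (all names here are illustrative):

    #include <atomic>
    #include <cstdint>

    // Illustrative reconstruction of what the removed counters measured
    // when a GC worker tried to set one mark bit with a CAS loop.
    bool par_set_bit(std::atomic<uintptr_t>& word, unsigned bit,
                     long& tries, long& retries, long& by_another) {
      const uintptr_t mask = uintptr_t(1) << bit;
      uintptr_t old_word = word.load(std::memory_order_relaxed);
      while (true) {
        ++tries;                                   // one more CAS attempt
        if (old_word & mask) {
          ++by_another;                            // lost the race: bit already set
          return false;
        }
        if (word.compare_exchange_weak(old_word, old_word | mask)) {
          return true;                             // this thread marked the object
        }
        ++retries;                                 // CAS failed; old_word reloaded, retry
      }
    }
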
2053 
2054     bool max_on_system_gc = UseMaximumCompactionOnSystemGC
2055       && gc_cause == GCCause::_java_lang_system_gc;
2056     summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);
2057 
2058     COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
2059     COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
2060 
2061     // adjust_roots() updates Universe::_intArrayKlassObj which is
2062     // needed by the compaction for filling holes in the dense prefix.
2063     adjust_roots();
2064 
2065     compaction_start.update();
2066     compact();
2067 
2068     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
2069     // done before resizing.
2070     post_compact();
2071 
2072     // Let the size policy know we're done
2073     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
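
Taken together, the calls above run the fixed phase sequence of a PSParallelCompact full collection. The outline below is an illustrative sketch of that ordering only; the real driver (PSParallelCompact::invoke_no_policy) wraps each step in the timing, policy, and verification hooks shown in the listing:

    // Illustrative outline of the phase order above, not the actual driver.
    void full_collection_outline(bool maximum_heap_compaction) {
      marking_phase(vmthread_cm, maximum_heap_compaction); // mark live objects,
                                                           // process references
      summary_phase(vmthread_cm, maximum_heap_compaction); // choose dense prefix,
                                                           // plan new locations
      adjust_roots();    // repoint roots at objects' new locations
      compact();         // slide live objects into place
      post_compact();    // reset mark bitmap and summary data before resizing
    }
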




+++ new version (identical except that the NOT_PRODUCT counter reset and the TraceParallelOldGCMarkingPhase block are removed, shifting the file line numbers)

 931   SplitInfo& split_info = _space_info[id].split_info();
 932   if (split_info.is_valid()) {
 933     split_info.clear();
 934   }
 935   DEBUG_ONLY(split_info.verify_clear();)
 936 }
 937 
 938 void PSParallelCompact::pre_compact(PreGCValues* pre_gc_values)
 939 {
 940   // Update the from & to space pointers in space_info, since they are swapped
 941   // at each young gen gc.  Do the update unconditionally (even though a
 942   // promotion failure does not swap spaces) because an unknown number of minor
 943   // collections will have swapped the spaces an unknown number of times.
 944   TraceTime tm("pre compact", print_phases(), true, gclog_or_tty);
 945   ParallelScavengeHeap* heap = gc_heap();
 946   _space_info[from_space_id].set_space(heap->young_gen()->from_space());
 947   _space_info[to_space_id].set_space(heap->young_gen()->to_space());
 948 
 949   pre_gc_values->fill(heap);
 950 
 951   DEBUG_ONLY(add_obj_count = add_obj_size = 0;)
 952   DEBUG_ONLY(mark_bitmap_count = mark_bitmap_size = 0;)
 953 
 954   // Increment the invocation count
 955   heap->increment_total_collections(true);
 956 
 957   // We need to track unique mark sweep invocations as well.
 958   _total_invocations++;
 959 
 960   heap->print_heap_before_gc();
 961 
 962   // Fill in TLABs
 963   heap->accumulate_statistics_all_tlabs();
 964   heap->ensure_parsability(true);  // retire TLABs
 965 
 966   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
 967     HandleMark hm;  // Discard invalid handles created during verification
 968     Universe::verify(" VerifyBeforeGC:");
 969   }
 970 


2024     TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);
2025 
2026     if (TraceGen1Time) accumulated_time()->start();
2027 
2028     // Let the size policy know we're starting
2029     size_policy->major_collection_begin();
2030 
2031     CodeCache::gc_prologue();
2032     Threads::gc_prologue();
2033 
2034     COMPILER2_PRESENT(DerivedPointerTable::clear());
2035 
2036     ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
2037     ref_processor()->setup_policy(maximum_heap_compaction);
2038 
2039     bool marked_for_unloading = false;
2040 
2041     marking_start.update();
2042     marking_phase(vmthread_cm, maximum_heap_compaction);
2043 
2044     bool max_on_system_gc = UseMaximumCompactionOnSystemGC
2045       && gc_cause == GCCause::_java_lang_system_gc;
2046     summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);
2047 
2048     COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
2049     COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
2050 
2051     // adjust_roots() updates Universe::_intArrayKlassObj which is
2052     // needed by the compaction for filling holes in the dense prefix.
2053     adjust_roots();
2054 
2055     compaction_start.update();
2056     compact();
2057 
2058     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
2059     // done before resizing.
2060     post_compact();
2061 
2062     // Let the size policy know we're done
2063     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);

