src/share/vm/gc/parallel/psParallelCompact.cpp

1729 
1730   TimeStamp marking_start;
1731   TimeStamp compaction_start;
1732   TimeStamp collection_exit;
1733 
1734   GCCause::Cause gc_cause = heap->gc_cause();
1735   PSYoungGen* young_gen = heap->young_gen();
1736   PSOldGen* old_gen = heap->old_gen();
1737   PSAdaptiveSizePolicy* size_policy = heap->size_policy();
1738 
1739   // The scope of casr should end after code that can change
1740   // CollectorPolicy::_should_clear_all_soft_refs.
1741   ClearedAllSoftRefs casr(maximum_heap_compaction,
1742                           heap->collector_policy());
1743 
1744   if (ZapUnusedHeapArea) {
1745     // Save information needed to minimize mangling
1746     heap->record_gen_tops_before_GC();
1747   }
1748 
1749   heap->pre_full_gc_dump(&_gc_timer);
1750 
1751   // Make sure data structures are sane, make the heap parsable, and do other
1752   // miscellaneous bookkeeping.
1753   pre_compact();
1754 
1755   PreGCValues pre_gc_values(heap);
1756 
1757   // Get the compaction manager reserved for the VM thread.
1758   ParCompactionManager* const vmthread_cm =
1759     ParCompactionManager::manager_array(gc_task_manager()->workers());
1760 
1761   {
1762     ResourceMark rm;
1763     HandleMark hm;
1764 
1765     // Set the number of GC threads to be used in this collection
1766     gc_task_manager()->set_active_gang();
1767     gc_task_manager()->task_idle_workers();
1768 
1769     GCTraceCPUTime tcpu;
1770     GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause, true);



1771     TraceCollectorStats tcs(counters());
1772     TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);
1773 
1774     if (TraceOldGenTime) accumulated_time()->start();
1775 
1776     // Let the size policy know we're starting
1777     size_policy->major_collection_begin();
1778 
1779     CodeCache::gc_prologue();
1780 
1781 #if defined(COMPILER2) || INCLUDE_JVMCI
1782     DerivedPointerTable::clear();
1783 #endif
1784 
1785     ref_processor()->enable_discovery();
1786     ref_processor()->setup_policy(maximum_heap_compaction);
1787 
1788     bool marked_for_unloading = false;
1789 
1790     marking_start.update();


1885       counters->update_young_capacity(young_gen->capacity_in_bytes());
1886     }
1887 
1888     heap->resize_all_tlabs();
1889 
1890     // Resize the metaspace capacity after a collection
1891     MetaspaceGC::compute_new_size();
1892 
1893     if (TraceOldGenTime) {
1894       accumulated_time()->stop();
1895     }
1896 
1897     young_gen->print_used_change(pre_gc_values.young_gen_used());
1898     old_gen->print_used_change(pre_gc_values.old_gen_used());
1899     MetaspaceAux::print_metaspace_change(pre_gc_values.metadata_used());
1900 
1901     // Track memory usage and detect low memory
1902     MemoryService::track_memory_usage();
1903     heap->update_counters();
1904     gc_task_manager()->release_idle_workers();


1905   }
1906 
1907 #ifdef ASSERT
1908   for (size_t i = 0; i < ParallelGCThreads + 1; ++i) {
1909     ParCompactionManager* const cm =
1910       ParCompactionManager::manager_array(int(i));
1911     assert(cm->marking_stack()->is_empty(),       "should be empty");
1912     assert(ParCompactionManager::region_list(int(i))->is_empty(), "should be empty");
1913   }
1914 #endif // ASSERT
1915 
1916   if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
1917     HandleMark hm;  // Discard invalid handles created during verification
1918     Universe::verify("After GC");
1919   }
1920 
1921   // Re-verify object start arrays
1922   if (VerifyObjectStartArray &&
1923       VerifyAfterGC) {
1924     old_gen->verify_object_start_array();
1925   }
1926 
1927   if (ZapUnusedHeapArea) {
1928     old_gen->object_space()->check_mangled_unused_area_complete();
1929   }
1930 
1931   NOT_PRODUCT(ref_processor()->verify_no_references_recorded());
1932 
1933   collection_exit.update();
1934 
1935   heap->print_heap_after_gc();
1936   heap->trace_heap_after_gc(&_gc_tracer);
1937 
1938   log_debug(gc, task, time)("VM-Thread " JLONG_FORMAT " " JLONG_FORMAT " " JLONG_FORMAT,
1939                          marking_start.ticks(), compaction_start.ticks(),
1940                          collection_exit.ticks());
1941   gc_task_manager()->print_task_time_stamps();
1942 
1943   heap->post_full_gc_dump(&_gc_timer);
1944 
1945 #ifdef TRACESPINNING
1946   ParallelTaskTerminator::print_termination_counts();
1947 #endif
1948 
1949   AdaptiveSizePolicyOutput::print(size_policy, heap->total_collections());
1950 
1951   _gc_timer.register_gc_end();
1952 
1953   _gc_tracer.report_dense_prefix(dense_prefix(old_space_id));
1954   _gc_tracer.report_gc_end(_gc_timer.gc_end(), _gc_timer.time_partitions());
1955 
1956   return true;
1957 }
1958 
1959 bool PSParallelCompact::absorb_live_data_from_eden(PSAdaptiveSizePolicy* size_policy,
1960                                              PSYoungGen* young_gen,
1961                                              PSOldGen* old_gen) {
1962   MutableSpace* const eden_space = young_gen->eden_space();
1963   assert(!eden_space->is_empty(), "eden must be non-empty");




1729 
1730   TimeStamp marking_start;
1731   TimeStamp compaction_start;
1732   TimeStamp collection_exit;
1733 
1734   GCCause::Cause gc_cause = heap->gc_cause();
1735   PSYoungGen* young_gen = heap->young_gen();
1736   PSOldGen* old_gen = heap->old_gen();
1737   PSAdaptiveSizePolicy* size_policy = heap->size_policy();
1738 
1739   // The scope of casr should end after code that can change
1740   // CollectorPolicy::_should_clear_all_soft_refs.
1741   ClearedAllSoftRefs casr(maximum_heap_compaction,
1742                           heap->collector_policy());
1743 
1744   if (ZapUnusedHeapArea) {
1745     // Save information needed to minimize mangling
1746     heap->record_gen_tops_before_GC();
1747   }
1748 


1749   // Make sure data structures are sane, make the heap parsable, and do other
1750   // miscellaneous bookkeeping.
1751   pre_compact();
1752 
1753   PreGCValues pre_gc_values(heap);
1754 
1755   // Get the compaction manager reserved for the VM thread.
1756   ParCompactionManager* const vmthread_cm =
1757     ParCompactionManager::manager_array(gc_task_manager()->workers());
1758 
1759   {
1760     ResourceMark rm;
1761     HandleMark hm;
1762 
1763     // Set the number of GC threads to be used in this collection
1764     gc_task_manager()->set_active_gang();
1765     gc_task_manager()->task_idle_workers();
1766 
1767     GCTraceCPUTime tcpu;
1768     GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause, true);
1769 
1770     heap->pre_full_gc_dump(&_gc_timer);
1771 
1772     TraceCollectorStats tcs(counters());
1773     TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);
1774 
1775     if (TraceOldGenTime) accumulated_time()->start();
1776 
1777     // Let the size policy know we're starting
1778     size_policy->major_collection_begin();
1779 
1780     CodeCache::gc_prologue();
1781 
1782 #if defined(COMPILER2) || INCLUDE_JVMCI
1783     DerivedPointerTable::clear();
1784 #endif
1785 
1786     ref_processor()->enable_discovery();
1787     ref_processor()->setup_policy(maximum_heap_compaction);
1788 
1789     bool marked_for_unloading = false;
1790 
1791     marking_start.update();


1886       counters->update_young_capacity(young_gen->capacity_in_bytes());
1887     }
1888 
1889     heap->resize_all_tlabs();
1890 
1891     // Resize the metaspace capacity after a collection
1892     MetaspaceGC::compute_new_size();
1893 
1894     if (TraceOldGenTime) {
1895       accumulated_time()->stop();
1896     }
1897 
1898     young_gen->print_used_change(pre_gc_values.young_gen_used());
1899     old_gen->print_used_change(pre_gc_values.old_gen_used());
1900     MetaspaceAux::print_metaspace_change(pre_gc_values.metadata_used());
1901 
1902     // Track memory usage and detect low memory
1903     MemoryService::track_memory_usage();
1904     heap->update_counters();
1905     gc_task_manager()->release_idle_workers();
1906 
1907     heap->post_full_gc_dump(&_gc_timer);
1908   }
1909 
1910 #ifdef ASSERT
1911   for (size_t i = 0; i < ParallelGCThreads + 1; ++i) {
1912     ParCompactionManager* const cm =
1913       ParCompactionManager::manager_array(int(i));
1914     assert(cm->marking_stack()->is_empty(),       "should be empty");
1915     assert(ParCompactionManager::region_list(int(i))->is_empty(), "should be empty");
1916   }
1917 #endif // ASSERT
1918 
1919   if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
1920     HandleMark hm;  // Discard invalid handles created during verification
1921     Universe::verify("After GC");
1922   }
1923 
1924   // Re-verify object start arrays
1925   if (VerifyObjectStartArray &&
1926       VerifyAfterGC) {
1927     old_gen->verify_object_start_array();
1928   }
1929 
1930   if (ZapUnusedHeapArea) {
1931     old_gen->object_space()->check_mangled_unused_area_complete();
1932   }
1933 
1934   NOT_PRODUCT(ref_processor()->verify_no_references_recorded());
1935 
1936   collection_exit.update();
1937 
1938   heap->print_heap_after_gc();
1939   heap->trace_heap_after_gc(&_gc_tracer);
1940 
1941   log_debug(gc, task, time)("VM-Thread " JLONG_FORMAT " " JLONG_FORMAT " " JLONG_FORMAT,
1942                          marking_start.ticks(), compaction_start.ticks(),
1943                          collection_exit.ticks());
1944   gc_task_manager()->print_task_time_stamps();


1945 
1946 #ifdef TRACESPINNING
1947   ParallelTaskTerminator::print_termination_counts();
1948 #endif
1949 
1950   AdaptiveSizePolicyOutput::print(size_policy, heap->total_collections());
1951 
1952   _gc_timer.register_gc_end();
1953 
1954   _gc_tracer.report_dense_prefix(dense_prefix(old_space_id));
1955   _gc_tracer.report_gc_end(_gc_timer.gc_end(), _gc_timer.time_partitions());
1956 
1957   return true;
1958 }
1959 
1960 bool PSParallelCompact::absorb_live_data_from_eden(PSAdaptiveSizePolicy* size_policy,
1961                                              PSYoungGen* young_gen,
1962                                              PSOldGen* old_gen) {
1963   MutableSpace* const eden_space = young_gen->eden_space();
1964   assert(!eden_space->is_empty(), "eden must be non-empty");

