1858 for (int pr = 0; pr < last; ++pr) {
1859 _priority_buffer[pr] = _priority_buffer[pr + 1];
1860 }
1861 _priority_buffer[last] = retired_and_set;
1862 }
1863 };
1864
// Per-GC-worker scan state (one per queue_num). Stack-allocated for the
// duration of an evacuation pause; holds the worker's scan queue, PLABs,
// timing/termination statistics and remembered-set update plumbing.
class G1ParScanThreadState : public StackObj {
protected:
  G1CollectedHeap* _g1h;
  RefToScanQueue* _refs;                 // this worker's queue of references still to scan
  DirtyCardQueue _dcq;                   // cards enqueued by deferred_rs_update()
  G1SATBCardTableModRefBS* _ct_bs;       // card table, accessed via ctbs() (not visible here)
  G1RemSet* _g1_rem;                     // target of immediate_rs_update()

  // Per-purpose promotion (PLAB) buffers; _alloc_buffers is indexed
  // directly by GCAllocPurpose and points at the two containers above.
  G1ParGCAllocBufferContainer _surviving_alloc_buffer;
  G1ParGCAllocBufferContainer _tenured_alloc_buffer;
  G1ParGCAllocBufferContainer* _alloc_buffers[GCAllocPurposeCount];
  ageTable _age_table;

  size_t _alloc_buffer_waste;            // words left unused in retired buffers
  size_t _undo_waste;                    // words wasted by undone allocations

  OopsInHeapRegionClosure* _evac_failure_cl;
  G1ParScanHeapEvacClosure* _evac_cl;    // applied to ordinary references
  G1ParScanPartialArrayClosure* _partial_scan_cl; // applied to partial-array tasks

  int _hash_seed;                        // seed for work-stealing randomization
  uint _queue_num;                       // index of this worker's queue

  size_t _term_attempts;                 // number of termination-protocol attempts

  // Wall-clock accounting for this worker's phases (seconds).
  double _start;
  double _start_strong_roots;
  double _strong_roots_time;
  double _start_term;
  double _term_time;

  // Map from young-age-index (0 == not young, 1 is youngest) to
  // surviving words. base is what we get back from the malloc call
1911 template <class T> void immediate_rs_update(HeapRegion* from, T* p, int tid) {
1912 if (!from->is_survivor()) {
1913 _g1_rem->par_write_ref(from, p, tid);
1914 }
1915 }
1916
1917 template <class T> void deferred_rs_update(HeapRegion* from, T* p, int tid) {
1918 // If the new value of the field points to the same region or
1919 // is the to-space, we don't need to include it in the Rset updates.
1920 if (!from->is_in_reserved(oopDesc::load_decode_heap_oop(p)) && !from->is_survivor()) {
1921 size_t card_index = ctbs()->index_for(p);
1922 // If the card hasn't been added to the buffer, do it.
1923 if (ctbs()->mark_card_deferred(card_index)) {
1924 dirty_card_queue().enqueue((jbyte*)ctbs()->byte_for_index(card_index));
1925 }
1926 }
1927 }
1928
public:
  G1ParScanThreadState(G1CollectedHeap* g1h, uint queue_num);

  ~G1ParScanThreadState() {
    // Release the C-heap array backing the surviving-young-words map;
    // presumably allocated in the constructor (not visible in this excerpt).
    FREE_C_HEAP_ARRAY(size_t, _surviving_young_words_base, mtGC);
  }
1935
  RefToScanQueue* refs() { return _refs; }
  ageTable* age_table() { return &_age_table; }

  // Allocation buffer for the given purpose; _alloc_buffers is indexed
  // directly by GCAllocPurpose.
  G1ParGCAllocBufferContainer* alloc_buffer(GCAllocPurpose purpose) {
    return _alloc_buffers[purpose];
  }

  size_t alloc_buffer_waste() const { return _alloc_buffer_waste; }
  size_t undo_waste() const { return _undo_waste; }

#ifdef ASSERT
  // Debug-only sanity checks for references/tasks popped from the queues.
  bool verify_ref(narrowOop* ref) const;
  bool verify_ref(oop* ref) const;
  bool verify_task(StarTask ref) const;
#endif // ASSERT
  // Print the per-worker termination statistics (header and one row for
  // worker i) to the given stream, defaulting to the GC log.
  static void
  print_termination_stats_hdr(outputStream* const st = gclog_or_tty);
  void
  print_termination_stats(int i, outputStream* const st = gclog_or_tty) const;

  size_t* surviving_young_words() {
    // We add on to hide entry 0 which accumulates surviving words for
    // age -1 regions (i.e. non-young ones)
    // NOTE(review): the "+1" offset is applied where _surviving_young_words
    // is initialized, which is outside this excerpt — confirm there.
    return _surviving_young_words;
  }
2054
2055 void retire_alloc_buffers() {
2056 for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
2057 size_t waste = _alloc_buffers[ap]->words_remaining();
2058 add_to_alloc_buffer_waste(waste);
2059 _alloc_buffers[ap]->flush_stats_and_retire(_g1h->stats_for_purpose((GCAllocPurpose)ap),
2060 true /* end_of_gc */,
2061 false /* retain */);
2062 }
2063 }
2064
2065 template <class T> void deal_with_reference(T* ref_to_scan) {
2066 if (has_partial_array_mask(ref_to_scan)) {
2067 _partial_scan_cl->do_oop_nv(ref_to_scan);
2068 } else {
2069 // Note: we can use "raw" versions of "region_containing" because
2070 // "obj_to_scan" is definitely in the heap, and is not in a
2071 // humongous region.
2072 HeapRegion* r = _g1h->heap_region_containing_raw(ref_to_scan);
2073 _evac_cl->set_region(r);
2074 _evac_cl->do_oop_nv(ref_to_scan);
2075 }
2076 }
2077
2078 void deal_with_reference(StarTask ref) {
2079 assert(verify_task(ref), "sanity");
2080 if (ref.is_narrow()) {
2081 deal_with_reference((narrowOop*)ref);
2082 } else {
2083 deal_with_reference((oop*)ref);
|
1858 for (int pr = 0; pr < last; ++pr) {
1859 _priority_buffer[pr] = _priority_buffer[pr + 1];
1860 }
1861 _priority_buffer[last] = retired_and_set;
1862 }
1863 };
1864
// Per-GC-worker scan state (one per queue_num). Stack-allocated for the
// duration of an evacuation pause; holds the worker's scan queue, PLABs,
// timing/termination statistics and remembered-set update plumbing.
class G1ParScanThreadState : public StackObj {
protected:
  G1CollectedHeap* _g1h;
  RefToScanQueue* _refs;                 // this worker's queue of references still to scan
  DirtyCardQueue _dcq;                   // cards enqueued by deferred_rs_update()
  G1SATBCardTableModRefBS* _ct_bs;       // card table, accessed via ctbs() (not visible here)
  G1RemSet* _g1_rem;                     // target of immediate_rs_update()

  // Per-purpose promotion (PLAB) buffers; _alloc_buffers is indexed
  // directly by GCAllocPurpose and points at the two containers above.
  G1ParGCAllocBufferContainer _surviving_alloc_buffer;
  G1ParGCAllocBufferContainer _tenured_alloc_buffer;
  G1ParGCAllocBufferContainer* _alloc_buffers[GCAllocPurposeCount];
  ageTable _age_table;

  G1ParScanClosure _scanner;             // scan closure owned by this worker

  size_t _alloc_buffer_waste;            // words left unused in retired buffers
  size_t _undo_waste;                    // words wasted by undone allocations

  OopsInHeapRegionClosure* _evac_failure_cl;
  G1ParScanHeapEvacClosure* _evac_cl;    // applied to ordinary references
  G1ParScanPartialArrayClosure* _partial_scan_cl; // applied to partial-array tasks

  int _hash_seed;                        // seed for work-stealing randomization
  uint _queue_num;                       // index of this worker's queue

  size_t _term_attempts;                 // number of termination-protocol attempts

  // Wall-clock accounting for this worker's phases (seconds).
  double _start;
  double _start_strong_roots;
  double _strong_roots_time;
  double _start_term;
  double _term_time;

  // Map from young-age-index (0 == not young, 1 is youngest) to
  // surviving words. base is what we get back from the malloc call
1913 template <class T> void immediate_rs_update(HeapRegion* from, T* p, int tid) {
1914 if (!from->is_survivor()) {
1915 _g1_rem->par_write_ref(from, p, tid);
1916 }
1917 }
1918
1919 template <class T> void deferred_rs_update(HeapRegion* from, T* p, int tid) {
1920 // If the new value of the field points to the same region or
1921 // is the to-space, we don't need to include it in the Rset updates.
1922 if (!from->is_in_reserved(oopDesc::load_decode_heap_oop(p)) && !from->is_survivor()) {
1923 size_t card_index = ctbs()->index_for(p);
1924 // If the card hasn't been added to the buffer, do it.
1925 if (ctbs()->mark_card_deferred(card_index)) {
1926 dirty_card_queue().enqueue((jbyte*)ctbs()->byte_for_index(card_index));
1927 }
1928 }
1929 }
1930
public:
  G1ParScanThreadState(G1CollectedHeap* g1h, uint queue_num, ReferenceProcessor* rp);

  ~G1ParScanThreadState() {
    // Release the C-heap array backing the surviving-young-words map;
    // presumably allocated in the constructor (not visible in this excerpt).
    FREE_C_HEAP_ARRAY(size_t, _surviving_young_words_base, mtGC);
  }
1937
  RefToScanQueue* refs() { return _refs; }
  ageTable* age_table() { return &_age_table; }

  // Allocation buffer for the given purpose; _alloc_buffers is indexed
  // directly by GCAllocPurpose.
  G1ParGCAllocBufferContainer* alloc_buffer(GCAllocPurpose purpose) {
    return _alloc_buffers[purpose];
  }

  size_t alloc_buffer_waste() const { return _alloc_buffer_waste; }
  size_t undo_waste() const { return _undo_waste; }

#ifdef ASSERT
  // Debug-only sanity checks for references/tasks popped from the queues.
  bool verify_ref(narrowOop* ref) const;
  bool verify_ref(oop* ref) const;
  bool verify_task(StarTask ref) const;
#endif // ASSERT
  // Print the per-worker termination statistics (header and one row for
  // worker i) to the given stream, defaulting to the GC log.
  static void
  print_termination_stats_hdr(outputStream* const st = gclog_or_tty);
  void
  print_termination_stats(int i, outputStream* const st = gclog_or_tty) const;

  size_t* surviving_young_words() {
    // We add on to hide entry 0 which accumulates surviving words for
    // age -1 regions (i.e. non-young ones)
    // NOTE(review): the "+1" offset is applied where _surviving_young_words
    // is initialized, which is outside this excerpt — confirm there.
    return _surviving_young_words;
  }
2056
2057 void retire_alloc_buffers() {
2058 for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
2059 size_t waste = _alloc_buffers[ap]->words_remaining();
2060 add_to_alloc_buffer_waste(waste);
2061 _alloc_buffers[ap]->flush_stats_and_retire(_g1h->stats_for_purpose((GCAllocPurpose)ap),
2062 true /* end_of_gc */,
2063 false /* retain */);
2064 }
2065 }
2066
  // Evacuates obj into to-space; defined out of line (not in this excerpt).
  oop copy_to_survivor_space(oop const obj);

2069 template <class T> void deal_with_reference(T* ref_to_scan) {
2070 if (has_partial_array_mask(ref_to_scan)) {
2071 _partial_scan_cl->do_oop_nv(ref_to_scan);
2072 } else {
2073 // Note: we can use "raw" versions of "region_containing" because
2074 // "obj_to_scan" is definitely in the heap, and is not in a
2075 // humongous region.
2076 HeapRegion* r = _g1h->heap_region_containing_raw(ref_to_scan);
2077 _evac_cl->set_region(r);
2078 _evac_cl->do_oop_nv(ref_to_scan);
2079 }
2080 }
2081
2082 void deal_with_reference(StarTask ref) {
2083 assert(verify_task(ref), "sanity");
2084 if (ref.is_narrow()) {
2085 deal_with_reference((narrowOop*)ref);
2086 } else {
2087 deal_with_reference((oop*)ref);
|