65 target_refills = 1;
66 }
67 size_t cur_plab_sz = used() / target_refills;
68 // Take historical weighted average
69 _filter.sample(cur_plab_sz);
70 // Clip from above and below, and align to object boundary
71 size_t plab_sz;
72 plab_sz = MAX2(min_size(), (size_t)_filter.average());
73 plab_sz = MIN2(max_size(), plab_sz);
74 plab_sz = align_object_size(plab_sz);
75 // Latch the result
76 _desired_net_plab_sz = plab_sz;
77 if (PrintPLAB) {
78 gclog_or_tty->print_cr(" (plab_sz = " SIZE_FORMAT " desired_plab_sz = " SIZE_FORMAT ") ", cur_plab_sz, plab_sz);
79 }
80 }
81 // Clear accumulators for next round.
82 reset();
83 }
84
85 void G1EvacStats::send_obj_copy_mem_event(const char* for_gen) {
  // Emit a "G1 Evacuation Memory Statistics" trace/JFR event for the
  // generation labelled by for_gen (e.g. young/old -- NOTE(review): exact
  // labels come from the callers, not visible here), populated from this
  // stats object's accumulators.
86   EventGCG1EvacuationMemoryStatistics e;
87   if (e.should_commit()) {  // Skip all field setup when the event is not enabled/being recorded.
88     e.set_gcId(GCId::peek().id());  // Tag with the current GC's id (presumably the in-progress collection -- confirm GCId::peek() semantics).
89     e.set_gen(for_gen);
     // The accumulators below are kept in heap words; multiply by
     // HeapWordSize so the event reports sizes in bytes.
90     e.set_allocated(allocated() * HeapWordSize);
91     e.set_wasted(wasted() * HeapWordSize);
92     e.set_used(used() * HeapWordSize);
93     e.set_undo_waste(undo_wasted() * HeapWordSize);
94     e.set_region_end_waste(region_end_waste() * HeapWordSize);
95     e.set_regions_refilled(regions_refilled());  // Plain count, not a word quantity -- no byte scaling.
96     e.set_direct_allocated(direct_allocated() * HeapWordSize);
97     e.set_failure_used(failure_used() * HeapWordSize);
98     e.set_failure_waste(failure_waste() * HeapWordSize);
99     e.commit();
100   }
101 }
|
65 target_refills = 1;
66 }
67 size_t cur_plab_sz = used() / target_refills;
68 // Take historical weighted average
69 _filter.sample(cur_plab_sz);
70 // Clip from above and below, and align to object boundary
71 size_t plab_sz;
72 plab_sz = MAX2(min_size(), (size_t)_filter.average());
73 plab_sz = MIN2(max_size(), plab_sz);
74 plab_sz = align_object_size(plab_sz);
75 // Latch the result
76 _desired_net_plab_sz = plab_sz;
77 if (PrintPLAB) {
78 gclog_or_tty->print_cr(" (plab_sz = " SIZE_FORMAT " desired_plab_sz = " SIZE_FORMAT ") ", cur_plab_sz, plab_sz);
79 }
80 }
81 // Clear accumulators for next round.
82 reset();
83 }
84
|