src/hotspot/share/gc/z/zStat.cpp

Old version:

 744 
 745   if (_verbose) {
 746     LogTarget(Info, gc) log;
 747     log_end(log, duration, true /* thread */);
 748   } else {
 749     LogTarget(Debug, gc) log;
 750     log_end(log, duration, true /* thread */);
 751   }
 752 }
 753 
 754 //
 755 // Stat timer
 756 //
 757 THREAD_LOCAL uint32_t ZStatTimerDisable::_active = 0;
 758 
 759 //
 760 // Stat sample/inc
 761 //
 762 void ZStatSample(const ZStatSampler& sampler, uint64_t value) {
 763   ZStatSamplerData* const cpu_data = sampler.get();
 764   Atomic::add(1u, &cpu_data->_nsamples);
 765   Atomic::add(value, &cpu_data->_sum);
 766 
 767   uint64_t max = cpu_data->_max;
 768   for (;;) {
 769     if (max >= value) {
 770       // Not max
 771       break;
 772     }
 773 
 774     const uint64_t new_max = value;
 775     const uint64_t prev_max = Atomic::cmpxchg(new_max, &cpu_data->_max, max);
 776     if (prev_max == max) {
 777       // Success
 778       break;
 779     }
 780 
 781     // Retry
 782     max = prev_max;
 783   }
 784 
 785   ZTracer::tracer()->report_stat_sampler(sampler, value);
 786 }
 787 
 788 void ZStatInc(const ZStatCounter& counter, uint64_t increment) {
 789   ZStatCounterData* const cpu_data = counter.get();
 790   const uint64_t value = Atomic::add(increment, &cpu_data->_counter);
 791 
 792   ZTracer::tracer()->report_stat_counter(counter, increment, value);
 793 }
 794 
 795 void ZStatInc(const ZStatUnsampledCounter& counter, uint64_t increment) {
 796   ZStatCounterData* const cpu_data = counter.get();
 797   Atomic::add(increment, &cpu_data->_counter);
 798 }
 799 
 800 //
 801 // Stat allocation rate
 802 //
 803 const ZStatUnsampledCounter ZStatAllocRate::_counter("Allocation Rate");
 804 TruncatedSeq                ZStatAllocRate::_rate(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 805 TruncatedSeq                ZStatAllocRate::_rate_avg(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 806 
 807 const ZStatUnsampledCounter& ZStatAllocRate::counter() {
 808   return _counter;
 809 }
 810 
 811 uint64_t ZStatAllocRate::sample_and_reset() {
 812   const ZStatCounterData bytes_per_sample = _counter.collect_and_reset();
 813   const uint64_t bytes_per_second = bytes_per_sample._counter * sample_hz;
 814 
 815   _rate.add(bytes_per_second);
 816   _rate_avg.add(_rate.avg());
 817 

New version:

 744 
 745   if (_verbose) {
 746     LogTarget(Info, gc) log;
 747     log_end(log, duration, true /* thread */);
 748   } else {
 749     LogTarget(Debug, gc) log;
 750     log_end(log, duration, true /* thread */);
 751   }
 752 }
 753 
 754 //
 755 // Stat timer
 756 //
 757 THREAD_LOCAL uint32_t ZStatTimerDisable::_active = 0;
 758 
 759 //
 760 // Stat sample/inc
 761 //
 762 void ZStatSample(const ZStatSampler& sampler, uint64_t value) {
 763   ZStatSamplerData* const cpu_data = sampler.get();
 764   Atomic::add(&cpu_data->_nsamples, 1u);
 765   Atomic::add(&cpu_data->_sum, value);
 766 
 767   uint64_t max = cpu_data->_max;
 768   for (;;) {
 769     if (max >= value) {
 770       // Not max
 771       break;
 772     }
 773 
 774     const uint64_t new_max = value;
 775     const uint64_t prev_max = Atomic::cmpxchg(new_max, &cpu_data->_max, max);
 776     if (prev_max == max) {
 777       // Success
 778       break;
 779     }
 780 
 781     // Retry
 782     max = prev_max;
 783   }
 784 
 785   ZTracer::tracer()->report_stat_sampler(sampler, value);
 786 }
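
The sampler update above is a standard lock-free pattern: two unconditional atomic adds for the sample count and the running sum, plus a compare-and-swap retry loop that only advances the recorded maximum when the new value is larger. As a rough standalone sketch of the same pattern (using std::atomic rather than HotSpot's Atomic wrapper; SamplerData and record_sample are hypothetical names, not ZGC code):

  #include <atomic>
  #include <cstdint>

  struct SamplerData {
    std::atomic<uint32_t> nsamples{0};
    std::atomic<uint64_t> sum{0};
    std::atomic<uint64_t> max{0};
  };

  void record_sample(SamplerData& data, uint64_t value) {
    // Unconditional parts: bump the sample count and the running sum.
    data.nsamples.fetch_add(1, std::memory_order_relaxed);
    data.sum.fetch_add(value, std::memory_order_relaxed);

    // Lock-free running max: retry until either the stored max is already
    // >= value, or value is successfully installed as the new max. On a
    // failed CAS, max is reloaded with the value another thread installed.
    uint64_t max = data.max.load(std::memory_order_relaxed);
    while (value > max &&
           !data.max.compare_exchange_weak(max, value, std::memory_order_relaxed)) {
      // Retry with the refreshed max.
    }
  }
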
 787 
 788 void ZStatInc(const ZStatCounter& counter, uint64_t increment) {
 789   ZStatCounterData* const cpu_data = counter.get();
 790   const uint64_t value = Atomic::add(&cpu_data->_counter, increment);
 791 
 792   ZTracer::tracer()->report_stat_counter(counter, increment, value);
 793 }
 794 
 795 void ZStatInc(const ZStatUnsampledCounter& counter, uint64_t increment) {
 796   ZStatCounterData* const cpu_data = counter.get();
 797   Atomic::add(&cpu_data->_counter, increment);
 798 }
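
Both ZStatInc overloads above reduce to an atomic fetch-and-add; the sampled variant additionally reports the post-increment value to the tracer. The visible change in this hunk is only the Atomic::add parameter order, with the destination pointer now passed first. A minimal sketch of the sampled increment outside HotSpot (counter_inc is a made-up name; std::atomic stands in for Atomic):

  #include <atomic>
  #include <cstdint>

  // Increment a shared counter and return the updated value, mirroring the
  // value the sampled-counter overload forwards to the tracer. Note that
  // std::atomic's fetch_add returns the previous value, so the increment is
  // added back to obtain the post-update value.
  uint64_t counter_inc(std::atomic<uint64_t>& counter, uint64_t increment) {
    return counter.fetch_add(increment, std::memory_order_relaxed) + increment;
  }
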
 799 
 800 //
 801 // Stat allocation rate
 802 //
 803 const ZStatUnsampledCounter ZStatAllocRate::_counter("Allocation Rate");
 804 TruncatedSeq                ZStatAllocRate::_rate(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 805 TruncatedSeq                ZStatAllocRate::_rate_avg(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 806 
 807 const ZStatUnsampledCounter& ZStatAllocRate::counter() {
 808   return _counter;
 809 }
 810 
 811 uint64_t ZStatAllocRate::sample_and_reset() {
 812   const ZStatCounterData bytes_per_sample = _counter.collect_and_reset();
 813   const uint64_t bytes_per_second = bytes_per_sample._counter * sample_hz;
 814 
 815   _rate.add(bytes_per_second);
 816   _rate_avg.add(_rate.avg());
 817 
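
The listing is cut off mid-function here, but the visible bookkeeping in sample_and_reset is straightforward: the sampler runs at a fixed frequency, so the bytes collected since the previous sample, multiplied by sample_hz, give an allocation rate in bytes per second, and that rate plus its running average are pushed into windows holding sample_window_sec * sample_hz entries. A rough standalone sketch of the same calculation, with a simple ring-buffer average standing in for HotSpot's TruncatedSeq (RollingWindow and sample_alloc_rate are made-up names):

  #include <cstddef>
  #include <cstdint>
  #include <vector>

  // Fixed-length window that keeps the last `capacity` samples and exposes
  // their average; a stand-in for TruncatedSeq in this sketch.
  class RollingWindow {
  public:
    explicit RollingWindow(size_t capacity) : _samples(capacity, 0.0) {}

    void add(double value) {
      _sum += value - _samples[_next];   // drop the overwritten sample from the sum
      _samples[_next] = value;
      _next = (_next + 1) % _samples.size();
      if (_count < _samples.size()) {
        _count++;
      }
    }

    double avg() const {
      return _count == 0 ? 0.0 : _sum / _count;
    }

  private:
    std::vector<double> _samples;
    size_t _next = 0;
    size_t _count = 0;
    double _sum = 0.0;
  };

  // Convert the bytes allocated since the previous sample into a rate and
  // feed both the rate and its smoothed average into their windows.
  uint64_t sample_alloc_rate(uint64_t bytes_since_last_sample,
                             uint64_t sample_hz,
                             RollingWindow& rate,
                             RollingWindow& rate_avg) {
    const uint64_t bytes_per_second = bytes_since_last_sample * sample_hz;
    rate.add(static_cast<double>(bytes_per_second));
    rate_avg.add(rate.avg());
    return bytes_per_second;
  }

For example, with a hypothetical sample_hz of 10, collecting 6.4 MB between two samples would be recorded as a rate of 64 MB/s.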