
src/hotspot/share/gc/z/zStat.cpp

old version:

 407 }
 408 
 409 //
 410 // Stat sampler
 411 //
 412 ZStatSampler::ZStatSampler(const char* group, const char* name, ZStatUnitPrinter printer) :
 413     ZStatIterableValue<ZStatSampler>(group, name, sizeof(ZStatSamplerData)),
 414     _printer(printer) {}
 415 
 416 ZStatSamplerData* ZStatSampler::get() const {
 417   return get_cpu_local<ZStatSamplerData>(ZCPU::id());
 418 }
 419 
 420 ZStatSamplerData ZStatSampler::collect_and_reset() const {
 421   ZStatSamplerData all;
 422 
 423   const uint32_t ncpus = ZCPU::count();
 424   for (uint32_t i = 0; i < ncpus; i++) {
 425     ZStatSamplerData* const cpu_data = get_cpu_local<ZStatSamplerData>(i);
 426     if (cpu_data->_nsamples > 0) {
 427       const uint64_t nsamples = Atomic::xchg((uint64_t)0, &cpu_data->_nsamples);
 428       const uint64_t sum = Atomic::xchg((uint64_t)0, &cpu_data->_sum);
 429       const uint64_t max = Atomic::xchg((uint64_t)0, &cpu_data->_max);
 430       all._nsamples += nsamples;
 431       all._sum += sum;
 432       if (all._max < max) {
 433         all._max = max;
 434       }
 435     }
 436   }
 437 
 438   return all;
 439 }
 440 
 441 ZStatUnitPrinter ZStatSampler::printer() const {
 442   return _printer;
 443 }
 444 
 445 //
 446 // Stat counter
 447 //
 448 ZStatCounter::ZStatCounter(const char* group, const char* name, ZStatUnitPrinter printer) :
 449     ZStatIterableValue<ZStatCounter>(group, name, sizeof(ZStatCounterData)),
 450     _sampler(group, name, printer) {}
 451 
 452 ZStatCounterData* ZStatCounter::get() const {
 453   return get_cpu_local<ZStatCounterData>(ZCPU::id());
 454 }
 455 
 456 void ZStatCounter::sample_and_reset() const {
 457   uint64_t counter = 0;
 458 
 459   const uint32_t ncpus = ZCPU::count();
 460   for (uint32_t i = 0; i < ncpus; i++) {
 461     ZStatCounterData* const cpu_data = get_cpu_local<ZStatCounterData>(i);
 462     counter += Atomic::xchg((uint64_t)0, &cpu_data->_counter);
 463   }
 464 
 465   ZStatSample(_sampler, counter);
 466 }
 467 
 468 //
 469 // Stat unsampled counter
 470 //
 471 ZStatUnsampledCounter::ZStatUnsampledCounter(const char* name) :
 472     ZStatIterableValue<ZStatUnsampledCounter>("Unsampled", name, sizeof(ZStatCounterData)) {}
 473 
 474 ZStatCounterData* ZStatUnsampledCounter::get() const {
 475   return get_cpu_local<ZStatCounterData>(ZCPU::id());
 476 }
 477 
 478 ZStatCounterData ZStatUnsampledCounter::collect_and_reset() const {
 479   ZStatCounterData all;
 480 
 481   const uint32_t ncpus = ZCPU::count();
 482   for (uint32_t i = 0; i < ncpus; i++) {
 483     ZStatCounterData* const cpu_data = get_cpu_local<ZStatCounterData>(i);
 484     all._counter += Atomic::xchg((uint64_t)0, &cpu_data->_counter);
 485   }
 486 
 487   return all;
 488 }
 489 
 490 //
 491 // Stat MMU (Minimum Mutator Utilization)
 492 //
 493 ZStatMMUPause::ZStatMMUPause() :
 494     _start(0.0),
 495     _end(0.0) {}
 496 
 497 ZStatMMUPause::ZStatMMUPause(const Ticks& start, const Ticks& end) :
 498     _start(TimeHelper::counter_to_millis(start.value())),
 499     _end(TimeHelper::counter_to_millis(end.value())) {}
 500 
 501 double ZStatMMUPause::end() const {
 502   return _end;
 503 }
 504 


 744 
 745   if (_verbose) {
 746     LogTarget(Info, gc) log;
 747     log_end(log, duration, true /* thread */);
 748   } else {
 749     LogTarget(Debug, gc) log;
 750     log_end(log, duration, true /* thread */);
 751   }
 752 }
 753 
 754 //
 755 // Stat timer
 756 //
 757 THREAD_LOCAL uint32_t ZStatTimerDisable::_active = 0;
 758 
 759 //
 760 // Stat sample/inc
 761 //
 762 void ZStatSample(const ZStatSampler& sampler, uint64_t value) {
 763   ZStatSamplerData* const cpu_data = sampler.get();
 764   Atomic::add(1u, &cpu_data->_nsamples);
 765   Atomic::add(value, &cpu_data->_sum);
 766 
 767   uint64_t max = cpu_data->_max;
 768   for (;;) {
 769     if (max >= value) {
 770       // Not max
 771       break;
 772     }
 773 
 774     const uint64_t new_max = value;
 775     const uint64_t prev_max = Atomic::cmpxchg(new_max, &cpu_data->_max, max);
 776     if (prev_max == max) {
 777       // Success
 778       break;
 779     }
 780 
 781     // Retry
 782     max = prev_max;
 783   }
 784 
 785   ZTracer::tracer()->report_stat_sampler(sampler, value);
 786 }
 787 
 788 void ZStatInc(const ZStatCounter& counter, uint64_t increment) {
 789   ZStatCounterData* const cpu_data = counter.get();
 790   const uint64_t value = Atomic::add(increment, &cpu_data->_counter);
 791 
 792   ZTracer::tracer()->report_stat_counter(counter, increment, value);
 793 }
 794 
 795 void ZStatInc(const ZStatUnsampledCounter& counter, uint64_t increment) {
 796   ZStatCounterData* const cpu_data = counter.get();
 797   Atomic::add(increment, &cpu_data->_counter);
 798 }
 799 
 800 //
 801 // Stat allocation rate
 802 //
 803 const ZStatUnsampledCounter ZStatAllocRate::_counter("Allocation Rate");
 804 TruncatedSeq                ZStatAllocRate::_rate(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 805 TruncatedSeq                ZStatAllocRate::_rate_avg(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 806 
 807 const ZStatUnsampledCounter& ZStatAllocRate::counter() {
 808   return _counter;
 809 }
 810 
 811 uint64_t ZStatAllocRate::sample_and_reset() {
 812   const ZStatCounterData bytes_per_sample = _counter.collect_and_reset();
 813   const uint64_t bytes_per_second = bytes_per_sample._counter * sample_hz;
 814 
 815   _rate.add(bytes_per_second);
 816   _rate_avg.add(_rate.avg());
 817 
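The second copy of the file below is the new version. The only change relative to the old version above is the argument order of the Atomic operations: the destination pointer moves to the first position, so the calls read roughly like their std::atomic counterparts. Concretely, add and xchg become (dest, value), and cmpxchg becomes (dest, compare_value, exchange_value) instead of (exchange_value, dest, compare_value). Taken straight from the listings:

  // Old argument order: value first, destination last.
  Atomic::add(1u, &cpu_data->_nsamples);
  Atomic::xchg((uint64_t)0, &cpu_data->_sum);
  Atomic::cmpxchg(new_max, &cpu_data->_max, max);

  // New argument order: destination first.
  Atomic::add(&cpu_data->_nsamples, 1u);
  Atomic::xchg(&cpu_data->_sum, (uint64_t)0);
  Atomic::cmpxchg(&cpu_data->_max, max, new_max);

new version: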




 407 }
 408 
 409 //
 410 // Stat sampler
 411 //
 412 ZStatSampler::ZStatSampler(const char* group, const char* name, ZStatUnitPrinter printer) :
 413     ZStatIterableValue<ZStatSampler>(group, name, sizeof(ZStatSamplerData)),
 414     _printer(printer) {}
 415 
 416 ZStatSamplerData* ZStatSampler::get() const {
 417   return get_cpu_local<ZStatSamplerData>(ZCPU::id());
 418 }
 419 
 420 ZStatSamplerData ZStatSampler::collect_and_reset() const {
 421   ZStatSamplerData all;
 422 
 423   const uint32_t ncpus = ZCPU::count();
 424   for (uint32_t i = 0; i < ncpus; i++) {
 425     ZStatSamplerData* const cpu_data = get_cpu_local<ZStatSamplerData>(i);
 426     if (cpu_data->_nsamples > 0) {
 427       const uint64_t nsamples = Atomic::xchg(&cpu_data->_nsamples, (uint64_t)0);
 428       const uint64_t sum = Atomic::xchg(&cpu_data->_sum, (uint64_t)0);
 429       const uint64_t max = Atomic::xchg(&cpu_data->_max, (uint64_t)0);
 430       all._nsamples += nsamples;
 431       all._sum += sum;
 432       if (all._max < max) {
 433         all._max = max;
 434       }
 435     }
 436   }
 437 
 438   return all;
 439 }
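collect_and_reset drains each CPU's slot with an atomic exchange, so a sample recorded concurrently is observed either by this collection or by the next one, never lost. A minimal standalone sketch of the same pattern, using std::atomic and a fixed CPU count in place of HotSpot's Atomic and ZCPU (both substitutions are assumptions for illustration):

  #include <atomic>
  #include <cstdint>

  // Stand-in for the per-CPU ZStatSamplerData slots.
  struct SamplerData {
    std::atomic<uint64_t> nsamples{0};
    std::atomic<uint64_t> sum{0};
    std::atomic<uint64_t> max{0};
  };

  const int ncpus = 4;          // assumed fixed CPU count
  SamplerData slots[ncpus];

  struct Collected {
    uint64_t nsamples = 0;
    uint64_t sum = 0;
    uint64_t max = 0;
  };

  // Swap every field with zero so concurrent writers start a fresh
  // window, then merge the drained values into one result.
  Collected collect_and_reset() {
    Collected all;
    for (int i = 0; i < ncpus; i++) {
      all.nsamples += slots[i].nsamples.exchange(0);
      all.sum      += slots[i].sum.exchange(0);
      const uint64_t max = slots[i].max.exchange(0);
      if (all.max < max) {
        all.max = max;
      }
    }
    return all;
  }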
 440 
 441 ZStatUnitPrinter ZStatSampler::printer() const {
 442   return _printer;
 443 }
 444 
 445 //
 446 // Stat counter
 447 //
 448 ZStatCounter::ZStatCounter(const char* group, const char* name, ZStatUnitPrinter printer) :
 449     ZStatIterableValue<ZStatCounter>(group, name, sizeof(ZStatCounterData)),
 450     _sampler(group, name, printer) {}
 451 
 452 ZStatCounterData* ZStatCounter::get() const {
 453   return get_cpu_local<ZStatCounterData>(ZCPU::id());
 454 }
 455 
 456 void ZStatCounter::sample_and_reset() const {
 457   uint64_t counter = 0;
 458 
 459   const uint32_t ncpus = ZCPU::count();
 460   for (uint32_t i = 0; i < ncpus; i++) {
 461     ZStatCounterData* const cpu_data = get_cpu_local<ZStatCounterData>(i);
 462     counter += Atomic::xchg(&cpu_data->_counter, (uint64_t)0);
 463   }
 464 
 465   ZStatSample(_sampler, counter);
 466 }
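Note that a ZStatCounter is a counter/sampler pair: sample_and_reset folds the per-CPU increments into one total and feeds it to ZStatSample, so a sampled counter yields one sample per sampling period rather than one per increment.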
 467 
 468 //
 469 // Stat unsampled counter
 470 //
 471 ZStatUnsampledCounter::ZStatUnsampledCounter(const char* name) :
 472     ZStatIterableValue<ZStatUnsampledCounter>("Unsampled", name, sizeof(ZStatCounterData)) {}
 473 
 474 ZStatCounterData* ZStatUnsampledCounter::get() const {
 475   return get_cpu_local<ZStatCounterData>(ZCPU::id());
 476 }
 477 
 478 ZStatCounterData ZStatUnsampledCounter::collect_and_reset() const {
 479   ZStatCounterData all;
 480 
 481   const uint32_t ncpus = ZCPU::count();
 482   for (uint32_t i = 0; i < ncpus; i++) {
 483     ZStatCounterData* const cpu_data = get_cpu_local<ZStatCounterData>(i);
 484     all._counter += Atomic::xchg(&cpu_data->_counter, (uint64_t)0);
 485   }
 486 
 487   return all;
 488 }
 489 
 490 //
 491 // Stat MMU (Minimum Mutator Utilization)
 492 //
 493 ZStatMMUPause::ZStatMMUPause() :
 494     _start(0.0),
 495     _end(0.0) {}
 496 
 497 ZStatMMUPause::ZStatMMUPause(const Ticks& start, const Ticks& end) :
 498     _start(TimeHelper::counter_to_millis(start.value())),
 499     _end(TimeHelper::counter_to_millis(end.value())) {}
 500 
 501 double ZStatMMUPause::end() const {
 502   return _end;
 503 }
 504 
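MMU here means: for a sliding window of a given width, the smallest fraction of that window left to the mutator after subtracting GC pause time. A sketch of the utilization computation for one window position, assuming a plain list of pauses (the Pause struct below is illustrative, not ZStatMMUPause's API):

  #include <algorithm>
  #include <cstdio>

  // Pause interval in milliseconds, mirroring ZStatMMUPause's
  // _start/_end fields.
  struct Pause {
    double start;
    double end;
  };

  // Mutator utilization over [t, t + width): one minus the fraction
  // of the window covered by pauses.
  double utilization(const Pause* pauses, int n, double t, double width) {
    double paused = 0.0;
    for (int i = 0; i < n; i++) {
      const double lo = std::max(pauses[i].start, t);
      const double hi = std::min(pauses[i].end, t + width);
      if (lo < hi) {
        paused += hi - lo;
      }
    }
    return 1.0 - paused / width;
  }

  int main() {
    // Two 2 ms pauses inside a 10 ms window: utilization is
    // 1 - 4/10 = 0.6. The MMU for this width is the minimum of
    // utilization over all window positions t.
    const Pause pauses[] = {{10.0, 12.0}, {15.0, 17.0}};
    std::printf("%.2f\n", utilization(pauses, 2, 8.0, 10.0));
    return 0;
  }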


 744 
 745   if (_verbose) {
 746     LogTarget(Info, gc) log;
 747     log_end(log, duration, true /* thread */);
 748   } else {
 749     LogTarget(Debug, gc) log;
 750     log_end(log, duration, true /* thread */);
 751   }
 752 }
 753 
 754 //
 755 // Stat timer
 756 //
 757 THREAD_LOCAL uint32_t ZStatTimerDisable::_active = 0;
 758 
 759 //
 760 // Stat sample/inc
 761 //
 762 void ZStatSample(const ZStatSampler& sampler, uint64_t value) {
 763   ZStatSamplerData* const cpu_data = sampler.get();
 764   Atomic::add(&cpu_data->_nsamples, 1u);
 765   Atomic::add(&cpu_data->_sum, value);
 766 
 767   uint64_t max = cpu_data->_max;
 768   for (;;) {
 769     if (max >= value) {
 770       // Not max
 771       break;
 772     }
 773 
 774     const uint64_t new_max = value;
 775     const uint64_t prev_max = Atomic::cmpxchg(&cpu_data->_max, max, new_max);
 776     if (prev_max == max) {
 777       // Success
 778       break;
 779     }
 780 
 781     // Retry
 782     max = prev_max;
 783   }
 784 
 785   ZTracer::tracer()->report_stat_sampler(sampler, value);
 786 }
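The max update above is the standard lock-free maximum: keep retrying the compare-and-swap until the observed maximum is already at least our value, or until we installed ours. The same loop expressed with std::atomic (a sketch; HotSpot's Atomic::cmpxchg defaults to stronger ordering than the relaxed ordering chosen here, which suffices for a statistic):

  #include <atomic>
  #include <cstdint>

  // Raise max to at least value without locking. On CAS failure,
  // compare_exchange_weak reloads the current value into cur, so the
  // loop re-tests against whatever another thread just installed.
  void update_max(std::atomic<uint64_t>& max, uint64_t value) {
    uint64_t cur = max.load(std::memory_order_relaxed);
    while (cur < value &&
           !max.compare_exchange_weak(cur, value,
                                      std::memory_order_relaxed)) {
      // cur was refreshed by the failed CAS; the condition re-checks it.
    }
  }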
 787 
 788 void ZStatInc(const ZStatCounter& counter, uint64_t increment) {
 789   ZStatCounterData* const cpu_data = counter.get();
 790   const uint64_t value = Atomic::add(&cpu_data->_counter, increment);
 791 
 792   ZTracer::tracer()->report_stat_counter(counter, increment, value);
 793 }
 794 
 795 void ZStatInc(const ZStatUnsampledCounter& counter, uint64_t increment) {
 796   ZStatCounterData* const cpu_data = counter.get();
 797   Atomic::add(&cpu_data->_counter, increment);
 798 }
 799 
 800 //
 801 // Stat allocation rate
 802 //
 803 const ZStatUnsampledCounter ZStatAllocRate::_counter("Allocation Rate");
 804 TruncatedSeq                ZStatAllocRate::_rate(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 805 TruncatedSeq                ZStatAllocRate::_rate_avg(ZStatAllocRate::sample_window_sec * ZStatAllocRate::sample_hz);
 806 
 807 const ZStatUnsampledCounter& ZStatAllocRate::counter() {
 808   return _counter;
 809 }
 810 
 811 uint64_t ZStatAllocRate::sample_and_reset() {
 812   const ZStatCounterData bytes_per_sample = _counter.collect_and_reset();
 813   const uint64_t bytes_per_second = bytes_per_sample._counter * sample_hz;
 814 
 815   _rate.add(bytes_per_second);
 816   _rate_avg.add(_rate.avg());
 817 
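sample_and_reset turns the bytes accumulated since the previous sample into a rate by multiplying with the sampling frequency, then pushes both the rate and its running average into bounded windows. A stand-in sketch with a small ring buffer replacing TruncatedSeq (the sample_hz and sample_window_sec values are assumptions):

  #include <cstddef>
  #include <cstdint>

  const int sample_hz = 1;           // assumed samples per second
  const int sample_window_sec = 10;  // assumed window width
  const int window = sample_window_sec * sample_hz;

  // Minimal stand-in for TruncatedSeq: fixed-size window plus average.
  struct Window {
    double vals[window] = {};
    size_t next = 0;
    size_t count = 0;

    void add(double v) {
      vals[next] = v;
      next = (next + 1) % window;
      if (count < window) {
        count++;
      }
    }

    double avg() const {
      double sum = 0.0;
      for (size_t i = 0; i < count; i++) {
        sum += vals[i];
      }
      return count > 0 ? sum / count : 0.0;
    }
  };

  Window rate;      // bytes/second samples
  Window rate_avg;  // running averages of the rate window

  // One sampling tick: bytes since the last tick times the frequency
  // gives bytes per second; the averaged window smooths the signal.
  void on_sample(uint64_t bytes_since_last_sample) {
    const double bytes_per_second =
        double(bytes_since_last_sample) * sample_hz;
    rate.add(bytes_per_second);
    rate_avg.add(rate.avg());
  }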

