rev 59956 : 8247819: G1: Process strong OopStorage entries in parallel
Reviewed-by:
Contributed-by: Erik Osterlund <erik.osterlund@oracle.com>, Stefan Karlsson <stefan.karlsson@oracle.com>, Thomas Schatzl <thomas.schatzl@oracle.com>

   1 /*
   2  * Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_GC_SHARED_WORKERDATAARRAY_INLINE_HPP
  26 #define SHARE_GC_SHARED_WORKERDATAARRAY_INLINE_HPP
  27 
  28 #include "gc/shared/workerDataArray.hpp"
  29 #include "memory/allocation.inline.hpp"
  30 #include "utilities/ostream.hpp"
  31 
// Constructs a per-worker data array with `length` slots, allocated on the
// GC C-heap. A serial phase is restricted to exactly one slot. All slots and
// all thread-work-item sub-arrays start out uninitialized (via reset()).
template <typename T>
WorkerDataArray<T>::WorkerDataArray(const char* short_name, const char* title, uint length, bool is_serial) :
 _data(NULL),
 _length(length),
 _short_name(short_name),
 _title(title),
 _is_serial(is_serial) {
  assert(length > 0, "Must have some workers to store data for");
  assert(!is_serial || length == 1, "Serial phase must only have a single entry.");
  _data = NEW_C_HEAP_ARRAY(T, _length, mtGC);
  // No thread work items exist until create_thread_work_items() is called.
  for (uint i = 0; i < MaxThreadWorkItems; i++) {
    _thread_work_items[i] = NULL;
  }
  reset();
}
  47 
// Records a value for the given worker. Each slot may be written at most once
// between resets; overwriting an already-set slot is a bug caught in debug builds.
template <typename T>
void WorkerDataArray<T>::set(uint worker_i, T value) {
  assert(worker_i < _length, "Worker %d is greater than max: %d", worker_i, _length);
  assert(_data[worker_i] == uninitialized(), "Overwriting data for worker %d in %s", worker_i, _title);
  _data[worker_i] = value;
}
  54 
// Returns the raw value recorded for the given worker slot. The result may
// still be uninitialized() if no value has been set since the last reset.
template <typename T>
T WorkerDataArray<T>::get(uint worker_i) const {
  assert(worker_i < _length, "Worker %d is greater than max: %d", worker_i, _length);
  return _data[worker_i];
}
  60 
// Releases the thread-work-item sub-arrays (allocated with `new` in
// create_thread_work_items(); deleting NULL entries is a no-op) and the
// C-heap data array allocated in the constructor.
template <typename T>
WorkerDataArray<T>::~WorkerDataArray() {
  for (uint i = 0; i < MaxThreadWorkItems; i++) {
    delete _thread_work_items[i];
  }
  FREE_C_HEAP_ARRAY(T, _data);
}
  68 
  69 template <typename T>
  70 void WorkerDataArray<T>::create_thread_work_items(const char* title, uint index, uint length_override) {
  71   assert(index < MaxThreadWorkItems, "Tried to access thread work item %u (max %u)", index, MaxThreadWorkItems);
  72   assert(_thread_work_items[index] == NULL, "Tried to overwrite existing thread work item");
  73   uint length = length_override != 0 ? length_override : _length;
  74   _thread_work_items[index] = new WorkerDataArray<size_t>(NULL, title, length);
  75 }
  76 
  77 template <typename T>
  78 void WorkerDataArray<T>::set_thread_work_item(uint worker_i, size_t value, uint index) {
  79   assert(index < MaxThreadWorkItems, "Tried to access thread work item %u (max %u)", index, MaxThreadWorkItems);
  80   assert(_thread_work_items[index] != NULL, "No sub count");
  81   _thread_work_items[index]->set(worker_i, value);
  82 }
  83 
  84 template <typename T>
  85 void WorkerDataArray<T>::add_thread_work_item(uint worker_i, size_t value, uint index) {
  86   assert(index < MaxThreadWorkItems, "Tried to access thread work item %u (max %u)", index, MaxThreadWorkItems);
  87   assert(_thread_work_items[index] != NULL, "No sub count");
  88   _thread_work_items[index]->add(worker_i, value);
  89 }
  90 
  91 template <typename T>
  92 void WorkerDataArray<T>::set_or_add_thread_work_item(uint worker_i, size_t value, uint index) {
  93   assert(index < MaxThreadWorkItems, "Tried to access thread work item %u (max %u)", index, MaxThreadWorkItems);
  94   assert(_thread_work_items[index] != NULL, "No sub count");
  95   if (_thread_work_items[index]->get(worker_i) == _thread_work_items[index]->uninitialized()) {
  96     _thread_work_items[index]->set(worker_i, value);
  97   } else {
  98     _thread_work_items[index]->add(worker_i, value);
  99   }
 100 }
 101 
 102 template <typename T>
 103 size_t WorkerDataArray<T>::get_thread_work_item(uint worker_i, uint index) {
 104   assert(index < MaxThreadWorkItems, "Tried to access thread work item %u (max %u)", index, MaxThreadWorkItems);
 105   assert(_thread_work_items[index] != NULL, "No sub count");
 106   return _thread_work_items[index]->get(worker_i);
 107 }
 108 
// Accumulates `value` into the worker's slot. The slot must already hold a
// value (i.e. set() must have been called first) — adding to an
// uninitialized slot is a bug caught in debug builds.
template <typename T>
void WorkerDataArray<T>::add(uint worker_i, T value) {
  assert(worker_i < _length, "Worker %d is greater than max: %d", worker_i, _length);
  assert(_data[worker_i] != uninitialized(), "No data to add to %s for worker %d", _title, worker_i);
  _data[worker_i] += value;
}
 115 
 116 template <typename T>
 117 double WorkerDataArray<T>::average() const {
 118   uint contributing_threads = 0;
 119   for (uint i = 0; i < _length; ++i) {
 120     if (get(i) != uninitialized()) {
 121       contributing_threads++;
 122     }
 123   }
 124   if (contributing_threads == 0) {
 125     return 0.0;
 126   }
 127   return sum() / (double) contributing_threads;
 128 }
 129 
 130 template <typename T>
 131 T WorkerDataArray<T>::sum() const {
 132   T s = 0;
 133   for (uint i = 0; i < _length; ++i) {
 134     if (get(i) != uninitialized()) {
 135       s += get(i);
 136     }
 137   }
 138   return s;
 139 }
 140 
 141 template <typename T>
 142 void WorkerDataArray<T>::set_all(T value) {
 143   for (uint i = 0; i < _length; i++) {
 144     _data[i] = value;
 145   }
 146 }
 147 
// Prints a one-line summary of this phase to the given stream.
// Serial phases print their single value; parallel phases print
// min/avg/max/diff/sum statistics over all workers that recorded a value,
// plus the count of contributing workers. If no worker recorded anything
// the phase is reported as " skipped".
template <class T>
void WorkerDataArray<T>::print_summary_on(outputStream* out, bool print_sum) const {
  if (_is_serial) {
    out->print("%s:", title());
  } else {
    out->print("%-25s", title());
  }

  // Find the first worker slot that actually recorded a value.
  uint start = 0;
  while (start < _length && get(start) == uninitialized()) {
    start++;
  }
  if (start < _length) {
    if (_is_serial) {
      // Serial phases hold exactly one entry (asserted in the constructor),
      // so slot 0 is the value found above.
      WDAPrinter::summary(out, get(0));
    } else {
      // Seed the statistics from the first used slot, then fold in the rest,
      // skipping slots that were never written.
      T min = get(start);
      T max = min;
      T sum = 0;
      uint contributing_threads = 0;
      for (uint i = start; i < _length; ++i) {
        T value = get(i);
        if (value != uninitialized()) {
          max = MAX2(max, value);
          min = MIN2(min, value);
          sum += value;
          contributing_threads++;
        }
      }
      T diff = max - min;
      assert(contributing_threads != 0, "Must be since we found a used value for the start index");
      double avg = sum / (double) contributing_threads;
      WDAPrinter::summary(out, min, avg, max, diff, sum, print_sum);
      out->print_cr(", Workers: %d", contributing_threads);
    }
  } else {
    // No data for this phase.
    out->print_cr(" skipped");
  }
}
 188 
// Prints the per-worker values for this phase; the actual type-specific
// formatting is delegated to WDAPrinter.
template <class T>
void WorkerDataArray<T>::print_details_on(outputStream* out) const {
  WDAPrinter::details(this, out);
}
 193 
 194 template <typename T>
 195 void WorkerDataArray<T>::reset() {
 196   set_all(uninitialized());
 197   for (uint i = 0; i < MaxThreadWorkItems; i++) {
 198     if (_thread_work_items[i] != NULL) {
 199       _thread_work_items[i]->reset();
 200     }
 201   }
 202 }
 203 
 204 #endif // SHARE_GC_SHARED_WORKERDATAARRAY_INLINE_HPP
--- EOF ---