
src/hotspot/share/gc/z/zReferenceProcessor.cpp

  26 #include "gc/shared/referencePolicy.hpp"
  27 #include "gc/shared/referenceProcessorStats.hpp"
  28 #include "gc/z/zHeap.inline.hpp"
  29 #include "gc/z/zOopClosures.inline.hpp"
  30 #include "gc/z/zReferenceProcessor.hpp"
  31 #include "gc/z/zStat.hpp"
  32 #include "gc/z/zTask.hpp"
  33 #include "gc/z/zTracer.inline.hpp"
  34 #include "gc/z/zUtils.inline.hpp"
  35 #include "memory/universe.hpp"
  36 #include "runtime/mutexLocker.hpp"
  37 #include "runtime/os.hpp"
  38 
  39 static const ZStatSubPhase ZSubPhaseConcurrentReferencesProcess("Concurrent References Process");
  40 static const ZStatSubPhase ZSubPhaseConcurrentReferencesEnqueue("Concurrent References Enqueue");
  41 
  42 ZReferenceProcessor::ZReferenceProcessor(ZWorkers* workers) :
  43     _workers(workers),
  44     _soft_reference_policy(NULL),
  45     _encountered_count(),
  46     _dropped_count(),
  47     _enqueued_count(),
  48     _discovered_list(NULL),
  49     _pending_list(NULL),
  50     _pending_list_tail(_pending_list.addr()) {}
  51 
  52 void ZReferenceProcessor::set_soft_reference_policy(bool clear) {
  53   static AlwaysClearPolicy always_clear_policy;
  54   static LRUMaxHeapPolicy lru_max_heap_policy;
  55 
  56   if (clear) {
  57     log_info(gc, ref)("Clearing All Soft References");
  58     _soft_reference_policy = &always_clear_policy;
  59   } else {
  60     _soft_reference_policy = &lru_max_heap_policy;
  61   }
  62 
  63   _soft_reference_policy->setup();
  64 }
  65 
  66 void ZReferenceProcessor::update_soft_reference_clock() const {


 163 
 164   // Update statistics
 165   _encountered_count.get()[type]++;
 166 
 167   if (is_reference_inactive(obj) ||
 168       is_referent_alive_or_null(obj, type) ||
 169       is_referent_softly_alive(obj, type)) {
 170     // Not discovered
 171     return false;
 172   }
 173 
 174   discover(obj, type);
 175 
 176   // Discovered
 177   return true;
 178 }
 179 
 180 void ZReferenceProcessor::discover(oop obj, ReferenceType type) {
 181   log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(obj), ReferenceTypeName[type]);
 182 



 183   // Mark referent finalizable
 184   if (should_mark_referent(type)) {
 185     oop* const referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
 186     ZBarrier::mark_barrier_on_oop_field(referent_addr, true /* finalizable */);
 187   }
 188 
 189   // Add reference to discovered list
 190   assert(java_lang_ref_Reference::discovered(obj) == NULL, "Already discovered");
 191   oop* const list = _discovered_list.addr();
 192   java_lang_ref_Reference::set_discovered(obj, *list);
 193   *list = obj;
 194 }
 195 
 196 oop ZReferenceProcessor::drop(oop obj, ReferenceType type) {
 197   log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(obj), ReferenceTypeName[type]);
 198 
 199   // Update statistics
 200   _dropped_count.get()[type]++;
 201 
 202   // Keep referent alive
 203   keep_referent_alive(obj, type);
 204 
 205   // Unlink and return next in list
 206   const oop next = java_lang_ref_Reference::discovered(obj);
 207   java_lang_ref_Reference::set_discovered(obj, NULL);
 208   return next;
 209 }
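
Dropping a reference here means it will not be enqueued (typically because its referent is still live), so the referent field is kept alive before the reference is unlinked. The body of keep_referent_alive() falls outside this excerpt; a sketch of the idea, assuming ZBarrier's keep-alive barriers on the referent field (phantom references needing the phantom-strength barrier, all others the weak one):

    // Sketch only -- assumes ZBarrier::keep_alive_barrier_on_*_oop_field;
    // the real body is not part of this excerpt.
    void ZReferenceProcessor::keep_referent_alive(oop obj, ReferenceType type) const {
      oop* const referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
      if (type == REF_PHANTOM) {
        ZBarrier::keep_alive_barrier_on_phantom_oop_field(referent_addr);
      } else {
        ZBarrier::keep_alive_barrier_on_weak_oop_field(referent_addr);
      }
    }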
 210 
 211 oop* ZReferenceProcessor::keep(oop obj, ReferenceType type) {
 212   log_trace(gc, ref)("Pending Reference: " PTR_FORMAT " (%s)", p2i(obj), ReferenceTypeName[type]);
 213 
 214   // Update statistics
 215   _enqueued_count.get()[type]++;
 216 
 217   // Clear referent
 218   if (should_clear_referent(type)) {
 219     java_lang_ref_Reference::set_referent(obj, NULL);
 220   }
 221 


 273   }
 274 
 275   if (_pending_list.get() != NULL) {
 276     return false;
 277   }
 278 
 279   return true;
 280 }
 281 
 282 void ZReferenceProcessor::reset_statistics() {
 283   assert(is_empty(), "Should be empty");
 284 
 285   // Reset encountered
 286   ZPerWorkerIterator<Counters> iter_encountered(&_encountered_count);
 287   for (Counters* counters; iter_encountered.next(&counters);) {
 288     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 289       (*counters)[i] = 0;
 290     }
 291   }
 292 
 293   // Reset dropped
 294   ZPerWorkerIterator<Counters> iter_dropped(&_dropped_count);
 295   for (Counters* counters; iter_dropped.next(&counters);) {
 296     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 297       (*counters)[i] = 0;
 298     }
 299   }
 300 
 301   // Reset enqueued
 302   ZPerWorkerIterator<Counters> iter_enqueued(&_enqueued_count);
 303   for (Counters* counters; iter_enqueued.next(&counters);) {
 304     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 305       (*counters)[i] = 0;
 306     }
 307   }
 308 }
 309 
 310 void ZReferenceProcessor::collect_statistics() {
 311   Counters encountered = {};
 312   Counters dropped = {};
 313   Counters enqueued = {};
 314 
 315   // Sum encountered
 316   ZPerWorkerConstIterator<Counters> iter_encountered(&_encountered_count);
 317   for (const Counters* counters; iter_encountered.next(&counters);) {
 318     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 319       encountered[i] += (*counters)[i];
 320     }
 321   }
 322 
 323   // Sum dropped
 324   ZPerWorkerConstIterator<Counters> iter_dropped(&_dropped_count);
 325   for (const Counters* counters; iter_dropped.next(&counters);) {
 326     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 327       dropped[i] += (*counters)[i];
 328     }
 329   }
 330 
 331   // Sum enqueued
 332   ZPerWorkerConstIterator<Counters> iter_enqueued(&_enqueued_count);
 333   for (const Counters* counters; iter_enqueued.next(&counters);) {
 334     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 335       enqueued[i] += (*counters)[i];
 336     }
 337   }
 338 
 339   // Update statistics
 340   ZStatReferences::set_soft(encountered[REF_SOFT], dropped[REF_SOFT], enqueued[REF_SOFT]);
 341   ZStatReferences::set_weak(encountered[REF_WEAK], dropped[REF_WEAK], enqueued[REF_WEAK]);
 342   ZStatReferences::set_final(encountered[REF_FINAL], dropped[REF_FINAL], enqueued[REF_FINAL]);
 343   ZStatReferences::set_phantom(encountered[REF_PHANTOM], dropped[REF_PHANTOM], enqueued[REF_PHANTOM]);
 344 
 345   // Trace statistics
 346   const ReferenceProcessorStats stats(dropped[REF_SOFT] + enqueued[REF_SOFT],
 347                                       dropped[REF_WEAK] + enqueued[REF_WEAK],
 348                                       dropped[REF_FINAL] + enqueued[REF_FINAL],
 349                                       dropped[REF_PHANTOM] + enqueued[REF_PHANTOM]);
 350   ZTracer::tracer()->report_gc_reference_stats(stats);
 351 }
 352 
 353 class ZReferenceProcessorTask : public ZTask {
 354 private:
 355   ZReferenceProcessor* const _reference_processor;
 356 
 357 public:
 358   ZReferenceProcessorTask(ZReferenceProcessor* reference_processor) :
 359       ZTask("ZReferenceProcessorTask"),
 360       _reference_processor(reference_processor) {}
 361 
 362   virtual void work() {
 363     _reference_processor->work();
 364   }
 365 };
 366 
 367 void ZReferenceProcessor::process_references() {
 368   ZStatTimer timer(ZSubPhaseConcurrentReferencesProcess);
 369 




  26 #include "gc/shared/referencePolicy.hpp"
  27 #include "gc/shared/referenceProcessorStats.hpp"
  28 #include "gc/z/zHeap.inline.hpp"
  29 #include "gc/z/zOopClosures.inline.hpp"
  30 #include "gc/z/zReferenceProcessor.hpp"
  31 #include "gc/z/zStat.hpp"
  32 #include "gc/z/zTask.hpp"
  33 #include "gc/z/zTracer.inline.hpp"
  34 #include "gc/z/zUtils.inline.hpp"
  35 #include "memory/universe.hpp"
  36 #include "runtime/mutexLocker.hpp"
  37 #include "runtime/os.hpp"
  38 
  39 static const ZStatSubPhase ZSubPhaseConcurrentReferencesProcess("Concurrent References Process");
  40 static const ZStatSubPhase ZSubPhaseConcurrentReferencesEnqueue("Concurrent References Enqueue");
  41 
  42 ZReferenceProcessor::ZReferenceProcessor(ZWorkers* workers) :
  43     _workers(workers),
  44     _soft_reference_policy(NULL),
  45     _encountered_count(),
  46     _discovered_count(),
  47     _enqueued_count(),
  48     _discovered_list(NULL),
  49     _pending_list(NULL),
  50     _pending_list_tail(_pending_list.addr()) {}
  51 
  52 void ZReferenceProcessor::set_soft_reference_policy(bool clear) {
  53   static AlwaysClearPolicy always_clear_policy;
  54   static LRUMaxHeapPolicy lru_max_heap_policy;
  55 
  56   if (clear) {
  57     log_info(gc, ref)("Clearing All Soft References");
  58     _soft_reference_policy = &always_clear_policy;
  59   } else {
  60     _soft_reference_policy = &lru_max_heap_policy;
  61   }
  62 
  63   _soft_reference_policy->setup();
  64 }
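
The policy selected here is what the is_referent_softly_alive() check consults during discovery. That method's body is outside this excerpt; a minimal sketch, assuming the shared ReferencePolicy::should_clear_reference(oop, jlong) interface and the java_lang_ref_SoftReference clock maintained by update_soft_reference_clock() below:

    // Sketch only -- the real body is not part of this excerpt.
    bool ZReferenceProcessor::is_referent_softly_alive(oop obj, ReferenceType type) const {
      if (type != REF_SOFT) {
        // Only soft references consult the policy
        return false;
      }
      const jlong clock = java_lang_ref_SoftReference::clock();
      return !_soft_reference_policy->should_clear_reference(obj, clock);
    }

AlwaysClearPolicy answers that every soft referent should be cleared, while LRUMaxHeapPolicy only clears referents that have not been accessed for longer than an interval scaled by the maximum heap size (SoftRefLRUPolicyMSPerMB).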
  65 
  66 void ZReferenceProcessor::update_soft_reference_clock() const {


 163 
 164   // Update statistics
 165   _encountered_count.get()[type]++;
 166 
 167   if (is_reference_inactive(obj) ||
 168       is_referent_alive_or_null(obj, type) ||
 169       is_referent_softly_alive(obj, type)) {
 170     // Not discovered
 171     return false;
 172   }
 173 
 174   discover(obj, type);
 175 
 176   // Discovered
 177   return true;
 178 }
 179 
 180 void ZReferenceProcessor::discover(oop obj, ReferenceType type) {
 181   log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(obj), ReferenceTypeName[type]);
 182 
 183   // Update statistics
 184   _discovered_count.get()[type]++;
 185 
 186   // Mark referent finalizable
 187   if (should_mark_referent(type)) {
 188     oop* const referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
 189     ZBarrier::mark_barrier_on_oop_field(referent_addr, true /* finalizable */);
 190   }
 191 
 192   // Add reference to discovered list
 193   assert(java_lang_ref_Reference::discovered(obj) == NULL, "Already discovered");
 194   oop* const list = _discovered_list.addr();
 195   java_lang_ref_Reference::set_discovered(obj, *list);
 196   *list = obj;
 197 }
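
Only final references need the finalizable marking above, since their referents must remain reachable until the finalizer has run. should_mark_referent() is also outside this excerpt; a plausible sketch under that assumption:

    // Sketch only -- assumes final references are the single type whose
    // referent is marked (finalizable) at discovery time.
    bool ZReferenceProcessor::should_mark_referent(ReferenceType type) const {
      return type == REF_FINAL;
    }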
 198 
 199 oop ZReferenceProcessor::drop(oop obj, ReferenceType type) {
 200   log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(obj), ReferenceTypeName[type]);
 201 



 202   // Keep referent alive
 203   keep_referent_alive(obj, type);
 204 
 205   // Unlink and return next in list
 206   const oop next = java_lang_ref_Reference::discovered(obj);
 207   java_lang_ref_Reference::set_discovered(obj, NULL);
 208   return next;
 209 }
 210 
 211 oop* ZReferenceProcessor::keep(oop obj, ReferenceType type) {
 212   log_trace(gc, ref)("Pending Reference: " PTR_FORMAT " (%s)", p2i(obj), ReferenceTypeName[type]);
 213 
 214   // Update statistics
 215   _enqueued_count.get()[type]++;
 216 
 217   // Clear referent
 218   if (should_clear_referent(type)) {
 219     java_lang_ref_Reference::set_referent(obj, NULL);
 220   }
 221 


 273   }
 274 
 275   if (_pending_list.get() != NULL) {
 276     return false;
 277   }
 278 
 279   return true;
 280 }
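
The Counters values reset and summed in the next two functions are per-worker arrays indexed by ReferenceType, so each GC worker bumps its own slots without atomics and the totals are only folded together in collect_statistics(). The declarations live in zReferenceProcessor.hpp rather than in this excerpt; a hypothetical sketch of their shape:

    // Hypothetical sketch of the per-worker counter layout assumed by this
    // file; the real typedef and ZPerWorker fields are declared in
    // zReferenceProcessor.hpp.
    typedef size_t Counters[REF_PHANTOM + 1];  // slots up to REF_PHANTOM
    ZPerWorker<Counters> _encountered_count;   // bumped for every reference seen
    ZPerWorker<Counters> _discovered_count;    // bumped in discover()
    ZPerWorker<Counters> _enqueued_count;      // bumped in keep()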
 281 
 282 void ZReferenceProcessor::reset_statistics() {
 283   assert(is_empty(), "Should be empty");
 284 
 285   // Reset encountered
 286   ZPerWorkerIterator<Counters> iter_encountered(&_encountered_count);
 287   for (Counters* counters; iter_encountered.next(&counters);) {
 288     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 289       (*counters)[i] = 0;
 290     }
 291   }
 292 
 293   // Reset discovered
 294   ZPerWorkerIterator<Counters> iter_discovered(&_discovered_count);
 295   for (Counters* counters; iter_discovered.next(&counters);) {
 296     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 297       (*counters)[i] = 0;
 298     }
 299   }
 300 
 301   // Reset enqueued
 302   ZPerWorkerIterator<Counters> iter_enqueued(&_enqueued_count);
 303   for (Counters* counters; iter_enqueued.next(&counters);) {
 304     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 305       (*counters)[i] = 0;
 306     }
 307   }
 308 }
 309 
 310 void ZReferenceProcessor::collect_statistics() {
 311   Counters encountered = {};
 312   Counters discovered = {};
 313   Counters enqueued = {};
 314 
 315   // Sum encountered
 316   ZPerWorkerConstIterator<Counters> iter_encountered(&_encountered_count);
 317   for (const Counters* counters; iter_encountered.next(&counters);) {
 318     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 319       encountered[i] += (*counters)[i];
 320     }
 321   }
 322 
 323   // Sum discovered
 324   ZPerWorkerConstIterator<Counters> iter_discovered(&_discovered_count);
 325   for (const Counters* counters; iter_discovered.next(&counters);) {
 326     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 327       discovered[i] += (*counters)[i];
 328     }
 329   }
 330 
 331   // Sum enqueued
 332   ZPerWorkerConstIterator<Counters> iter_enqueued(&_enqueued_count);
 333   for (const Counters* counters; iter_enqueued.next(&counters);) {
 334     for (int i = REF_SOFT; i <= REF_PHANTOM; i++) {
 335       enqueued[i] += (*counters)[i];
 336     }
 337   }
 338 
 339   // Update statistics
 340   ZStatReferences::set_soft(encountered[REF_SOFT], discovered[REF_SOFT], enqueued[REF_SOFT]);
 341   ZStatReferences::set_weak(encountered[REF_WEAK], discovered[REF_WEAK], enqueued[REF_WEAK]);
 342   ZStatReferences::set_final(encountered[REF_FINAL], discovered[REF_FINAL], enqueued[REF_FINAL]);
 343   ZStatReferences::set_phantom(encountered[REF_PHANTOM], discovered[REF_PHANTOM], enqueued[REF_PHANTOM]);
 344 
 345   // Trace statistics
 346   const ReferenceProcessorStats stats(discovered[REF_SOFT],
 347                                       discovered[REF_WEAK],
 348                                       discovered[REF_FINAL],
 349                                       discovered[REF_PHANTOM]);
 350   ZTracer::tracer()->report_gc_reference_stats(stats);
 351 }
 352 
 353 class ZReferenceProcessorTask : public ZTask {
 354 private:
 355   ZReferenceProcessor* const _reference_processor;
 356 
 357 public:
 358   ZReferenceProcessorTask(ZReferenceProcessor* reference_processor) :
 359       ZTask("ZReferenceProcessorTask"),
 360       _reference_processor(reference_processor) {}
 361 
 362   virtual void work() {
 363     _reference_processor->work();
 364   }
 365 };
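
ZReferenceProcessorTask only forwards to ZReferenceProcessor::work() on each GC worker that executes it. A usage sketch (the actual call site is in the elided remainder of process_references() below; ZWorkers::run_concurrent is assumed here, not shown):

    // Hypothetical usage sketch -- not the verbatim call site.
    ZReferenceProcessorTask task(this);
    _workers->run_concurrent(&task);  // each worker thread invokes task.work()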
 366 
 367 void ZReferenceProcessor::process_references() {
 368   ZStatTimer timer(ZSubPhaseConcurrentReferencesProcess);
 369 

