
src/hotspot/share/gc/shared/collectedHeap.cpp





 224       break;
 225     }
 226     case GCCause::_metadata_GC_clear_soft_refs: {
 227       HandleMark hm;
 228       do_full_collection(true);         // do clear all soft refs
 229       break;
 230     }
 231     default:
 232       ShouldNotReachHere(); // Unexpected use of this function
 233   }
 234 }
 235 
 236 void CollectedHeap::set_barrier_set(BarrierSet* barrier_set) {
 237   _barrier_set = barrier_set;
 238   oopDesc::set_bs(_barrier_set);
 239 }
 240 
 241 void CollectedHeap::pre_initialize() {
 242   // Used for ReduceInitialCardMarks (when COMPILER2 or JVMCI is used);
 243   // otherwise remains unused.
 244 #if defined(COMPILER2) || INCLUDE_JVMCI
 245   _defer_initial_card_mark = is_server_compilation_mode_vm() && ReduceInitialCardMarks && can_elide_tlab_store_barriers()
 246                              && (DeferInitialCardMark || card_mark_must_follow_store());
 247 #else
 248   assert(_defer_initial_card_mark == false, "Who would set it?");
 249 #endif
 250 }
 251 
 252 #ifndef PRODUCT
 253 void CollectedHeap::check_for_bad_heap_word_value(HeapWord* addr, size_t size) {
 254   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 255     for (size_t slot = 0; slot < size; slot += 1) {
 256       assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
 257              "Found badHeapWordVal in post-allocation check");
 258     }
 259   }
 260 }
 261 
 262 void CollectedHeap::check_for_non_bad_heap_word_value(HeapWord* addr, size_t size) {
 263   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 264     for (size_t slot = 0; slot < size; slot += 1) {


 528   // created. Callers must ensure that mutators
 529   // aren't going to interfere -- for instance, this is permissible
 530   // if we are still single-threaded and have either not yet
 531   // started allocating (nothing much to verify) or we have
 532   // started allocating but are now a full-fledged JavaThread
 533   // (and have thus made our TLABs available for filling).
 534   assert(SafepointSynchronize::is_at_safepoint() ||
 535          !is_init_completed(),
 536          "Should only be called at a safepoint or at start-up"
 537          " otherwise concurrent mutator activity may make heap "
 538          " unparsable again");
 539   const bool use_tlab = UseTLAB;
 540   const bool deferred = _defer_initial_card_mark;
 541   // The main thread starts allocating via a TLAB even before it
 542   // has added itself to the threads list at vm boot-up.
 543   assert(!use_tlab || Threads::first() != NULL,
 544          "Attempt to fill tlabs before main thread has been added"
 545          " to threads list is doomed to failure!");
 546   for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
 547      if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
 548 #if defined(COMPILER2) || INCLUDE_JVMCI
 549      // The deferred store barriers must all have been flushed to the
 550      // card-table (or other remembered set structure) before GC starts
 551      // processing the card-table (or other remembered set).
 552      if (deferred) flush_deferred_store_barrier(thread);
 553 #else
 554      assert(!deferred, "Should be false");
 555      assert(thread->deferred_card_mark().is_empty(), "Should be empty");
 556 #endif
 557   }
 558 }
 559 
 560 void CollectedHeap::accumulate_statistics_all_tlabs() {
 561   if (UseTLAB) {
 562     assert(SafepointSynchronize::is_at_safepoint() ||
 563          !is_init_completed(),
 564          "should only accumulate statistics on tlabs at safepoint");
 565 
 566     ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
 567   }
 568 }




 224       break;
 225     }
 226     case GCCause::_metadata_GC_clear_soft_refs: {
 227       HandleMark hm;
 228       do_full_collection(true);         // do clear all soft refs
 229       break;
 230     }
 231     default:
 232       ShouldNotReachHere(); // Unexpected use of this function
 233   }
 234 }
 235 
 236 void CollectedHeap::set_barrier_set(BarrierSet* barrier_set) {
 237   _barrier_set = barrier_set;
 238   oopDesc::set_bs(_barrier_set);
 239 }
 240 
 241 void CollectedHeap::pre_initialize() {
 242   // Used for ReduceInitialCardMarks (when COMPILER2 or JVMCI is used);
 243   // otherwise remains unused.
 244 #if COMPILER2_OR_JVMCI
 245   _defer_initial_card_mark = is_server_compilation_mode_vm() && ReduceInitialCardMarks && can_elide_tlab_store_barriers()
 246                              && (DeferInitialCardMark || card_mark_must_follow_store());
 247 #else
 248   assert(_defer_initial_card_mark == false, "Who would set it?");
 249 #endif
 250 }
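
The predicate above enables deferred initial card marks only when a server compiler is in use, ReduceInitialCardMarks is set, TLAB store barriers can be elided, and at least one reason to defer applies. A minimal standalone sketch of that flag logic, assuming plain bools in place of the HotSpot globals and predicates (all names here are illustrative):

#include <cstdio>

// Hypothetical stand-ins for the HotSpot flags and predicates above.
static bool defer_initial_card_mark(bool server_compiler,      // is_server_compilation_mode_vm()
                                    bool reduce_initial_marks,  // ReduceInitialCardMarks
                                    bool can_elide_barriers,    // can_elide_tlab_store_barriers()
                                    bool defer_flag,            // DeferInitialCardMark
                                    bool mark_must_follow) {    // card_mark_must_follow_store()
  // All three enabling conditions must hold, and at least one of the
  // two deferral reasons must apply.
  return server_compiler && reduce_initial_marks && can_elide_barriers
         && (defer_flag || mark_must_follow);
}

int main() {
  // A C2 VM whose collector requires the card mark to follow the store:
  printf("%d\n", defer_initial_card_mark(true, true, true, false, true));  // 1
  // A client/interpreter-only VM never defers:
  printf("%d\n", defer_initial_card_mark(false, true, true, true, true));  // 0
  return 0;
}
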
 251 
 252 #ifndef PRODUCT
 253 void CollectedHeap::check_for_bad_heap_word_value(HeapWord* addr, size_t size) {
 254   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 255     for (size_t slot = 0; slot < size; slot += 1) {
 256       assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
 257              "Found badHeapWordVal in post-allocation check");
 258     }
 259   }
 260 }
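
A self-contained sketch of the zap-and-check idea behind this verification code: under ZapUnusedHeapArea, unused heap words are filled with a recognizable pattern, and the post-allocation check asserts the pattern is gone. The buffer and helper names are illustrative; only the badHeapWordVal constant mirrors HotSpot's.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

static const intptr_t badHeapWordVal = (intptr_t) 0xBAADBABE;  // HotSpot's zap pattern

static void zap_region(intptr_t* words, size_t count) {
  for (size_t i = 0; i < count; i++) words[i] = badHeapWordVal;  // ZapUnusedHeapArea
}

static void check_no_bad_words(const intptr_t* words, size_t count) {
  for (size_t i = 0; i < count; i++) {
    // Mirrors check_for_bad_heap_word_value: allocated memory must not
    // still carry the zap pattern.
    assert(words[i] != badHeapWordVal && "Found badHeapWordVal in post-allocation check");
  }
}

int main() {
  intptr_t region[8];
  zap_region(region, 8);              // region is "unused heap" here
  memset(region, 0, sizeof(region));  // allocation initializes the words
  check_no_bad_words(region, 8);      // passes: the zap pattern is gone
  return 0;
}
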
 261 
 262 void CollectedHeap::check_for_non_bad_heap_word_value(HeapWord* addr, size_t size) {
 263   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 264     for (size_t slot = 0; slot < size; slot += 1) {


 528   // created. Callers must ensure that mutators
 529   // aren't going to interfere -- for instance, this is permissible
 530   // if we are still single-threaded and have either not yet
 531   // started allocating (nothing much to verify) or we have
 532   // started allocating but are now a full-fledged JavaThread
 533   // (and have thus made our TLABs available for filling).
 534   assert(SafepointSynchronize::is_at_safepoint() ||
 535          !is_init_completed(),
 536          "Should only be called at a safepoint or at start-up"
 537          " otherwise concurrent mutator activity may make heap "
 538          " unparsable again");
 539   const bool use_tlab = UseTLAB;
 540   const bool deferred = _defer_initial_card_mark;
 541   // The main thread starts allocating via a TLAB even before it
 542   // has added itself to the threads list at vm boot-up.
 543   assert(!use_tlab || Threads::first() != NULL,
 544          "Attempt to fill tlabs before main thread has been added"
 545          " to threads list is doomed to failure!");
 546   for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
 547      if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
 548 #if COMPILER2_OR_JVMCI
 549      // The deferred store barriers must all have been flushed to the
 550      // card-table (or other remembered set structure) before GC starts
 551      // processing the card-table (or other remembered set).
 552      if (deferred) flush_deferred_store_barrier(thread);
 553 #else
 554      assert(!deferred, "Should be false");
 555      assert(thread->deferred_card_mark().is_empty(), "Should be empty");
 556 #endif
 557   }
 558 }
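
In the deferred case handled above, the thread has recorded the region of a recent store, and flushing means dirtying the card-table entries that cover it so the GC's card scan will revisit that region. A hedged sketch of that idea in isolation; the card size, the DeferredRegion helper, and all names are stand-ins rather than HotSpot's implementation:

#include <cstddef>
#include <cstdint>

static const size_t card_shift = 9;    // 512-byte cards, as in HotSpot's card table
static const uint8_t dirty_card = 0;   // illustrative dirty value

struct DeferredRegion {                // stand-in for thread->deferred_card_mark()
  uintptr_t start;
  uintptr_t end;                       // exclusive
  bool is_empty() const { return start == end; }
};

static void flush_deferred_store_barrier(uint8_t* card_table,
                                         uintptr_t heap_base,
                                         DeferredRegion& deferred) {
  if (deferred.is_empty()) return;
  // Dirty every card the deferred region intersects.
  size_t first = (deferred.start - heap_base) >> card_shift;
  size_t last  = (deferred.end - 1 - heap_base) >> card_shift;
  for (size_t c = first; c <= last; c++) {
    card_table[c] = dirty_card;
  }
  deferred = DeferredRegion{0, 0};     // now empty, as the asserts above expect
}

int main() {
  uint8_t cards[16];
  for (size_t i = 0; i < 16; i++) cards[i] = 0xFF;        // all clean
  uintptr_t heap_base = 0x100000;
  DeferredRegion d = {heap_base + 600, heap_base + 700};  // falls in card 1
  flush_deferred_store_barrier(cards, heap_base, d);
  return (cards[1] == dirty_card && d.is_empty()) ? 0 : 1;
}
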
 559 
 560 void CollectedHeap::accumulate_statistics_all_tlabs() {
 561   if (UseTLAB) {
 562     assert(SafepointSynchronize::is_at_safepoint() ||
 563          !is_init_completed(),
 564          "should only accumulate statistics on tlabs at safepoint");
 565 
 566     ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
 567   }
 568 }

