src/share/vm/gc/shared/collectedHeap.cpp


--- old/src/share/vm/gc/shared/collectedHeap.cpp

 214       break;
 215     }
 216     case GCCause::_last_ditch_collection: {
 217       HandleMark hm;
 218       do_full_collection(true);         // do clear all soft refs
 219       break;
 220     }
 221     default:
 222       ShouldNotReachHere(); // Unexpected use of this function
 223   }
 224 }
 225 
 226 void CollectedHeap::set_barrier_set(BarrierSet* barrier_set) {
 227   _barrier_set = barrier_set;
 228   oopDesc::set_bs(_barrier_set);
 229 }
 230 
 231 void CollectedHeap::pre_initialize() {
 232   // Used for ReduceInitialCardMarks (when COMPILER2 is used);
 233   // otherwise remains unused.
 234 #ifdef COMPILER2
 235   _defer_initial_card_mark =    ReduceInitialCardMarks && can_elide_tlab_store_barriers()
 236                              && (DeferInitialCardMark || card_mark_must_follow_store());
 237 #else
 238   assert(_defer_initial_card_mark == false, "Who would set it?");
 239 #endif
 240 }
 241 
 242 #ifndef PRODUCT
 243 void CollectedHeap::check_for_bad_heap_word_value(HeapWord* addr, size_t size) {
 244   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 245     for (size_t slot = 0; slot < size; slot += 1) {
 246       assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
 247              "Found badHeapWordValue in post-allocation check");
 248     }
 249   }
 250 }
 251 
 252 void CollectedHeap::check_for_non_bad_heap_word_value(HeapWord* addr, size_t size) {
 253   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 254     for (size_t slot = 0; slot < size; slot += 1) {

... (lines 255-521 omitted) ...

 522   // created. Callers must be careful that they know that mutators
 523   // aren't going to interfere -- for instance, this is permissible
 524   // if we are still single-threaded and have either not yet
 525   // started allocating (nothing much to verify) or we have
 526   // started allocating but are now a full-fledged JavaThread
 527   // (and have thus made our TLABs available for filling).
 528   assert(SafepointSynchronize::is_at_safepoint() ||
 529          !is_init_completed(),
 530          "Should only be called at a safepoint or at start-up,"
 531          " otherwise concurrent mutator activity may make the heap"
 532          " unparsable again");
 533   const bool use_tlab = UseTLAB;
 534   const bool deferred = _defer_initial_card_mark;
 535   // The main thread starts allocating via a TLAB even before it
 536   // has added itself to the threads list at vm boot-up.
 537   assert(!use_tlab || Threads::first() != NULL,
 538          "Attempt to fill tlabs before main thread has been added"
 539          " to threads list is doomed to failure!");
 540   for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
 541      if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
 542 #ifdef COMPILER2
 543      // The deferred store barriers must all have been flushed to the
 544      // card-table (or other remembered set structure) before GC starts
 545      // processing the card-table (or other remembered set).
 546      if (deferred) flush_deferred_store_barrier(thread);
 547 #else
 548      assert(!deferred, "Should be false");
 549      assert(thread->deferred_card_mark().is_empty(), "Should be empty");
 550 #endif
 551   }
 552 }
 553 
 554 void CollectedHeap::accumulate_statistics_all_tlabs() {
 555   if (UseTLAB) {
 556     assert(SafepointSynchronize::is_at_safepoint() ||
 557            !is_init_completed(),
 558            "should only accumulate statistics on tlabs at safepoint or at start-up");
 559 
 560     ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
 561   }
 562 }

+++ new/src/share/vm/gc/shared/collectedHeap.cpp

 214       break;
 215     }
 216     case GCCause::_last_ditch_collection: {
 217       HandleMark hm;
 218       do_full_collection(true);         // do clear all soft refs
 219       break;
 220     }
 221     default:
 222       ShouldNotReachHere(); // Unexpected use of this function
 223   }
 224 }
 225 
 226 void CollectedHeap::set_barrier_set(BarrierSet* barrier_set) {
 227   _barrier_set = barrier_set;
 228   oopDesc::set_bs(_barrier_set);
 229 }
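
A minimal usage sketch for the setter above; `heap` and `bs` are hypothetical
names, not taken from this change:

    // Sketch only: set_barrier_set() stores the barrier set in the heap and
    // also publishes it through oopDesc::set_bs(), so the heap-side accessor
    // and the static copy consulted by the oop update paths agree afterwards.
    heap->set_barrier_set(bs);
    assert(oopDesc::bs() == heap->barrier_set(), "published in both places");
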
 230 
 231 void CollectedHeap::pre_initialize() {
 232   // Used for ReduceInitialCardMarks (when COMPILER2 is used);
 233   // otherwise remains unused.
 234 #if defined(COMPILER2) || INCLUDE_JVMCI
 235   _defer_initial_card_mark =    ReduceInitialCardMarks && can_elide_tlab_store_barriers()
 236                              && (DeferInitialCardMark || card_mark_must_follow_store());
 237 #else
 238   assert(_defer_initial_card_mark == false, "Who would set it?");
 239 #endif
 240 }
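
For context, the widened guard works because HotSpot's INCLUDE_* feature
macros are always defined to 0 or 1, while COMPILER2 is only defined when C2
is built. A sketch of the pattern (the helper macro is hypothetical):

    // INCLUDE_JVMCI expands to 0 or 1 and can be tested directly;
    // COMPILER2 must be tested with defined().
    #if defined(COMPILER2) || INCLUDE_JVMCI
    #define CAN_DEFER_INITIAL_CARD_MARK 1   // hypothetical convenience macro
    #else
    #define CAN_DEFER_INITIAL_CARD_MARK 0
    #endif
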
 241 
 242 #ifndef PRODUCT
 243 void CollectedHeap::check_for_bad_heap_word_value(HeapWord* addr, size_t size) {
 244   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 245     for (size_t slot = 0; slot < size; slot += 1) {
 246       assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
 247              "Found badHeapWordValue in post-allocation check");
 248     }
 249   }
 250 }
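
A hedged usage sketch for the check above (the call site is hypothetical):
after allocating `size` HeapWords at `addr` in a non-product build, no slot
should still hold the badHeapWordVal zap pattern.

    // Hypothetical debug-time call site; NOT_PRODUCT compiles the call away
    // in product builds, matching the #ifndef PRODUCT definition above.
    NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(addr, size));
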
 251 
 252 void CollectedHeap::check_for_non_bad_heap_word_value(HeapWord* addr, size_t size) {
 253   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 254     for (size_t slot = 0; slot < size; slot += 1) {

... (lines 255-521 omitted) ...

 522   // created. Callers must be careful that they know that mutators
 523   // aren't going to interfere -- for instance, this is permissible
 524   // if we are still single-threaded and have either not yet
 525   // started allocating (nothing much to verify) or we have
 526   // started allocating but are now a full-fledged JavaThread
 527   // (and have thus made our TLABs available for filling).
 528   assert(SafepointSynchronize::is_at_safepoint() ||
 529          !is_init_completed(),
 530          "Should only be called at a safepoint or at start-up,"
 531          " otherwise concurrent mutator activity may make the heap"
 532          " unparsable again");
 533   const bool use_tlab = UseTLAB;
 534   const bool deferred = _defer_initial_card_mark;
 535   // The main thread starts allocating via a TLAB even before it
 536   // has added itself to the threads list at vm boot-up.
 537   assert(!use_tlab || Threads::first() != NULL,
 538          "Attempt to fill tlabs before main thread has been added"
 539          " to threads list is doomed to failure!");
 540   for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
 541      if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
 542 #if defined(COMPILER2) || INCLUDE_JVMCI
 543      // The deferred store barriers must all have been flushed to the
 544      // card-table (or other remembered set structure) before GC starts
 545      // processing the card-table (or other remembered set).
 546      if (deferred) flush_deferred_store_barrier(thread);
 547 #else
 548      assert(!deferred, "Should be false");
 549      assert(thread->deferred_card_mark().is_empty(), "Should be empty");
 550 #endif
 551   }
 552 }
 553 
 554 void CollectedHeap::accumulate_statistics_all_tlabs() {
 555   if (UseTLAB) {
 556     assert(SafepointSynchronize::is_at_safepoint() ||
 557            !is_init_completed(),
 558            "should only accumulate statistics on tlabs at safepoint or at start-up");
 559 
 560     ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
 561   }
 562 }
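
Taken together, the two routines above imply an ordering for a stop-the-world
prologue; a sketch under that assumption (the function name is illustrative,
not from this file):

    // Illustrative only: accumulate TLAB statistics first, while the TLABs
    // still describe the ending mutator epoch, then retire/fill TLABs and
    // flush deferred card marks so the heap is parsable and the card table
    // is complete before the GC starts scanning it.
    void gc_prologue_sketch(CollectedHeap* heap) {
      assert(SafepointSynchronize::is_at_safepoint(), "stop-the-world only");
      heap->accumulate_statistics_all_tlabs();
      heap->ensure_parsability(true /* retire_tlabs */);
    }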