src/share/vm/gc/shared/collectedHeap.cpp


Old:
 210       break;
 211     }
 212     case GCCause::_last_ditch_collection: {
 213       HandleMark hm;
 214       do_full_collection(true);         // do clear all soft refs
 215       break;
 216     }
 217     default:
 218       ShouldNotReachHere(); // Unexpected use of this function
 219   }
 220 }
 221 
 222 void CollectedHeap::set_barrier_set(BarrierSet* barrier_set) {
 223   _barrier_set = barrier_set;
 224   oopDesc::set_bs(_barrier_set);
 225 }
 226 
 227 void CollectedHeap::pre_initialize() {
 228   // Used for ReduceInitialCardMarks (when COMPILER2 is used);
 229   // otherwise remains unused.
 230 #ifdef COMPILER2
 231   _defer_initial_card_mark =    ReduceInitialCardMarks && can_elide_tlab_store_barriers()
 232                              && (DeferInitialCardMark || card_mark_must_follow_store());
 233 #else
 234   assert(_defer_initial_card_mark == false, "Who would set it?");
 235 #endif
 236 }
 237 
 238 #ifndef PRODUCT
 239 void CollectedHeap::check_for_bad_heap_word_value(HeapWord* addr, size_t size) {
 240   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 241     for (size_t slot = 0; slot < size; slot += 1) {
 242       assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
 243              "Found badHeapWordValue in post-allocation check");
 244     }
 245   }
 246 }
 247 
 248 void CollectedHeap::check_for_non_bad_heap_word_value(HeapWord* addr, size_t size) {
 249   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 250     for (size_t slot = 0; slot < size; slot += 1) {


 518   // created. Callers must be careful to ensure that mutators
 519   // aren't going to interfere -- for instance, this is permissible
 520   // if we are still single-threaded and have either not yet
 521   // started allocating (nothing much to verify) or we have
 522   // started allocating but are now a full-fledged JavaThread
 523   // (and have thus made our TLABs available for filling).
 524   assert(SafepointSynchronize::is_at_safepoint() ||
 525          !is_init_completed(),
 526          "Should only be called at a safepoint or at start-up,"
 527          " otherwise concurrent mutator activity may make the heap"
 528          " unparsable again");
 529   const bool use_tlab = UseTLAB;
 530   const bool deferred = _defer_initial_card_mark;
 531   // The main thread starts allocating via a TLAB even before it
 532   // has added itself to the threads list at vm boot-up.
 533   assert(!use_tlab || Threads::first() != NULL,
 534          "Attempt to fill tlabs before main thread has been added"
 535          " to threads list is doomed to failure!");
 536   for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
 537      if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
 538 #ifdef COMPILER2
 539      // The deferred store barriers must all have been flushed to the
 540      // card-table (or other remembered set structure) before GC starts
 541      // processing the card-table (or other remembered set).
 542      if (deferred) flush_deferred_store_barrier(thread);
 543 #else
 544      assert(!deferred, "Should be false");
 545      assert(thread->deferred_card_mark().is_empty(), "Should be empty");
 546 #endif
 547   }
 548 }
 549 
 550 void CollectedHeap::accumulate_statistics_all_tlabs() {
 551   if (UseTLAB) {
 552     assert(SafepointSynchronize::is_at_safepoint() ||
 553          !is_init_completed(),
 554          "should only accumulate statistics on tlabs at safepoint");
 555 
 556     ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
 557   }
 558 }

New:
 210       break;
 211     }
 212     case GCCause::_last_ditch_collection: {
 213       HandleMark hm;
 214       do_full_collection(true);         // do clear all soft refs
 215       break;
 216     }
 217     default:
 218       ShouldNotReachHere(); // Unexpected use of this function
 219   }
 220 }
 221 
 222 void CollectedHeap::set_barrier_set(BarrierSet* barrier_set) {
 223   _barrier_set = barrier_set;
 224   oopDesc::set_bs(_barrier_set);
 225 }
 226 
 227 void CollectedHeap::pre_initialize() {
 228   // Used for ReduceInitialCardMarks (when COMPILER2 or JVMCI is used);
 229   // otherwise remains unused.
 230 #if defined(COMPILER2) || INCLUDE_JVMCI
 231   _defer_initial_card_mark =    ReduceInitialCardMarks && can_elide_tlab_store_barriers()
 232                              && (DeferInitialCardMark || card_mark_must_follow_store());
 233 #else
 234   assert(_defer_initial_card_mark == false, "Who would set it?");
 235 #endif
 236 }
 237 
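The only change in pre_initialize() is the guard: deferred initial card marks are now possible in JVMCI builds as well as C2 builds. A note on the #if form: as far as I can tell, HotSpot's INCLUDE_* feature macros (INCLUDE_JVMCI among them) are always defined, to either 0 or 1, so they are tested by value, while COMPILER2 is tested with defined(). A minimal sketch of that build-flag pattern, with made-up variable names:

// Sketch only: mirrors the build-flag pattern above with hypothetical names.
// Assumption: INCLUDE_JVMCI is always defined, to 0 or 1, like other INCLUDE_*
// macros, whereas COMPILER2 is either defined or absent.
#ifndef INCLUDE_JVMCI
#define INCLUDE_JVMCI 0                    // assumed default for this sketch
#endif

#if defined(COMPILER2) || INCLUDE_JVMCI
static const bool defer_card_marks_possible = true;   // C2 and/or JVMCI present
#else
static const bool defer_card_marks_possible = false;  // neither compiler built in
#endif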
 238 #ifndef PRODUCT
 239 void CollectedHeap::check_for_bad_heap_word_value(HeapWord* addr, size_t size) {
 240   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 241     for (size_t slot = 0; slot < size; slot += 1) {
 242       assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
 243              "Found badHeapWordValue in post-allocation check");
 244     }
 245   }
 246 }
 247 
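check_for_bad_heap_word_value() above is the debug-only half of heap zapping: when ZapUnusedHeapArea is on, unused heap words are filled with badHeapWordVal, and this check asserts that freshly allocated memory no longer contains that marker. A self-contained sketch of the same pattern (kBadHeapWord and the function names are stand-ins, not HotSpot's):

#include <cassert>
#include <cstddef>
#include <cstdint>

static const intptr_t kBadHeapWord = (intptr_t)0xBAADBABE;  // stand-in marker value

// Fill a word range with the marker, as heap zapping does for unused space.
static void zap_words(intptr_t* words, size_t n) {
  for (size_t i = 0; i < n; ++i) words[i] = kBadHeapWord;
}

// Post-allocation check: every word should have been initialized, i.e. no
// word may still hold the marker value.
static void check_no_bad_words(const intptr_t* words, size_t n) {
  for (size_t i = 0; i < n; ++i) {
    assert(words[i] != kBadHeapWord && "allocation left zapped memory untouched");
  }
}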
 248 void CollectedHeap::check_for_non_bad_heap_word_value(HeapWord* addr, size_t size) {
 249   if (CheckMemoryInitialization && ZapUnusedHeapArea) {
 250     for (size_t slot = 0; slot < size; slot += 1) {


 518   // created. Callers must be careful to ensure that mutators
 519   // aren't going to interfere -- for instance, this is permissible
 520   // if we are still single-threaded and have either not yet
 521   // started allocating (nothing much to verify) or we have
 522   // started allocating but are now a full-fledged JavaThread
 523   // (and have thus made our TLABs available for filling).
 524   assert(SafepointSynchronize::is_at_safepoint() ||
 525          !is_init_completed(),
 526          "Should only be called at a safepoint or at start-up,"
 527          " otherwise concurrent mutator activity may make the heap"
 528          " unparsable again");
 529   const bool use_tlab = UseTLAB;
 530   const bool deferred = _defer_initial_card_mark;
 531   // The main thread starts allocating via a TLAB even before it
 532   // has added itself to the threads list at vm boot-up.
 533   assert(!use_tlab || Threads::first() != NULL,
 534          "Attempt to fill tlabs before main thread has been added"
 535          " to threads list is doomed to failure!");
 536   for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
 537      if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
 538 #if defined(COMPILER2) || INCLUDE_JVMCI
 539      // The deferred store barriers must all have been flushed to the
 540      // card-table (or other remembered set structure) before GC starts
 541      // processing the card-table (or other remembered set).
 542      if (deferred) flush_deferred_store_barrier(thread);
 543 #else
 544      assert(!deferred, "Should be false");
 545      assert(thread->deferred_card_mark().is_empty(), "Should be empty");
 546 #endif
 547   }
 548 }
 549 
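The COMPILER2 || INCLUDE_JVMCI guard inside the loop above enforces the invariant the comments describe: before a GC scans the card table, every thread's deferred initial card mark (recorded when a compiler elided the store barrier for a freshly allocated object) must be flushed, and in builds without C2 or JVMCI no such deferral should exist. A minimal, self-contained sketch of that flush, using simplified stand-in types rather than HotSpot's JavaThread/MemRegion/CardTableModRefBS:

#include <cstddef>
#include <cstdint>
#include <vector>

// Simplified stand-ins for the sketch; not HotSpot's real classes.
struct CardTable {
  static constexpr size_t card_size_in_words = 64;      // arbitrary card size
  std::vector<uint8_t> cards;
  explicit CardTable(size_t heap_words)
    : cards(heap_words / card_size_in_words + 1, 0) {}
  void dirty_word(size_t word_index) { cards[word_index / card_size_in_words] = 1; }
};

struct MutatorThread {
  // Word range [start, end) of a new object whose initial card mark was
  // deferred; empty (start == end) when nothing is pending.
  size_t deferred_start = 0;
  size_t deferred_end   = 0;
};

// Analogue of flush_deferred_store_barrier(): dirty the cards covering the
// deferred region so the collector's card scan sees stores into the new
// object, then clear the per-thread record.
static void flush_deferred_card_mark(CardTable& ct, MutatorThread& t) {
  for (size_t w = t.deferred_start; w < t.deferred_end; ++w) {
    ct.dirty_word(w);
  }
  t.deferred_start = t.deferred_end = 0;
}

// At a safepoint the collector would call flush_deferred_card_mark() for every
// mutator thread, which is the role the per-thread loop in ensure_parsability plays.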
 550 void CollectedHeap::accumulate_statistics_all_tlabs() {
 551   if (UseTLAB) {
 552     assert(SafepointSynchronize::is_at_safepoint() ||
 553          !is_init_completed(),
 554          "should only accumulate statistics on tlabs at safepoint");
 555 
 556     ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
 557   }
 558 }

