src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp

rev 6498 : 8044796: G1: Enable G1CollectedHeap::stop()
Reviewed-by:


 416   HeapRegion* head;
 417   HeapRegion* hr;
 418   do {
 419     head = _dirty_cards_region_list;
 420     if (head == NULL) {
 421       return NULL;
 422     }
 423     HeapRegion* new_head = head->get_next_dirty_cards_region();
 424     if (head == new_head) {
 425       // The last region.
 426       new_head = NULL;
 427     }
 428     hr = (HeapRegion*)Atomic::cmpxchg_ptr(new_head, &_dirty_cards_region_list,
 429                                           head);
 430   } while (hr != head);
 431   assert(hr != NULL, "invariant");
 432   hr->set_next_dirty_cards_region(NULL);
 433   return hr;
 434 }
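Note: the cmpxchg loop above is a standard lock-free pop from a singly linked list: read the head, compute its successor (a region linked to itself marks the end of the list), and retry the compare-and-swap until it succeeds with an unchanged head. The standalone sketch below shows the same pattern; it uses std::atomic and a hypothetical Region type purely for illustration, not HotSpot's Atomic::cmpxchg_ptr API.

#include <atomic>

struct Region {
  Region* next;                       // analogue of get_next_dirty_cards_region()
};

std::atomic<Region*> dirty_list(nullptr);

// Pop one region, or return nullptr if the list is empty. Mirrors the loop
// above: the compare-and-swap only succeeds if the head is still the one we
// read, otherwise we reread the head and retry.
Region* pop_region() {
  Region* head = dirty_list.load();
  while (head != nullptr) {
    // A region linked to itself marks the last region in the list.
    Region* new_head = (head->next == head) ? nullptr : head->next;
    if (dirty_list.compare_exchange_weak(head, new_head)) {
      head->next = nullptr;
      return head;
    }
    // On failure compare_exchange_weak reloads 'head'; loop and retry.
  }
  return nullptr;
}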
 435 
 436 void G1CollectedHeap::stop_conc_gc_threads() {
 437   _cg1r->stop();
 438   _cmThread->stop();
 439   if (G1StringDedup::is_enabled()) {
 440     G1StringDedup::stop();
 441   }
 442 }
 443 
 444 #ifdef ASSERT
 445 // A region is added to the collection set as it is retired
 446 // so an address p can point to a region which will be in the
 447 // collection set but has not yet been retired.  This method
 448 // therefore is only accurate during a GC pause after all
 449 // regions have been retired.  It is used for debugging
 450 // to check if an nmethod has references to objects that can
 451 // be moved during a partial collection.  Though it can be
 452 // inaccurate, it is sufficient for G1 because the conservative
 453 // implementation of is_scavengable() for G1 will indicate that
 454 // all nmethods must be scanned during a partial collection.
 455 bool G1CollectedHeap::is_in_partial_collection(const void* p) {
 456   if (p == NULL) {
 457     return false;
 458   }
 459   return heap_region_containing(p)->in_collection_set();
 460 }
 461 #endif
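A hypothetical debug-only caller, just to illustrate the caveat above that the check is only meaningful during a pause once all regions have been retired (the helper and its name are illustrative, not part of this changeset):

#ifdef ASSERT
// Illustrative only: during a GC pause, after the collection set has been
// finalized, verify that an address is not about to be relocated by the
// upcoming evacuation.
static void check_not_in_partial_collection(G1CollectedHeap* g1h, const void* p) {
  assert(SafepointSynchronize::is_at_safepoint(), "only accurate during a pause");
  assert(!g1h->is_in_partial_collection(p),
         "address points into the collection set and may be moved");
}
#endif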
 462 
 463 // Returns true if the reference points to an object that


2157   // require BOT updates or not and, if it doesn't, then a non-young
2158   // region will complain that it cannot support allocations without
2159   // BOT updates. So we'll tag the dummy region as young to avoid that.
2160   dummy_region->set_young();
2161   // Make sure it's full.
2162   dummy_region->set_top(dummy_region->end());
2163   G1AllocRegion::setup(this, dummy_region);
2164 
2165   init_mutator_alloc_region();
2166 
2167   // Create the monitoring and management support now that values
2168   // in the heap have been properly initialized.
2169   _g1mm = new G1MonitoringSupport(this);
2170 
2171   G1StringDedup::initialize();
2172 
2173   return JNI_OK;
2174 }
2175 
2176 void G1CollectedHeap::stop() {
2177 #if 0
2178   // Stopping concurrent worker threads is currently disabled until
2179   // some bugs in concurrent mark have been resolved. Without fixing
2180   // those bugs first we risk hanging during VM exit when trying to
2181   // stop these threads.
2182 
2183   // Abort any ongoing concurrent root region scanning and stop all
2184   // concurrent threads. We do this to make sure these threads do
2185   // not continue to execute and access resources (e.g. gclog_or_tty)
2186   // that are destroyed during shutdown.
2187   _cm->root_regions()->abort();
2188   _cm->root_regions()->wait_until_scan_finished();
2189   stop_conc_gc_threads();
2190 #endif

2191 }
2192 
2193 size_t G1CollectedHeap::conservative_max_heap_alignment() {
2194   return HeapRegion::max_region_size();
2195 }
2196 
2197 void G1CollectedHeap::ref_processing_init() {
2198   // Reference processing in G1 currently works as follows:
2199   //
2200   // * There are two reference processor instances. One is
2201   //   used to record and process discovered references
2202   //   during concurrent marking; the other is used to
2203   //   record and process references during STW pauses
2204   //   (both full and incremental).
2205   // * Both ref processors need to 'span' the entire heap as
2206   //   the regions in the collection set may be dotted around.
2207   //
2208   // * For the concurrent marking ref processor:
2209   //   * Reference discovery is enabled at initial marking.
2210   //   * Reference discovery is disabled and the discovered




 416   HeapRegion* head;
 417   HeapRegion* hr;
 418   do {
 419     head = _dirty_cards_region_list;
 420     if (head == NULL) {
 421       return NULL;
 422     }
 423     HeapRegion* new_head = head->get_next_dirty_cards_region();
 424     if (head == new_head) {
 425       // The last region.
 426       new_head = NULL;
 427     }
 428     hr = (HeapRegion*)Atomic::cmpxchg_ptr(new_head, &_dirty_cards_region_list,
 429                                           head);
 430   } while (hr != head);
 431   assert(hr != NULL, "invariant");
 432   hr->set_next_dirty_cards_region(NULL);
 433   return hr;
 434 }
 435 

 436 #ifdef ASSERT
 437 // A region is added to the collection set as it is retired
 438 // so an address p can point to a region which will be in the
 439 // collection set but has not yet been retired.  This method
 440 // therefore is only accurate during a GC pause after all
 441 // regions have been retired.  It is used for debugging
 442 // to check if an nmethod has references to objects that can
 443 // be moved during a partial collection.  Though it can be
 444 // inaccurate, it is sufficient for G1 because the conservative
 445 // implementation of is_scavengable() for G1 will indicate that
 446 // all nmethods must be scanned during a partial collection.
 447 bool G1CollectedHeap::is_in_partial_collection(const void* p) {
 448   if (p == NULL) {
 449     return false;
 450   }
 451   return heap_region_containing(p)->in_collection_set();
 452 }
 453 #endif
 454 
 455 // Returns true if the reference points to an object that


2149   // require BOT updates or not and, if it doesn't, then a non-young
2150   // region will complain that it cannot support allocations without
2151   // BOT updates. So we'll tag the dummy region as young to avoid that.
2152   dummy_region->set_young();
2153   // Make sure it's full.
2154   dummy_region->set_top(dummy_region->end());
2155   G1AllocRegion::setup(this, dummy_region);
2156 
2157   init_mutator_alloc_region();
2158 
2159   // Create the monitoring and management support now that values
2160   // in the heap have been properly initialized.
2161   _g1mm = new G1MonitoringSupport(this);
2162 
2163   G1StringDedup::initialize();
2164 
2165   return JNI_OK;
2166 }
2167 
2168 void G1CollectedHeap::stop() {
2169   // Stop all concurrent threads. We do this to make sure these threads
2170   // do not continue to execute and access resources (e.g. gclog_or_tty)
2171   // that are destroyed during shutdown.
2172   _cg1r->stop();
2173   _cmThread->stop();
2174   if (G1StringDedup::is_enabled()) {
2175     G1StringDedup::stop();
2176   }
2177 }
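For reference, the individual stop() calls above hand the shutdown request over to each concurrent thread. Concurrent GC threads of this vintage typically implement a flag-plus-monitor termination handshake along the following lines; this is a general sketch using the conventional _should_terminate/_has_terminated fields and an illustrative class name, not the exact code of _cg1r, _cmThread or the string dedup threads:

void ExampleConcurrentGCThread::stop() {
  // 1. Publish the termination request where the thread's run() loop will see it.
  {
    MutexLockerEx ml(Terminator_lock);
    _should_terminate = true;
  }
  // 2. Wake the thread in case it is idle, waiting for work on CGC_lock.
  {
    MutexLockerEx ml(CGC_lock, Mutex::_no_safepoint_check_flag);
    CGC_lock->notify_all();
  }
  // 3. Block until run() has acknowledged termination, so the thread can no
  //    longer touch resources (e.g. gclog_or_tty) destroyed during shutdown.
  {
    MutexLockerEx ml(Terminator_lock);
    while (!_has_terminated) {
      Terminator_lock->wait();
    }
  }
}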
2178 
2179 size_t G1CollectedHeap::conservative_max_heap_alignment() {
2180   return HeapRegion::max_region_size();
2181 }
2182 
2183 void G1CollectedHeap::ref_processing_init() {
2184   // Reference processing in G1 currently works as follows:
2185   //
2186   // * There are two reference processor instances. One is
2187   //   used to record and process discovered references
2188   //   during concurrent marking; the other is used to
2189   //   record and process references during STW pauses
2190   //   (both full and incremental).
2191   // * Both ref processors need to 'span' the entire heap as
2192   //   the regions in the collection set may be dotted around.
2193   //
2194   // * For the concurrent marking ref processor:
2195   //   * Reference discovery is enabled at initial marking.
2196   //   * Reference discovery is disabled and the discovered