src/share/vm/gc/g1/g1ConcurrentMark.hpp
 212 // while the cycle is active. Given that, during evacuation pauses, we
 213 // do not copy objects that are explicitly marked, what we have to do
 214 // for the root regions is to scan them and mark all objects reachable
 215 // from them. According to the SATB assumptions, we only need to visit
 216 // each object once during marking. So, as long as we finish this scan
 217 // before the next evacuation pause, we can copy the objects from the
 218 // root regions without having to mark them or do anything else to them.
 219 //
 220 // Currently, we only support root region scanning once (at the start
 221 // of the marking cycle) and the root regions are all the survivor
 222 // regions populated during the initial-mark pause.
 223 class G1CMRootRegions VALUE_OBJ_CLASS_SPEC {
 224 private:
 225   YoungList*           _young_list;
 226   G1ConcurrentMark*    _cm;
 227 
 228   volatile bool        _scan_in_progress;
 229   volatile bool        _should_abort;
 230   HeapRegion* volatile _next_survivor;
 231 


 232 public:
 233   G1CMRootRegions();
 234   // We actually do most of the initialization in this method.
 235   void init(G1CollectedHeap* g1h, G1ConcurrentMark* cm);
 236 
 237   // Reset the claiming / scanning of the root regions.
 238   void prepare_for_scan();
 239 
 240   // Forces claim_next() to return NULL so that the iteration aborts early.
 241   void abort() { _should_abort = true; }
 242 
 243   // Return true if the CM thread is actively scanning root regions,
 244   // false otherwise.
 245   bool scan_in_progress() { return _scan_in_progress; }
 246 
 247   // Atomically claim the next root region to scan, or return NULL if
 248   // all have been claimed.
 249   HeapRegion* claim_next();
 250 
 251   void cancel_scan();




 212 // while the cycle is active. Given that, during evacuation pauses, we
 213 // do not copy objects that are explicitly marked, what we have to do
 214 // for the root regions is to scan them and mark all objects reachable
 215 // from them. According to the SATB assumptions, we only need to visit
 216 // each object once during marking. So, as long as we finish this scan
 217 // before the next evacuation pause, we can copy the objects from the
 218 // root regions without having to mark them or do anything else to them.
 219 //
 220 // Currently, we only support root region scanning once (at the start
 221 // of the marking cycle) and the root regions are all the survivor
 222 // regions populated during the initial-mark pause.
 223 class G1CMRootRegions VALUE_OBJ_CLASS_SPEC {
 224 private:
 225   YoungList*           _young_list;
 226   G1ConcurrentMark*    _cm;
 227 
 228   volatile bool        _scan_in_progress;
 229   volatile bool        _should_abort;
 230   HeapRegion* volatile _next_survivor;
 231 
 232   void notify_scan_done();
 233 
 234 public:
 235   G1CMRootRegions();
 236   // We actually do most of the initialization in this method.
 237   void init(G1CollectedHeap* g1h, G1ConcurrentMark* cm);
 238 
 239   // Reset the claiming / scanning of the root regions.
 240   void prepare_for_scan();
 241 
 242   // Forces claim_next() to return NULL so that the iteration aborts early.
 243   void abort() { _should_abort = true; }
 244 
 245   // Return true if the CM thread is actively scanning root regions,
 246   // false otherwise.
 247   bool scan_in_progress() { return _scan_in_progress; }
 248 
 249   // Atomically claim the next root region to scan, or return NULL if
 250   // all have been claimed.
 251   HeapRegion* claim_next();
 252 
 253   void cancel_scan();
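The class above is the synchronization point between the concurrent mark thread and evacuation pauses: the comment block requires that every root (survivor) region is scanned before the next pause, and the "HeapRegion* volatile _next_survivor" field together with the "atomically claim" comment suggests a cursor that is advanced with a compare-and-swap. Below is a minimal standalone sketch of that idea, not the HotSpot implementation: it uses std::atomic instead of HotSpot's Atomic wrappers, and Region, RootRegionsSketch and prepare() are made-up names standing in for HeapRegion, G1CMRootRegions and prepare_for_scan().

  // Standalone sketch (not HotSpot source): lock-free claiming of root
  // regions via a CAS-advanced cursor over a singly linked survivor list.
  #include <atomic>

  struct Region {              // stand-in for HeapRegion
    Region* next_survivor;     // next survivor region in the list
  };

  class RootRegionsSketch {
    std::atomic<Region*> _next_survivor{nullptr};
    std::atomic<bool>    _should_abort{false};

  public:
    // Point the cursor at the first survivor region before marking starts.
    void prepare(Region* first_survivor) {
      _should_abort.store(false, std::memory_order_relaxed);
      _next_survivor.store(first_survivor, std::memory_order_release);
    }

    // Ask claim_next() to start returning nullptr so iteration aborts early.
    void abort() { _should_abort.store(true, std::memory_order_relaxed); }

    // Atomically claim the next unclaimed survivor region; nullptr means
    // every region has been claimed or an abort was requested.
    Region* claim_next() {
      if (_should_abort.load(std::memory_order_relaxed)) {
        return nullptr;
      }
      Region* cur = _next_survivor.load(std::memory_order_acquire);
      while (cur != nullptr) {
        Region* next = cur->next_survivor;
        // Whoever advances the cursor past 'cur' owns that region exclusively.
        if (_next_survivor.compare_exchange_weak(cur, next,
                                                 std::memory_order_acq_rel)) {
          return cur;
        }
        // On failure compare_exchange_weak reloads 'cur'; just retry.
      }
      return nullptr;
    }
  };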
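Building on that sketch, the consumer side is a simple claim loop: a marking thread keeps calling claim_next() until it returns nullptr, meaning all regions were handed out or abort() was called. scan_one() and scan_root_regions() are hypothetical helpers for illustration only; in the real class, cancel_scan() and notify_scan_done() presumably cover the completion/notification side, but that code is not shown in this hunk.

  // Hypothetical driver loop; not part of the real G1CMRootRegions API.
  void scan_one(Region* hr) {
    (void)hr;  // the real code would mark every object reachable from hr
  }

  void scan_root_regions(RootRegionsSketch* root_regions) {
    Region* hr;
    while ((hr = root_regions->claim_next()) != nullptr) {
      scan_one(hr);
    }
    // All regions handed out (or an abort was requested); the real class
    // would now clear _scan_in_progress and notify any waiting pause.
  }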

