< prev index next >

src/share/vm/gc/g1/g1OopClosures.inline.hpp

Print this page




     // Virtual do_oop entry points: both the uncompressed (oop*) and the
     // compressed (narrowOop*) reference widths forward to the shared
     // templated do_oop_work() implementation.
 161 void G1InvokeIfNotTriggeredClosure::do_oop(oop* p)       { do_oop_work(p); }
 162 void G1InvokeIfNotTriggeredClosure::do_oop(narrowOop* p) { do_oop_work(p); }
 163 
     // Process one reference slot during remembered-set update or concurrent
     // refinement. T is oop or narrowOop, matching the two do_oop overloads.
     // NOTE(review): the body is truncated in this view — the handling guarded
     // by _record_refs_into_cset (described in the trailing comment) is not
     // visible here.
 164 template <class T>
 165 inline void G1UpdateRSOrPushRefOopClosure::do_oop_work(T* p) {
     // Load the referent (decoding it if it is a compressed oop).
 166   oop obj = oopDesc::load_decode_heap_oop(p);
     // A NULL reference carries no remembered-set information.
 167   if (obj == NULL) {
 168     return;
 169   }
 170 
 171 #ifdef ASSERT
 172   // can't do because of races
 173   // assert(obj == NULL || obj->is_oop(), "expected an oop");
 174 
 175   // Do the safe subset of is_oop
 176 #ifdef CHECK_UNHANDLED_OOPS
 177   oopDesc* o = obj.obj();
 178 #else
 179   oopDesc* o = obj;
 180 #endif // CHECK_UNHANDLED_OOPS
     // Race-free sanity checks only: pointer alignment and containment in the
     // reserved heap. The full is_oop() check above is unsafe because mutator
     // threads may be modifying the object concurrently.
 181   assert((intptr_t)o % MinObjAlignmentInBytes == 0, "not oop aligned");
 182   assert(_g1->is_in_reserved(obj), "must be in heap");
 183 #endif // ASSERT
 184 
     // The slot itself must lie inside the _from region this closure was set
     // up for.
 185   assert(_from != NULL, "from region must be non-NULL");
 186   assert(_from->is_in_reserved(p), "p is not in from");
 187 
     // Only cross-region references are of interest to remembered sets.
 188   HeapRegion* to = _g1->heap_region_containing(obj);
 189   if (_from == to) {
 190     // Normally this closure should only be called with cross-region references.
 191     // But since Java threads are manipulating the references concurrently and we
 192     // reload the values things may have changed.
 193     return;
 194   }
 195 
 196   // The _record_refs_into_cset flag is true during the RSet
 197   // updating part of an evacuation pause. It is false at all
 198   // other times:
 199   //  * rebuilding the remembered sets after a full GC
 200   //  * during concurrent refinement.
 201   //  * updating the remembered sets of regions in the collection




     // Virtual do_oop entry points: both the uncompressed (oop*) and the
     // compressed (narrowOop*) reference widths forward to the shared
     // templated do_oop_work() implementation.
 161 void G1InvokeIfNotTriggeredClosure::do_oop(oop* p)       { do_oop_work(p); }
 162 void G1InvokeIfNotTriggeredClosure::do_oop(narrowOop* p) { do_oop_work(p); }
 163 
     // Process one reference slot during remembered-set update or concurrent
     // refinement. T is oop or narrowOop, matching the two do_oop overloads.
     // NOTE(review): the body is truncated in this view — the handling guarded
     // by _record_refs_into_cset (described in the trailing comment) is not
     // visible here.
 164 template <class T>
 165 inline void G1UpdateRSOrPushRefOopClosure::do_oop_work(T* p) {
     // Load the referent (decoding it if it is a compressed oop).
 166   oop obj = oopDesc::load_decode_heap_oop(p);
     // A NULL reference carries no remembered-set information.
 167   if (obj == NULL) {
 168     return;
 169   }
 170 
 171 #ifdef ASSERT
 172   // can't do because of races
 173   // assert(obj == NULL || obj->is_oop(), "expected an oop");
 174 
 175   // Do the safe subset of is_oop
 176 #ifdef CHECK_UNHANDLED_OOPS
 177   oopDesc* o = obj.obj();
 178 #else
 179   oopDesc* o = obj;
 180 #endif // CHECK_UNHANDLED_OOPS
     // Race-free sanity checks only: pointer alignment and containment in the
     // reserved heap. Alignment is checked with a bitwise AND against
     // MinObjAlignmentInBytesMask — equivalent to the modulo form for a
     // power-of-two alignment, but avoids a division.
 181   assert(((intptr_t)o & MinObjAlignmentInBytesMask) == 0, "not oop aligned");
 182   assert(_g1->is_in_reserved(obj), "must be in heap");
 183 #endif // ASSERT
 184 
     // The slot itself must lie inside the _from region this closure was set
     // up for.
 185   assert(_from != NULL, "from region must be non-NULL");
 186   assert(_from->is_in_reserved(p), "p is not in from");
 187 
     // Only cross-region references are of interest to remembered sets.
 188   HeapRegion* to = _g1->heap_region_containing(obj);
 189   if (_from == to) {
 190     // Normally this closure should only be called with cross-region references.
 191     // But since Java threads are manipulating the references concurrently and we
 192     // reload the values things may have changed.
 193     return;
 194   }
 195 
 196   // The _record_refs_into_cset flag is true during the RSet
 197   // updating part of an evacuation pause. It is false at all
 198   // other times:
 199   //  * rebuilding the remembered sets after a full GC
 200   //  * during concurrent refinement.
 201   //  * updating the remembered sets of regions in the collection


< prev index next >