src/hotspot/share/gc/g1/g1OopClosures.inline.hpp


Old version:

  98 inline void G1CMOopClosure::do_oop_work(T* p) {
  99   _task->deal_with_reference(p);
 100 }
 101 
 102 template <class T>
 103 inline void G1RootRegionScanClosure::do_oop_work(T* p) {
 104   T heap_oop = RawAccess<MO_VOLATILE>::oop_load(p);
 105   if (CompressedOops::is_null(heap_oop)) {
 106     return;
 107   }
 108   oop obj = CompressedOops::decode_not_null(heap_oop);
 109   _cm->mark_in_next_bitmap(_worker_id, obj);
 110 }
 111 
 112 template <class T>
 113 inline static void check_obj_during_refinement(T* p, oop const obj) {
 114 #ifdef ASSERT
 115   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 116   // can't do because of races
 117   // assert(oopDesc::is_oop_or_null(obj), "expected an oop");
 118   g1h->check_oop_location(obj);
 119 
 120   HeapRegion* from = g1h->heap_region_containing(p);
 121 
 122   assert(from != NULL, "from region must be non-NULL");
 123   assert(from->is_in_reserved(p) ||
 124          (from->is_humongous() &&
 125           g1h->heap_region_containing(p)->is_humongous() &&
 126           from->humongous_start_region() == g1h->heap_region_containing(p)->humongous_start_region()),
 127          "p " PTR_FORMAT " is not in the same region %u or part of the correct humongous object starting at region %u.",
 128          p2i(p), from->hrm_index(), from->humongous_start_region()->hrm_index());
 129 #endif // ASSERT
 130 }
 131 
 132 template <class T>
 133 inline void G1ConcurrentRefineOopClosure::do_oop_work(T* p) {
 134   T o = RawAccess<MO_VOLATILE>::oop_load(p);
 135   if (CompressedOops::is_null(o)) {
 136     return;
 137   }
 138   oop obj = CompressedOops::decode_not_null(o);

New version:

  98 inline void G1CMOopClosure::do_oop_work(T* p) {
  99   _task->deal_with_reference(p);
 100 }
 101 
 102 template <class T>
 103 inline void G1RootRegionScanClosure::do_oop_work(T* p) {
 104   T heap_oop = RawAccess<MO_VOLATILE>::oop_load(p);
 105   if (CompressedOops::is_null(heap_oop)) {
 106     return;
 107   }
 108   oop obj = CompressedOops::decode_not_null(heap_oop);
 109   _cm->mark_in_next_bitmap(_worker_id, obj);
 110 }
 111 
 112 template <class T>
 113 inline static void check_obj_during_refinement(T* p, oop const obj) {
 114 #ifdef ASSERT
 115   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 116   // can't do because of races
 117   // assert(oopDesc::is_oop_or_null(obj), "expected an oop");
 118   assert(g1h->is_oop_location(obj), "invalid oop location");
 119 
 120   HeapRegion* from = g1h->heap_region_containing(p);
 121 
 122   assert(from != NULL, "from region must be non-NULL");
 123   assert(from->is_in_reserved(p) ||
 124          (from->is_humongous() &&
 125           g1h->heap_region_containing(p)->is_humongous() &&
 126           from->humongous_start_region() == g1h->heap_region_containing(p)->humongous_start_region()),
 127          "p " PTR_FORMAT " is not in the same region %u or part of the correct humongous object starting at region %u.",
 128          p2i(p), from->hrm_index(), from->humongous_start_region()->hrm_index());
 129 #endif // ASSERT
 130 }
 131 
 132 template <class T>
 133 inline void G1ConcurrentRefineOopClosure::do_oop_work(T* p) {
 134   T o = RawAccess<MO_VOLATILE>::oop_load(p);
 135   if (CompressedOops::is_null(o)) {
 136     return;
 137   }
 138   oop obj = CompressedOops::decode_not_null(o);
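
Note on the change: the only line that differs between the two versions shown above is 118, inside check_obj_during_refinement(). Taking the frames in the usual old-then-new order, the old version calls g1h->check_oop_location(obj), while the new version states the check directly as assert(g1h->is_oop_location(obj), "invalid oop location"), so the failure message is given at the refinement call site. Both forms sit inside the #ifdef ASSERT block, so the verification remains debug-only. Neither helper's body appears in this hunk; the standalone sketch below only illustrates the kind of predicate such a check typically is (the address is object-aligned and inside the reserved heap), and the FakeHeap type, its bounds, and the NULL-is-acceptable rule are assumptions made up for the example, not HotSpot code.

#include <cassert>
#include <cstdint>

// Stand-in for the heap; the real G1CollectedHeap implementation is not part
// of this hunk, so everything here is illustrative.
struct FakeHeap {
  uintptr_t reserved_start;    // first address of the reserved heap (made up)
  uintptr_t reserved_end;      // one past the last reserved address (made up)
  uintptr_t object_alignment;  // minimum object alignment in bytes (made up)

  // Assumed behaviour: NULL is acceptable, any other address must be
  // object-aligned and fall inside the reserved range.
  bool is_oop_location(const void* addr) const {
    if (addr == nullptr) {
      return true;
    }
    uintptr_t a = reinterpret_cast<uintptr_t>(addr);
    return (a % object_alignment) == 0 &&
           a >= reserved_start && a < reserved_end;
  }
};

int main() {
  FakeHeap heap{0x100000, 0x200000, 8};
  // Same shape as the new line 118: assert on a boolean predicate with a
  // message at the call site, instead of calling a helper that checks
  // internally.
  assert(heap.is_oop_location(reinterpret_cast<void*>(0x100008)) && "invalid oop location");
  assert(!heap.is_oop_location(reinterpret_cast<void*>(0x100003)));  // misaligned address
  assert(!heap.is_oop_location(reinterpret_cast<void*>(0x300000)));  // outside the reservation
  return 0;
}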

