src/hotspot/share/gc/shared/genOopClosures.inline.hpp

  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_GC_SHARED_GENOOPCLOSURES_INLINE_HPP
  26 #define SHARE_GC_SHARED_GENOOPCLOSURES_INLINE_HPP
  27 
  28 #include "gc/shared/cardTableRS.hpp"
  29 #include "gc/shared/genCollectedHeap.hpp"
  30 #include "gc/shared/genOopClosures.hpp"
  31 #include "gc/shared/generation.hpp"
  32 #include "gc/shared/space.hpp"
  33 #include "oops/access.inline.hpp"
  34 #include "oops/compressedOops.inline.hpp"
  35 #include "oops/oop.inline.hpp"
  36 #if INCLUDE_SERIALGC
  37 #include "gc/serial/defNewGeneration.inline.hpp"
  38 #endif
  39 
  40 inline OopsInGenClosure::OopsInGenClosure(Generation* gen) :
  41   OopIterateClosure(gen->ref_processor()), _orig_gen(gen), _rs(NULL) {
  42   set_generation(gen);
  43 }
  44 
  45 inline void OopsInGenClosure::set_generation(Generation* gen) {
  46   _gen = gen;
  47   _gen_boundary = _gen->reserved().start();
  48   // Barrier set for the heap, must be set after heap is initialized
  49   if (_rs == NULL) {
  50     _rs = GenCollectedHeap::heap()->rem_set();
  51   }
  52 }
  53 
  54 template <class T> inline void OopsInGenClosure::do_barrier(T* p) {
  55   assert(generation()->is_in_reserved(p), "expected ref in generation");
  56   T heap_oop = RawAccess<>::oop_load(p);
  57   assert(!CompressedOops::is_null(heap_oop), "expected non-null oop");
  58   oop obj = CompressedOops::decode_not_null(heap_oop);
  59   // If p points to a younger generation, mark the card.
  60   if (cast_from_oop<HeapWord*>(obj) < _gen_boundary) {
  61     _rs->inline_write_ref_field_gc(p, obj);
  62   }
  63 }
  64 
  65 inline BasicOopsInGenClosure::BasicOopsInGenClosure(Generation* gen) : OopsInGenClosure(gen) {
  66 }
  67 
  68 inline void OopsInClassLoaderDataOrGenClosure::do_cld_barrier() {
  69   assert(_scanned_cld != NULL, "Must be");
  70   if (!_scanned_cld->has_modified_oops()) {
  71     _scanned_cld->record_modified_oops();
  72   }
  73 }
  74 
  75 #if INCLUDE_SERIALGC
  76 
  77 template <class T> inline void FastScanClosure::do_oop_work(T* p) {
  78   T heap_oop = RawAccess<>::oop_load(p);
  79   // Should we copy the obj?
  80   if (!CompressedOops::is_null(heap_oop)) {
  81     oop obj = CompressedOops::decode_not_null(heap_oop);
  82     if (cast_from_oop<HeapWord*>(obj) < _boundary) {
  83       assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?");
  84       oop new_obj = obj->is_forwarded() ? obj->forwardee()
  85                                         : _g->copy_to_survivor_space(obj);
  86       RawAccess<IS_NOT_NULL>::oop_store(p, new_obj);
  87       if (is_scanning_a_cld()) {
  88         do_cld_barrier();
  89       } else if (_gc_barrier) {
  90         // Now call parent closure
  91         do_barrier(p);
  92       }
  93     }
  94   }
  95 }
  96 
  97 inline void FastScanClosure::do_oop(oop* p)       { FastScanClosure::do_oop_work(p); }
  98 inline void FastScanClosure::do_oop(narrowOop* p) { FastScanClosure::do_oop_work(p); }
  99 
 100 #endif // INCLUDE_SERIALGC
 101 
 102 template <class T> void FilteringClosure::do_oop_work(T* p) {
 103   T heap_oop = RawAccess<>::oop_load(p);
 104   if (!CompressedOops::is_null(heap_oop)) {
 105     oop obj = CompressedOops::decode_not_null(heap_oop);
 106     if (cast_from_oop<HeapWord*>(obj) < _boundary) {
 107       _cl->do_oop(p);
 108     }
 109   }
 110 }
 111 
 112 inline void FilteringClosure::do_oop(oop* p)       { FilteringClosure::do_oop_work(p); }
 113 inline void FilteringClosure::do_oop(narrowOop* p) { FilteringClosure::do_oop_work(p); }
 114 
 115 #if INCLUDE_SERIALGC
 116 
 117 // Note similarity to FastScanClosure; the difference is that
 118 // the barrier set is taken care of outside this closure.


  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_GC_SHARED_GENOOPCLOSURES_INLINE_HPP
  26 #define SHARE_GC_SHARED_GENOOPCLOSURES_INLINE_HPP
  27 
  28 #include "gc/shared/cardTableRS.hpp"
  29 #include "gc/shared/genCollectedHeap.hpp"
  30 #include "gc/shared/genOopClosures.hpp"
  31 #include "gc/shared/generation.hpp"
  32 #include "gc/shared/space.hpp"
  33 #include "oops/access.inline.hpp"
  34 #include "oops/compressedOops.inline.hpp"
  35 #include "oops/oop.inline.hpp"
  36 #if INCLUDE_SERIALGC
  37 #include "gc/serial/defNewGeneration.inline.hpp"
  38 #endif
  39 
  40 #if INCLUDE_SERIALGC
  41 
  42 template <typename Derived>
  43 inline FastScanClosure<Derived>::FastScanClosure(DefNewGeneration* g) :
  44     BasicOopIterateClosure(g->ref_processor()),
  45     _young_gen(g),
  46     _young_gen_end(g->reserved().end()) {}
  47 
  48 template <typename Derived>
  49 template <typename T>
  50 inline void FastScanClosure<Derived>::do_oop_work(T* p) {
  51   T heap_oop = RawAccess<>::oop_load(p);
  52   // Should we copy the obj?
  53   if (!CompressedOops::is_null(heap_oop)) {
  54     oop obj = CompressedOops::decode_not_null(heap_oop);
  55     if (cast_from_oop<HeapWord*>(obj) < _young_gen_end) {
  56       assert(!_young_gen->to()->is_in_reserved(obj), "Scanning field twice?");
  57       oop new_obj = obj->is_forwarded() ? obj->forwardee()
  58                                         : _young_gen->copy_to_survivor_space(obj);
  59       RawAccess<IS_NOT_NULL>::oop_store(p, new_obj);
  60 
  61       static_cast<Derived*>(this)->barrier(p);
  62     }
  63   }
  64 }
  65 
  66 template <typename Derived>
  67 inline void FastScanClosure<Derived>::do_oop(oop* p)       { do_oop_work(p); }
  68 template <typename Derived>
  69 inline void FastScanClosure<Derived>::do_oop(narrowOop* p) { do_oop_work(p); }
  70 
  71 inline DefNewYoungerGenClosure::DefNewYoungerGenClosure(DefNewGeneration* young_gen, Generation* old_gen) :
  72     FastScanClosure<DefNewYoungerGenClosure>(young_gen),
  73     _old_gen(old_gen),
  74     _old_gen_start(old_gen->reserved().start()),
  75     _rs(GenCollectedHeap::heap()->rem_set()) {}
  76 
  77 template <typename T>
  78 void DefNewYoungerGenClosure::barrier(T* p) {
  79   assert(_old_gen->is_in_reserved(p), "expected ref in generation");
  80   T heap_oop = RawAccess<>::oop_load(p);
  81   assert(!CompressedOops::is_null(heap_oop), "expected non-null oop");
  82   oop obj = CompressedOops::decode_not_null(heap_oop);
  83   // If p points to a younger generation, mark the card.
  84   if (cast_from_oop<HeapWord*>(obj) < _old_gen_start) {
  85     _rs->inline_write_ref_field_gc(p, obj);
  86   }
  87 }
  88 
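
DefNewYoungerGenClosure::barrier above records an old->young pointer by dirtying the card for the scanned slot whenever the referent's address falls below the old generation's start. A small self-contained sketch of that decision against a toy card table (hypothetical names, not the CardTableRS API; 512 bytes is HotSpot's default card size):

// Standalone sketch (hypothetical names, not the CardTableRS API) of the
// old->young check above, paired with a toy card table.
#include <cstddef>
#include <cstdint>
#include <vector>

struct SketchCardTable {
  static constexpr std::uintptr_t card_shift = 9;   // 512-byte cards, HotSpot's default card size
  std::uintptr_t heap_start;
  std::vector<unsigned char> cards;                 // 1 = dirty, 0 = clean

  SketchCardTable(std::uintptr_t start, std::size_t heap_bytes)
    : heap_start(start), cards((heap_bytes >> card_shift) + 1, 0) {}

  // Stand-in for inline_write_ref_field_gc: dirty the card covering 'slot'.
  void dirty_card_for(std::uintptr_t slot) {
    cards[(slot - heap_start) >> card_shift] = 1;
  }

  bool is_dirty(std::uintptr_t slot) const {
    return cards[(slot - heap_start) >> card_shift] != 0;
  }
};

// Mirror of the decision in barrier(): a referent below the old generation
// start lives in the young generation, so the card covering the
// old-generation slot must be dirtied for the next young collection.
inline void maybe_mark_card(SketchCardTable& ct, std::uintptr_t slot_in_old_gen,
                            std::uintptr_t referent, std::uintptr_t old_gen_start) {
  if (referent < old_gen_start) {
    ct.dirty_card_for(slot_in_old_gen);
  }
}

int main() {
  const std::uintptr_t heap_start = 0x10000, old_gen_start = 0x20000;
  SketchCardTable ct(heap_start, 0x20000);
  maybe_mark_card(ct, 0x21000, 0x11000, old_gen_start);  // young referent: card dirtied
  maybe_mark_card(ct, 0x21000, 0x22000, old_gen_start);  // old referent: card stays clean
  return ct.is_dirty(0x21000) ? 0 : 1;
}
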
  89 inline DefNewScanClosure::DefNewScanClosure(DefNewGeneration* g) :
  90     FastScanClosure<DefNewScanClosure>(g), _scanned_cld(NULL) {}
  91 
  92 template <class T>
  93 void DefNewScanClosure::barrier(T* p) {
  94   if (_scanned_cld != NULL && !_scanned_cld->has_modified_oops()) {
  95     _scanned_cld->record_modified_oops();
  96   }
  97 }
  98 
  99 #endif // INCLUDE_SERIALGC
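
The reworked FastScanClosure is a CRTP base: do_oop_work performs the copy/forwarding and then calls static_cast<Derived*>(this)->barrier(p), so DefNewYoungerGenClosure and DefNewScanClosure plug in their barriers without a virtual dispatch per slot. A minimal, self-contained sketch of that dispatch pattern (hypothetical names, not HotSpot code):

// Minimal CRTP sketch (hypothetical names): the base class does the common
// scanning work and statically dispatches the barrier step to the derived
// class, so no virtual call is needed per processed slot.
#include <cstdio>

template <typename Derived>
class ScanBase {
public:
  void process(int* slot) {
    *slot += 1;                                    // stands in for the copy/forwarding work
    static_cast<Derived*>(this)->barrier(slot);    // resolved at compile time
  }
};

class LoggingBarrierScan : public ScanBase<LoggingBarrierScan> {
public:
  void barrier(int* slot) { std::printf("barrier for slot %p\n", (void*)slot); }
};

class NoBarrierScan : public ScanBase<NoBarrierScan> {
public:
  void barrier(int*) { /* nothing to record */ }
};

int main() {
  int slot = 41;
  LoggingBarrierScan logging;
  NoBarrierScan      quiet;
  logging.process(&slot);   // prints a message from its barrier
  quiet.process(&slot);     // barrier is a no-op
  return 0;
}
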
 100 
 101 template <class T> void FilteringClosure::do_oop_work(T* p) {
 102   T heap_oop = RawAccess<>::oop_load(p);
 103   if (!CompressedOops::is_null(heap_oop)) {
 104     oop obj = CompressedOops::decode_not_null(heap_oop);
 105     if (cast_from_oop<HeapWord*>(obj) < _boundary) {
 106       _cl->do_oop(p);
 107     }
 108   }
 109 }
 110 
 111 inline void FilteringClosure::do_oop(oop* p)       { FilteringClosure::do_oop_work(p); }
 112 inline void FilteringClosure::do_oop(narrowOop* p) { FilteringClosure::do_oop_work(p); }
 113 
 114 #if INCLUDE_SERIALGC
 115 
 116 // Note similarity to FastScanClosure; the difference is that
 117 // the barrier set is taken care of outside this closure.