src/hotspot/share/gc/cms/cmsOopClosures.inline.hpp

  23  */
  24 
  25 #ifndef SHARE_VM_GC_CMS_CMSOOPCLOSURES_INLINE_HPP
  26 #define SHARE_VM_GC_CMS_CMSOOPCLOSURES_INLINE_HPP
  27 
  28 #include "gc/cms/cmsOopClosures.hpp"
  29 #include "gc/cms/concurrentMarkSweepGeneration.hpp"
  30 #include "gc/shared/taskqueue.inline.hpp"
  31 #include "oops/oop.inline.hpp"
  32 
  33 // MetadataAwareOopClosure and MetadataAwareOopsInGenClosure are duplicated,
  34 // until we get rid of OopsInGenClosure.
  35 
  36 inline void MetadataAwareOopsInGenClosure::do_klass_nv(Klass* k) {
  37   ClassLoaderData* cld = k->class_loader_data();
  38   do_cld_nv(cld);
  39 }
  40 inline void MetadataAwareOopsInGenClosure::do_klass(Klass* k) { do_klass_nv(k); }
  41 
  42 inline void MetadataAwareOopsInGenClosure::do_cld_nv(ClassLoaderData* cld) {
  43   assert(_klass_closure._oop_closure == this, "Must be");
  44 
  45   bool claim = true;  // Must claim the class loader data before processing.
  46   cld->oops_do(_klass_closure._oop_closure, &_klass_closure, claim);
  47 }
  48 
  49 // Decode the oop and call do_oop on it.
  50 #define DO_OOP_WORK_IMPL(cls)                                 \
  51   template <class T> void cls::do_oop_work(T* p) {            \
  52     T heap_oop = oopDesc::load_heap_oop(p);                   \
  53     if (!oopDesc::is_null(heap_oop)) {                        \
  54       oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);  \
  55       do_oop(obj);                                            \
  56     }                                                         \
  57   }
  58 
  59 #define DO_OOP_WORK_NV_IMPL(cls)                              \
  60   DO_OOP_WORK_IMPL(cls)                                       \
  61   void cls::do_oop_nv(oop* p)       { cls::do_oop_work(p); }  \
  62   void cls::do_oop_nv(narrowOop* p) { cls::do_oop_work(p); }
  63 
  64 DO_OOP_WORK_IMPL(MarkRefsIntoClosure)
  65 DO_OOP_WORK_IMPL(ParMarkRefsIntoClosure)
  65 DO_OOP_WORK_IMPL(MarkRefsIntoVerifyClosure)

(old version of the file above; patched version below)

  23  */
  24 
  25 #ifndef SHARE_VM_GC_CMS_CMSOOPCLOSURES_INLINE_HPP
  26 #define SHARE_VM_GC_CMS_CMSOOPCLOSURES_INLINE_HPP
  27 
  28 #include "gc/cms/cmsOopClosures.hpp"
  29 #include "gc/cms/concurrentMarkSweepGeneration.hpp"
  30 #include "gc/shared/taskqueue.inline.hpp"
  31 #include "oops/oop.inline.hpp"
  32 
  33 // MetadataAwareOopClosure and MetadataAwareOopsInGenClosure are duplicated,
  34 // until we get rid of OopsInGenClosure.
  35 
  36 inline void MetadataAwareOopsInGenClosure::do_klass_nv(Klass* k) {
  37   ClassLoaderData* cld = k->class_loader_data();
  38   do_cld_nv(cld);
  39 }
  40 inline void MetadataAwareOopsInGenClosure::do_klass(Klass* k) { do_klass_nv(k); }
  41 
  42 inline void MetadataAwareOopsInGenClosure::do_cld_nv(ClassLoaderData* cld) {
  43   bool claim = true;  // Must claim the class loader data before processing.
  44   cld->oops_do(this, claim);
  45 }
  46 
  47 // Decode the oop and call do_oop on it.
  48 #define DO_OOP_WORK_IMPL(cls)                                 \
  49   template <class T> void cls::do_oop_work(T* p) {            \
  50     T heap_oop = oopDesc::load_heap_oop(p);                   \
  51     if (!oopDesc::is_null(heap_oop)) {                        \
  52       oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);  \
  53       do_oop(obj);                                            \
  54     }                                                         \
  55   }
  56 
  57 #define DO_OOP_WORK_NV_IMPL(cls)                              \
  58   DO_OOP_WORK_IMPL(cls)                                       \
  59   void cls::do_oop_nv(oop* p)       { cls::do_oop_work(p); }  \
  60   void cls::do_oop_nv(narrowOop* p) { cls::do_oop_work(p); }
  61 
  62 DO_OOP_WORK_IMPL(MarkRefsIntoClosure)
  63 DO_OOP_WORK_IMPL(ParMarkRefsIntoClosure)
  64 DO_OOP_WORK_IMPL(MarkRefsIntoVerifyClosure)
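
For readers less familiar with the macros above, here is a minimal, self-contained sketch (toy code, not HotSpot) of the pattern that DO_OOP_WORK_IMPL / DO_OOP_WORK_NV_IMPL generate: a single templated do_oop_work that null-checks the loaded reference, decodes it to a full pointer, and forwards to do_oop, plus the two non-virtual do_oop_nv overloads that dispatch to it for full-width and compressed slots. ToyOop, ToyNarrowOop, ToyClosure, and g_heap are invented stand-ins; the real code uses oop/narrowOop and the oopDesc helpers shown in the listing.

// Toy model of the DO_OOP_WORK_IMPL / DO_OOP_WORK_NV_IMPL pattern.
// All names here are illustrative stand-ins, not HotSpot types.
#include <cstdint>
#include <iostream>

using ToyOop       = void*;      // stand-in for a full-width oop
using ToyNarrowOop = uint32_t;   // stand-in for a compressed (narrow) oop

static char g_heap[1024];        // pretend heap base used to decode narrow refs

static ToyOop decode(ToyOop o)        { return o; }            // full oop: identity
static ToyOop decode(ToyNarrowOop o)  { return g_heap + o; }   // narrow oop: add base
static bool   is_null(ToyOop o)       { return o == nullptr; }
static bool   is_null(ToyNarrowOop o) { return o == 0; }

class ToyClosure {
 public:
  // Hand-written equivalent of what DO_OOP_WORK_IMPL(ToyClosure) would expand to:
  // load the (possibly narrow) reference, null-check, decode, then call do_oop.
  template <class T> void do_oop_work(T* p) {
    T heap_oop = *p;
    if (!is_null(heap_oop)) {
      ToyOop obj = decode(heap_oop);
      do_oop(obj);
    }
  }
  // Equivalent of the non-virtual forwarders added by DO_OOP_WORK_NV_IMPL.
  void do_oop_nv(ToyOop* p)       { do_oop_work(p); }
  void do_oop_nv(ToyNarrowOop* p) { do_oop_work(p); }

  void do_oop(ToyOop obj) { std::cout << "visited " << obj << "\n"; }
};

int main() {
  ToyClosure cl;
  ToyOop full = g_heap + 16;
  ToyNarrowOop narrow = 32;
  cl.do_oop_nv(&full);    // full-width path
  cl.do_oop_nv(&narrow);  // compressed path, decoded against g_heap
  return 0;
}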

