< prev index next >

src/share/vm/oops/instanceKlass.hpp

Print this page




 937   // This bit is initialized in classFileParser.cpp.
 938   // It is false under any of the following conditions:
 939   //  - the class is abstract (including any interface)
 940   //  - the class has a finalizer (if !RegisterFinalizersAtInit)
 941   //  - the class size is larger than FastAllocateSizeLimit
 942   //  - the class is java/lang/Class, which cannot be allocated directly
       // Fast-path allocation is allowed iff the encoded layout helper does
       // not demand the slow path (see layout_helper_needs_slow_path()).
 943   bool can_be_fastpath_allocated() const {
 944     return !layout_helper_needs_slow_path(layout_helper());
 945   }
 946 
 947   // Java vtable/itable
       // Per the trailing comments, vtable()/itable() return freshly created
       // wrapper objects on every call; callers own the returned wrappers.
 948   klassVtable* vtable() const;        // return new klassVtable wrapper
 949   inline Method* method_at_vtable(int index);
 950   klassItable* itable() const;        // return new klassItable wrapper
       // TRAPS: this lookup can raise an exception; callers must check for
       // pending exceptions after the call.
 951   Method* method_at_itable(Klass* holder, int index, TRAPS);
 952 
 953 #if INCLUDE_JVMTI
       // JVMTI support -- presumably invoked during class redefinition to fix
       // up default-method references; confirm against instanceKlass.cpp.
 954   void adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed);
 955 #endif // INCLUDE_JVMTI
 956 
 957   // Garbage collection
 958   void oop_follow_contents(oop obj);
 959   int  oop_adjust_pointers(oop obj);
 960 
       // Class-unloading cleanup: drop entries whose referents the given
       // closure reports dead.
 961   void clean_implementors_list(BoolObjectClosure* is_alive);
 962   void clean_method_data(BoolObjectClosure* is_alive);
 963   void clean_dependent_nmethods();
 964 
 965   // Explicit metaspace deallocation of fields
 966   // For RedefineClasses and class file parsing errors, we need to deallocate
 967   // instanceKlasses and the metadata they point to.
 968   void deallocate_contents(ClassLoaderData* loader_data);
 969   static void deallocate_methods(ClassLoaderData* loader_data,
 970                                  Array<Method*>* methods);
       // Specifier order fixed from "void static" to the conventional
       // "static void", matching deallocate_methods above; meaning unchanged.
 971   static void deallocate_interfaces(ClassLoaderData* loader_data,
 972                                     Klass* super_klass,
 973                                     Array<Klass*>* local_interfaces,
 974                                     Array<Klass*>* transitive_interfaces);
 975 
 976   // The constant pool is on stack if any of the methods are executing or
 977   // referenced by handles.
 978   bool on_stack() const { return _constants->on_stack(); }
 979 
 980   // callbacks for actions during class unloading
 981   static void notify_unload_class(InstanceKlass* ik);
 982   static void release_C_heap_structures(InstanceKlass* ik);
 983 
 984   // Parallel Scavenge and Parallel Old
       // Macro expanding to the PS/ParOld GC entry-point declarations; see the
       // macro definition for the exact signatures.
 985   PARALLEL_GC_DECLS
 986 
 987   // Naming
       // Presumably the JVM-signature form of the class name; confirm in the
       // .cpp implementation.
 988   const char* signature_name() const;
 989 
 990   // Iterators
       // Virtual-dispatch entry points; both simply delegate to the
       // corresponding _v variants.
 991   int oop_oop_iterate(oop obj, ExtendedOopClosure* blk) {
 992     return oop_oop_iterate_v(obj, blk);
 993   }
 994 
 995   int oop_oop_iterate_m(oop obj, ExtendedOopClosure* blk, MemRegion mr) {
 996     return oop_oop_iterate_v_m(obj, blk, mr);
 997   }
















































































 998 
 999 #define InstanceKlass_OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix)      \
1000   int  oop_oop_iterate##nv_suffix(oop obj, OopClosureType* blk);           \
1001   int  oop_oop_iterate##nv_suffix##_m(oop obj, OopClosureType* blk,        \
1002                                       MemRegion mr);
1003 
       // Stamp out one (iterate, iterate_m) declaration pair per closure type.
1004   ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DECL)
1005   ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DECL)
1006 
1007 #if INCLUDE_ALL_GCS
1008 #define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL(OopClosureType, nv_suffix) \
1009   int  oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* blk);
1010 
1011   ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
1012   ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
1013 #endif // INCLUDE_ALL_GCS
1014 
1015   u2 idnum_allocated_count() const      { return _idnum_allocated_count; }
1016 
1017 public:
       // CDS dump-time only (asserted below): mark this klass as failed so it
       // can be excluded when writing the shared archive.
1018   void set_in_error_state() {
1019     assert(DumpSharedSpaces, "only call this when dumping archive");
1020     _init_state = initialization_error;
1021   }
1022   bool check_sharing_error_state();
1023 
1024 private:
1025   // initialization state
       // With ASSERT the setter is out-of-line (presumably to validate state
       // transitions -- see instanceKlass.cpp); otherwise it is a plain store.
1026 #ifdef ASSERT
1027   void set_init_state(ClassState state);
1028 #else
1029   void set_init_state(ClassState state) { _init_state = (u1)state; }




 937   // This bit is initialized in classFileParser.cpp.
 938   // It is false under any of the following conditions:
 939   //  - the class is abstract (including any interface)
 940   //  - the class has a finalizer (if !RegisterFinalizersAtInit)
 941   //  - the class size is larger than FastAllocateSizeLimit
 942   //  - the class is java/lang/Class, which cannot be allocated directly
       // True iff the encoded layout helper does not force slow-path
       // allocation (layout_helper_needs_slow_path()).
 943   bool can_be_fastpath_allocated() const {
 944     return !layout_helper_needs_slow_path(layout_helper());
 945   }
 946 
 947   // Java vtable/itable
       // Per the trailing comments, each call allocates a new wrapper object;
       // the caller owns it.
 948   klassVtable* vtable() const;        // return new klassVtable wrapper
 949   inline Method* method_at_vtable(int index);
 950   klassItable* itable() const;        // return new klassItable wrapper
       // TRAPS: may leave a pending exception; callers must check.
 951   Method* method_at_itable(Klass* holder, int index, TRAPS);
 952 
 953 #if INCLUDE_JVMTI
       // JVMTI support -- presumably used by class redefinition to update
       // default-method references; confirm in the .cpp file.
 954   void adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed);
 955 #endif // INCLUDE_JVMTI
 956 




 957   void clean_implementors_list(BoolObjectClosure* is_alive);
 958   void clean_method_data(BoolObjectClosure* is_alive);
 959   void clean_dependent_nmethods();
 960 
 961   // Explicit metaspace deallocation of fields
 962   // For RedefineClasses and class file parsing errors, we need to deallocate
 963   // instanceKlasses and the metadata they point to.
 964   void deallocate_contents(ClassLoaderData* loader_data);
 965   static void deallocate_methods(ClassLoaderData* loader_data,
 966                                  Array<Method*>* methods);
 967   void static deallocate_interfaces(ClassLoaderData* loader_data,
 968                                     Klass* super_klass,
 969                                     Array<Klass*>* local_interfaces,
 970                                     Array<Klass*>* transitive_interfaces);
 971 
 972   // The constant pool is on stack if any of the methods are executing or
 973   // referenced by handles.
 974   bool on_stack() const { return _constants->on_stack(); }
 975 
 976   // callbacks for actions during class unloading
 977   static void notify_unload_class(InstanceKlass* ik);
 978   static void release_C_heap_structures(InstanceKlass* ik);
 979 



 980   // Naming
       // Presumably the JVM-signature form of the class name; confirm in the
       // .cpp implementation.
 981   const char* signature_name() const;
 982 
 983   // GC specific object visitors
 984   //
       // Naming convention (per the comments below): oop_ms_* = Mark Sweep,
       // oop_ps_* = Parallel Scavenge, oop_pc_* = Parallel Compact.
 985   // Mark Sweep
 986   void oop_ms_follow_contents(oop obj);
 987   int  oop_ms_adjust_pointers(oop obj);
 988 #if INCLUDE_ALL_GCS
 989   // Parallel Scavenge
 990   void oop_ps_push_contents(  oop obj, PSPromotionManager* pm);
 991   // Parallel Compact
 992   void oop_pc_follow_contents(oop obj, ParCompactionManager* cm);
 993   void oop_pc_update_pointers(oop obj);
 994 #endif
 995 
 996   // Oop fields (and metadata) iterators
 997   //  [nv = true]  Use non-virtual calls to do_oop_nv.
 998   //  [nv = false] Use virtual calls to do_oop.
 999   //
1000   // The InstanceKlass iterators also visit the Object's klass.
1001   
1002   // Forward iteration
1003  public:
1004   // Iterate over all oop fields in the oop maps.
1005   template <bool nv, class OopClosureType>
1006   inline void oop_oop_iterate_oop_maps(oop obj, OopClosureType* closure);
1007 
1008  protected:
1009   // Iterate over all oop fields and metadata.
1010   template <bool nv, class OopClosureType>
1011   inline int oop_oop_iterate(oop obj, OopClosureType* closure);
1012 
1013  private:
1014   // Iterate over all oop fields in the oop maps.
1015   // Specialized for [T = oop] or [T = narrowOop].
1016   template <bool nv, typename T, class OopClosureType>
1017   inline void oop_oop_iterate_oop_maps_specialized(oop obj, OopClosureType* closure);
1018 
1019   // Iterate over all oop fields in one oop map.
1020   template <bool nv, typename T, class OopClosureType>
1021   inline void oop_oop_iterate_oop_map(OopMapBlock* map, oop obj, OopClosureType* closure);
1022 
1023 
       // Reverse iteration mirrors the forward group above, but is only
       // needed by the additional collectors (INCLUDE_ALL_GCS builds).
1024   // Reverse iteration
1025 #if INCLUDE_ALL_GCS
1026  public:
1027   // Iterate over all oop fields in the oop maps.
1028   template <bool nv, class OopClosureType>
1029   inline void oop_oop_iterate_oop_maps_reverse(oop obj, OopClosureType* closure);
1030 
1031  protected:
1032   // Iterate over all oop fields and metadata.
1033   template <bool nv, class OopClosureType>
1034   inline int oop_oop_iterate_reverse(oop obj, OopClosureType* closure);
1035 
1036  private:
1037   // Iterate over all oop fields in the oop maps.
1038   // Specialized for [T = oop] or [T = narrowOop].
1039   template <bool nv, typename T, class OopClosureType>
1040   inline void oop_oop_iterate_oop_maps_specialized_reverse(oop obj, OopClosureType* closure);
1041 
1042   // Iterate over all oop fields in one oop map.
1043   template <bool nv, typename T, class OopClosureType>
1044   inline void oop_oop_iterate_oop_map_reverse(OopMapBlock* map, oop obj, OopClosureType* closure);
1045 #endif
1046 
1047 
       // Bounded variants restrict iteration to oop fields inside MemRegion mr.
1048   // Bounded range iteration
1049  public:
1050   // Iterate over all oop fields in the oop maps.
1051   template <bool nv, class OopClosureType>
1052   inline void oop_oop_iterate_oop_maps_bounded(oop obj, OopClosureType* closure, MemRegion mr);
1053 
1054  protected:
1055   // Iterate over all oop fields and metadata.
1056   template <bool nv, class OopClosureType>
1057   inline int oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr);
1058 
1059  private:
1060   // Iterate over all oop fields in the oop maps.
1061   // Specialized for [T = oop] or [T = narrowOop].
1062   template <bool nv, typename T, class OopClosureType>
1063   inline void oop_oop_iterate_oop_maps_specialized_bounded(oop obj, OopClosureType* closure, MemRegion mr);
1064 
1065   // Iterate over all oop fields in one oop map.
1066   template <bool nv, typename T, class OopClosureType>
1067   inline void oop_oop_iterate_oop_map_bounded(OopMapBlock* map, oop obj, OopClosureType* closure, MemRegion mr);
1068 
1069 
1070  public:
1071 
       // Declares the non-template dispatch pair (iterate, iterate_m) for one
       // closure type; instantiated for every closure via the ALL_* macros.
1072 #define InstanceKlass_OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix)                   \
1073   int  oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure);                    \
1074   int  oop_oop_iterate##nv_suffix##_m(oop obj, OopClosureType* closure, MemRegion mr);

1075 
       // Instantiate the declaration pair above for every closure type.
1076   ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DECL)
1077   ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DECL)
1078 
1079 #if INCLUDE_ALL_GCS
1080 #define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL(OopClosureType, nv_suffix)  \
1081   int  oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure);
1082 
1083   ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
1084   ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
1085 #endif // INCLUDE_ALL_GCS
1086 
1087   u2 idnum_allocated_count() const      { return _idnum_allocated_count; }
1088 
1089 public:
       // CDS dump-time only (asserted below): flag this klass as failed so it
       // is skipped when the shared archive is written.
1090   void set_in_error_state() {
1091     assert(DumpSharedSpaces, "only call this when dumping archive");
1092     _init_state = initialization_error;
1093   }
1094   bool check_sharing_error_state();
1095 
1096 private:
1097   // initialization state
       // Out-of-line under ASSERT (presumably to validate state transitions --
       // see instanceKlass.cpp); a trivial inline store otherwise.
1098 #ifdef ASSERT
1099   void set_init_state(ClassState state);
1100 #else
1101   void set_init_state(ClassState state) { _init_state = (u1)state; }


< prev index next >