src/share/vm/opto/library_call.cpp
rev 5098 : 8022675: Redundant class init check
Reviewed-by: duke

Old version:

 196   bool inline_string_equals();
 197   Node* round_double_node(Node* n);
 198   bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
 199   bool inline_math_native(vmIntrinsics::ID id);
 200   bool inline_trig(vmIntrinsics::ID id);
 201   bool inline_math(vmIntrinsics::ID id);
 202   bool inline_exp();
 203   bool inline_pow();
 204   void finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 205   bool inline_min_max(vmIntrinsics::ID id);
 206   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 207   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 208   int classify_unsafe_addr(Node* &base, Node* &offset);
 209   Node* make_unsafe_address(Node* base, Node* offset);
 210   // Helper for inline_unsafe_access.
 211   // Generates the guards that check whether the result of
 212   // Unsafe.getObject should be recorded in an SATB log buffer.
 213   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 214   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
 215   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 216   bool inline_unsafe_allocate();
 217   bool inline_unsafe_copyMemory();
 218   bool inline_native_currentThread();
 219 #ifdef TRACE_HAVE_INTRINSICS
 220   bool inline_native_classID();
 221   bool inline_native_threadID();
 222 #endif
 223   bool inline_native_time_funcs(address method, const char* funcName);
 224   bool inline_native_isInterrupted();
 225   bool inline_native_Class_query(vmIntrinsics::ID id);
 226   bool inline_native_subtype_check();
 227 
 228   bool inline_native_newArray();
 229   bool inline_native_getLength();
 230   bool inline_array_copyOf(bool is_copyOfRange);
 231   bool inline_array_equals();
 232   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 233   bool inline_native_clone(bool is_virtual);
 234   bool inline_native_Reflection_getCallerClass();
 235   // Helper function for inlining native object hash method


2875 bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
2876   // Regardless of form, don't allow previous ld/st to move down,
2877   // then issue acquire, release, or volatile mem_bar.
2878   insert_mem_bar(Op_MemBarCPUOrder);
2879   switch(id) {
2880     case vmIntrinsics::_loadFence:
2881       insert_mem_bar(Op_MemBarAcquire);
2882       return true;
2883     case vmIntrinsics::_storeFence:
2884       insert_mem_bar(Op_MemBarRelease);
2885       return true;
2886     case vmIntrinsics::_fullFence:
2887       insert_mem_bar(Op_MemBarVolatile);
2888       return true;
2889     default:
2890       fatal_unexpected_iid(id);
2891       return false;
2892   }
2893 }
2894 
2895 //----------------------------inline_unsafe_allocate---------------------------
2896 // public native Object sun.misc.Unsafe.allocateInstance(Class<?> cls);
2897 bool LibraryCallKit::inline_unsafe_allocate() {
2898   if (callee()->is_static())  return false;  // caller must have the capability!
2899 
2900   null_check_receiver();  // null-check, then ignore
2901   Node* cls = null_check(argument(1));
2902   if (stopped())  return true;
2903 
2904   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2905   kls = null_check(kls);
2906   if (stopped())  return true;  // argument was like int.class
2907 
2908   // Note:  The argument might still be an illegal value like
2909   // Serializable.class or Object[].class.   The runtime will handle it.
2910   // But we must make an explicit check for initialization.
2911   Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
2912   // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
2913   // can generate code to load it as unsigned byte.
2914   Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN);
2915   Node* bits = intcon(InstanceKlass::fully_initialized);
2916   Node* test = _gvn.transform(new (C) SubINode(inst, bits));
2917   // The 'test' is non-zero if we need to take a slow path.
2918 
2919   Node* obj = new_instance(kls, test);
2920   set_result(obj);
2921   return true;
2922 }
2923 
2924 #ifdef TRACE_HAVE_INTRINSICS
2925 /*
2926  * oop -> myklass
2927  * myklass->trace_id |= USED
2928  * return myklass->trace_id & ~0x3
2929  */
2930 bool LibraryCallKit::inline_native_classID() {
2931   null_check_receiver();  // null-check, then ignore
2932   Node* cls = null_check(argument(1), T_OBJECT);
2933   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2934   kls = null_check(kls, T_OBJECT);
2935   ByteSize offset = TRACE_ID_OFFSET;
2936   Node* insp = basic_plus_adr(kls, in_bytes(offset));
2937   Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG);

New version (rev 5098):

 196   bool inline_string_equals();
 197   Node* round_double_node(Node* n);
 198   bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
 199   bool inline_math_native(vmIntrinsics::ID id);
 200   bool inline_trig(vmIntrinsics::ID id);
 201   bool inline_math(vmIntrinsics::ID id);
 202   bool inline_exp();
 203   bool inline_pow();
 204   void finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 205   bool inline_min_max(vmIntrinsics::ID id);
 206   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 207   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 208   int classify_unsafe_addr(Node* &base, Node* &offset);
 209   Node* make_unsafe_address(Node* base, Node* offset);
 210   // Helper for inline_unsafe_access.
 211   // Generates the guards that check whether the result of
 212   // Unsafe.getObject should be recorded in an SATB log buffer.
 213   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 214   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
 215   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 216   static bool klass_needs_init_guard(Node* kls);
 217   bool inline_unsafe_allocate();
 218   bool inline_unsafe_copyMemory();
 219   bool inline_native_currentThread();
 220 #ifdef TRACE_HAVE_INTRINSICS
 221   bool inline_native_classID();
 222   bool inline_native_threadID();
 223 #endif
 224   bool inline_native_time_funcs(address method, const char* funcName);
 225   bool inline_native_isInterrupted();
 226   bool inline_native_Class_query(vmIntrinsics::ID id);
 227   bool inline_native_subtype_check();
 228 
 229   bool inline_native_newArray();
 230   bool inline_native_getLength();
 231   bool inline_array_copyOf(bool is_copyOfRange);
 232   bool inline_array_equals();
 233   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 234   bool inline_native_clone(bool is_virtual);
 235   bool inline_native_Reflection_getCallerClass();
 236   // Helper function for inlining native object hash method


2876 bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
2877   // Regardless of form, don't allow previous ld/st to move down,
2878   // then issue acquire, release, or volatile mem_bar.
2879   insert_mem_bar(Op_MemBarCPUOrder);
2880   switch(id) {
2881     case vmIntrinsics::_loadFence:
2882       insert_mem_bar(Op_MemBarAcquire);
2883       return true;
2884     case vmIntrinsics::_storeFence:
2885       insert_mem_bar(Op_MemBarRelease);
2886       return true;
2887     case vmIntrinsics::_fullFence:
2888       insert_mem_bar(Op_MemBarVolatile);
2889       return true;
2890     default:
2891       fatal_unexpected_iid(id);
2892       return false;
2893   }
2894 }
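
For readers who know standard C++ better than C2's Ideal graph, the three intrinsics map roughly onto C++11 atomic fences. This is an analogy sketch only; C2 emits MemBar nodes in the IR rather than calling a library:

    #include <atomic>

    // Rough C++11 analogues of the three Unsafe fence intrinsics.
    void load_fence()  { std::atomic_thread_fence(std::memory_order_acquire); }
    void store_fence() { std::atomic_thread_fence(std::memory_order_release); }
    void full_fence()  { std::atomic_thread_fence(std::memory_order_seq_cst); }
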
2895 
2896 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2897   if (!kls->is_Con()) {
2898     return true;
2899   }
2900   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2901   if (klsptr == NULL) {
2902     return true;
2903   }
2904   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2905   // don't need a guard for a klass that is already initialized
2906   return !ik->is_initialized();
2907 }
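
In words: the guard can be dropped only when kls is a compile-time constant whose type resolves to an instance klass that is already initialized. A standalone restatement of the same decision with a mock type (MockKlass is hypothetical, not a HotSpot type):

    // Mock stand-in for Node/ciInstanceKlass; hypothetical, for illustration.
    struct MockKlass {
      bool is_con;              // is the klass a compile-time constant?
      bool is_instance_klass;   // does its type resolve to an instance klass?
      bool is_initialized;      // has <clinit> already run?
    };

    // Same decision as klass_needs_init_guard: the guard survives unless
    // all three questions answer "yes".
    static bool needs_init_guard(const MockKlass& k) {
      if (!k.is_con)            return true;  // unknown at compile time
      if (!k.is_instance_klass) return true;  // cannot prove anything about it
      return !k.is_initialized;               // initialized constant: no guard
    }
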
2908 
2909 //----------------------------inline_unsafe_allocate---------------------------
2910 // public native Object sun.misc.Unsafe.allocateInstance(Class<?> cls);
2911 bool LibraryCallKit::inline_unsafe_allocate() {
2912   if (callee()->is_static())  return false;  // caller must have the capability!
2913 
2914   null_check_receiver();  // null-check, then ignore
2915   Node* cls = null_check(argument(1));
2916   if (stopped())  return true;
2917 
2918 
2919   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2920   kls = null_check(kls);
2921   if (stopped())  return true;  // argument was like int.class
2922 
2923   Node* test = NULL;
2924   if (LibraryCallKit::klass_needs_init_guard(kls)) {
2925     // Note:  The argument might still be an illegal value like
2926     // Serializable.class or Object[].class.   The runtime will handle it.
2927     // But we must make an explicit check for initialization.
2928     Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
2929     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
2930     // can generate code to load it as unsigned byte.
2931     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN);
2932     Node* bits = intcon(InstanceKlass::fully_initialized);
2933     test = _gvn.transform(new (C) SubINode(inst, bits));
2934     // The 'test' is non-zero if we need to take a slow path.
2935   }
2936 
2937   Node* obj = new_instance(kls, test);
2938   set_result(obj);
2939   return true;
2940 }
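
With the patch, test stays NULL whenever klass_needs_init_guard returns false, and new_instance then emits no slow-path check at all. The shape of the resulting code, sketched as plain C++ with mock types and hypothetical helpers (slow_path_allocate and fast_path_allocate are stand-ins, not HotSpot functions):

    // Mock types and hypothetical helpers, for illustration only.
    enum InitState { being_initialized, fully_initialized };
    struct MockKlass { InitState init_state; };
    struct MockOop   { MockKlass* klass; };

    MockOop* slow_path_allocate(MockKlass* k) {
      k->init_state = fully_initialized;      // would run <clinit> first
      return new MockOop{k};
    }
    MockOop* fast_path_allocate(MockKlass* k) {
      return new MockOop{k};                  // inline (TLAB-style) allocation
    }

    MockOop* allocate_instance(MockKlass* k, bool need_guard) {
      // need_guard is fixed at compile time; when it is false, the
      // check below is simply never emitted.
      if (need_guard && k->init_state != fully_initialized) {
        return slow_path_allocate(k);
      }
      return fast_path_allocate(k);
    }
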
2941 
2942 #ifdef TRACE_HAVE_INTRINSICS
2943 /*
2944  * oop -> myklass
2945  * myklass->trace_id |= USED
2946  * return myklass->trace_id & ~0x3
2947  */
2948 bool LibraryCallKit::inline_native_classID() {
2949   null_check_receiver();  // null-check, then ignore
2950   Node* cls = null_check(argument(1), T_OBJECT);
2951   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2952   kls = null_check(kls, T_OBJECT);
2953   ByteSize offset = TRACE_ID_OFFSET;
2954   Node* insp = basic_plus_adr(kls, in_bytes(offset));
2955   Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG);
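
The block comment above is the whole algorithm: set the USED bit, then return the trace id with the two low tag bits cleared. As a standalone sketch (the value of USED here is hypothetical; the real constant lives in the TRACE subsystem):

    #include <cstdint>

    static const uint64_t USED = 0x1;  // hypothetical value for the USED flag

    // Mark the klass as used, then return its trace id with the two low
    // tag bits masked off.
    uint64_t class_id(uint64_t& trace_id) {
      trace_id |= USED;
      return trace_id & ~uint64_t(0x3);
    }
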

