src/hotspot/share/opto/library_call.cpp

 247   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 248   int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
 249   Node* make_unsafe_address(Node*& base, Node* offset, DecoratorSet decorators, BasicType type = T_ILLEGAL, bool can_cast = false);
 250 
 251   typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
 252   DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
 253   bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
 254   static bool klass_needs_init_guard(Node* kls);
 255   bool inline_unsafe_allocate();
 256   bool inline_unsafe_newArray(bool uninitialized);
 257   bool inline_unsafe_writeback0();
 258   bool inline_unsafe_writebackSync0(bool is_pre);
 259   bool inline_unsafe_copyMemory();
 260   bool inline_native_currentThread();
 261 
 262   bool inline_native_time_funcs(address method, const char* funcName);
 263 #ifdef JFR_HAVE_INTRINSICS
 264   bool inline_native_classID();
 265   bool inline_native_getEventWriter();
 266 #endif
 267   bool inline_native_isInterrupted();
 268   bool inline_native_Class_query(vmIntrinsics::ID id);
 269   bool inline_native_subtype_check();
 270   bool inline_native_getLength();
 271   bool inline_array_copyOf(bool is_copyOfRange);
 272   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 273   bool inline_preconditions_checkIndex();
 274   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
 275   bool inline_native_clone(bool is_virtual);
 276   bool inline_native_Reflection_getCallerClass();
 277   // Helper function for inlining native object hash method
 278   bool inline_native_hashcode(bool is_virtual, bool is_static);
 279   bool inline_native_getClass();
 280 
 281   // Helper functions for inlining arraycopy
 282   bool inline_arraycopy();
 283   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
 284                                                 RegionNode* slow_region);
 285   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
 286   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp,
 287                                       uint new_idx);
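
The AccessKind enum above is translated into HotSpot access decorators by mo_decorator_for_access_kind(). As a minimal sketch of the expected mapping, assuming the MO_* memory-ordering decorators from accessDecorators.hpp (the real body lives in this file but outside the hunks shown, so it may differ in detail):

  DecoratorSet mo_decorator_for_access_kind(AccessKind kind) {
    switch (kind) {
      case Relaxed:  return MO_UNORDERED;  // plain access, no ordering constraints
      case Opaque:   return MO_RELAXED;    // coherent, but no cross-variable ordering
      case Acquire:  return MO_ACQUIRE;    // load-acquire semantics
      case Release:  return MO_RELEASE;    // store-release semantics
      case Volatile: return MO_SEQ_CST;    // sequentially consistent
      default:       ShouldNotReachHere(); return 0;
    }
  }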


 735   case vmIntrinsics::_compareAndExchangeLongRelease:       return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Release);
 736 
 737   case vmIntrinsics::_getAndAddByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_add,       Volatile);
 738   case vmIntrinsics::_getAndAddShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_add,       Volatile);
 739   case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
 740   case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
 741 
 742   case vmIntrinsics::_getAndSetByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_set,       Volatile);
 743   case vmIntrinsics::_getAndSetShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_set,       Volatile);
 744   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 745   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 746   case vmIntrinsics::_getAndSetReference:               return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 747 
 748   case vmIntrinsics::_loadFence:
 749   case vmIntrinsics::_storeFence:
 750   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 751 
 752   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 753 
 754   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 755   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 756 
 757 #ifdef JFR_HAVE_INTRINSICS
 758   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
 759   case vmIntrinsics::_getClassId:               return inline_native_classID();
 760   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 761 #endif
 762   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 763   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 764   case vmIntrinsics::_writeback0:               return inline_unsafe_writeback0();
 765   case vmIntrinsics::_writebackPreSync0:        return inline_unsafe_writebackSync0(true);
 766   case vmIntrinsics::_writebackPostSync0:       return inline_unsafe_writebackSync0(false);
 767   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 768   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 769   case vmIntrinsics::_getLength:                return inline_native_getLength();
 770   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 771   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 772   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 773   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 774   case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
 775   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());


3021   result_val->init_req(_null_path, null());
3022 
3023   Node* jobj_is_not_null = _gvn.transform(new IfFalseNode(iff_jobj_null));
3024   set_control(jobj_is_not_null);
3025   Node* res = access_load(jobj, TypeInstPtr::NOTNULL, T_OBJECT,
3026                           IN_NATIVE | C2_CONTROL_DEPENDENT_LOAD);
3027   result_rgn->init_req(_normal_path, control());
3028   result_val->init_req(_normal_path, res);
3029 
3030   set_result(result_rgn, result_val);
3031 
3032   return true;
3033 }
3034 
3035 #endif // JFR_HAVE_INTRINSICS
3036 
3037 //------------------------inline_native_currentThread------------------
3038 bool LibraryCallKit::inline_native_currentThread() {
3039   Node* junk = NULL;
3040   set_result(generate_current_thread(junk));
3041   return true;
3042 }
3043 
3044 //------------------------inline_native_isInterrupted------------------
3045 // private native boolean java.lang.Thread.isInterrupted(boolean ClearInterrupted);
3046 bool LibraryCallKit::inline_native_isInterrupted() {
3047   // Add a fast path to t.isInterrupted(clear_int):
3048   //   (t == Thread.current() &&
3049   //    (!TLS._osthread._interrupted || WINDOWS_ONLY(false) NOT_WINDOWS(!clear_int)))
3050   //   ? TLS._osthread._interrupted : /*slow path:*/ t.isInterrupted(clear_int)
3051   // So, in the common case that the interrupt bit is false,
3052   // we avoid making a call into the VM.  Even if the interrupt bit
3053   // is true, if the clear_int argument is false, we avoid the VM call.
3054   // However, if the receiver is not currentThread, we must call the VM,
3055   // because there must be some locking done around the operation.
3056 
3057   // We only go to the fast case code if we pass two guards.
3058   // Paths which do not pass are accumulated in the slow_region.
3059 
3060   enum {
3061     no_int_result_path   = 1, // t == Thread.current() && !TLS._osthread._interrupted
3062     no_clear_result_path = 2, // t == Thread.current() &&  TLS._osthread._interrupted && !clear_int
3063     slow_result_path     = 3, // slow path: t.isInterrupted(clear_int)
3064     PATH_LIMIT
3065   };
3066 
3067   // Ensure that it's not possible to move the load of TLS._osthread._interrupted flag
3068   // out of the function.
3069   insert_mem_bar(Op_MemBarCPUOrder);
3070 
3071   RegionNode* result_rgn = new RegionNode(PATH_LIMIT);
3072   PhiNode*    result_val = new PhiNode(result_rgn, TypeInt::BOOL);
3073 
3074   RegionNode* slow_region = new RegionNode(1);
3075   record_for_igvn(slow_region);
3076 
3077   // (a) Receiving thread must be the current thread.
3078   Node* rec_thr = argument(0);
3079   Node* tls_ptr = NULL;
3080   Node* cur_thr = generate_current_thread(tls_ptr);
3081 
 3082   // Resolve oops to stable values for the CmpP below.
3083   cur_thr = access_resolve(cur_thr, 0);
3084   rec_thr = access_resolve(rec_thr, 0);
3085 
3086   Node* cmp_thr = _gvn.transform(new CmpPNode(cur_thr, rec_thr));
3087   Node* bol_thr = _gvn.transform(new BoolNode(cmp_thr, BoolTest::ne));
3088 
3089   generate_slow_guard(bol_thr, slow_region);
3090 
3091   // (b) Interrupt bit on TLS must be false.
3092   Node* p = basic_plus_adr(top()/*!oop*/, tls_ptr, in_bytes(JavaThread::osthread_offset()));
3093   Node* osthread = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3094   p = basic_plus_adr(top()/*!oop*/, osthread, in_bytes(OSThread::interrupted_offset()));
3095 
3096   // Set the control input on the field _interrupted read to prevent it floating up.
3097   Node* int_bit = make_load(control(), p, TypeInt::BOOL, T_INT, MemNode::unordered);
3098   Node* cmp_bit = _gvn.transform(new CmpINode(int_bit, intcon(0)));
3099   Node* bol_bit = _gvn.transform(new BoolNode(cmp_bit, BoolTest::ne));
3100 
3101   IfNode* iff_bit = create_and_map_if(control(), bol_bit, PROB_UNLIKELY_MAG(3), COUNT_UNKNOWN);
3102 
3103   // First fast path:  if (!TLS._interrupted) return false;
3104   Node* false_bit = _gvn.transform(new IfFalseNode(iff_bit));
3105   result_rgn->init_req(no_int_result_path, false_bit);
3106   result_val->init_req(no_int_result_path, intcon(0));
3107 
3108   // drop through to next case
3109   set_control( _gvn.transform(new IfTrueNode(iff_bit)));
3110 
3111 #ifndef _WINDOWS
3112   // (c) Or, if interrupt bit is set and clear_int is false, use 2nd fast path.
3113   Node* clr_arg = argument(1);
3114   Node* cmp_arg = _gvn.transform(new CmpINode(clr_arg, intcon(0)));
3115   Node* bol_arg = _gvn.transform(new BoolNode(cmp_arg, BoolTest::ne));
3116   IfNode* iff_arg = create_and_map_if(control(), bol_arg, PROB_FAIR, COUNT_UNKNOWN);
3117 
3118   // Second fast path:  ... else if (!clear_int) return true;
3119   Node* false_arg = _gvn.transform(new IfFalseNode(iff_arg));
3120   result_rgn->init_req(no_clear_result_path, false_arg);
3121   result_val->init_req(no_clear_result_path, intcon(1));
3122 
3123   // drop through to next case
3124   set_control( _gvn.transform(new IfTrueNode(iff_arg)));
3125 #else
3126   // To return true on Windows you must read the _interrupted field
3127   // and check the event state i.e. take the slow path.
3128 #endif // _WINDOWS
3129 
3130   // (d) Otherwise, go to the slow path.
3131   slow_region->add_req(control());
3132   set_control( _gvn.transform(slow_region));
3133 
3134   if (stopped()) {
3135     // There is no slow path.
3136     result_rgn->init_req(slow_result_path, top());
3137     result_val->init_req(slow_result_path, top());
3138   } else {
3139     // non-virtual because it is a private non-static
3140     CallJavaNode* slow_call = generate_method_call(vmIntrinsics::_isInterrupted);
3141 
3142     Node* slow_val = set_results_for_java_call(slow_call);
3143     // this->control() comes from set_results_for_java_call
3144 
3145     Node* fast_io  = slow_call->in(TypeFunc::I_O);
3146     Node* fast_mem = slow_call->in(TypeFunc::Memory);
3147 
 3148     // These two phis are pre-filled with copies of the fast IO and Memory
3149     PhiNode* result_mem  = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3150     PhiNode* result_io   = PhiNode::make(result_rgn, fast_io,  Type::ABIO);
3151 
3152     result_rgn->init_req(slow_result_path, control());
3153     result_io ->init_req(slow_result_path, i_o());
3154     result_mem->init_req(slow_result_path, reset_memory());
3155     result_val->init_req(slow_result_path, slow_val);
3156 
3157     set_all_memory(_gvn.transform(result_mem));
3158     set_i_o(       _gvn.transform(result_io));
3159   }
3160 
3161   C->set_has_split_ifs(true); // Has chance for split-if optimization
3162   set_result(result_rgn, result_val);
3163   return true;
3164 }
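
The guard structure built node-by-node above can be hard to follow in graph form. Rendered as ordinary C++ it is the following toy model; the Model types and slow_isInterrupted() are illustrative stand-ins, not HotSpot code:

  struct OSThreadModel   { bool interrupted; };
  struct JavaThreadModel { OSThreadModel os; };

  // Stand-in for the out-of-line VM call taken on path (d).
  bool slow_isInterrupted(JavaThreadModel* t, bool clear_int);

  bool isInterrupted_model(JavaThreadModel* recv, JavaThreadModel* self,
                           bool clear_int) {
    if (recv != self)          return slow_isInterrupted(recv, clear_int); // guard (a)
    if (!self->os.interrupted) return false;  // (b): interrupt bit clear, fast exit
  #ifndef _WINDOWS
    if (!clear_int)            return true;   // (c): bit set but nothing to clear
  #endif
    return slow_isInterrupted(self, clear_int); // (d): clearing needs VM locking
  }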
3165 
3166 //---------------------------load_mirror_from_klass----------------------------
3167 // Given a klass oop, load its java mirror (a java.lang.Class oop).
3168 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
3169   Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
3170   Node* load = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3171   // mirror = ((OopHandle)mirror)->resolve();
3172   return access_load(load, TypeInstPtr::MIRROR, T_OBJECT, IN_NATIVE);
3173 }
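
The "((OopHandle)mirror)->resolve()" comment refers to the indirection used for mirrors kept in native storage: Klass::_java_mirror holds a handle rather than the oop itself, and the IN_NATIVE access_load above performs the resolve through the GC's access barriers. Conceptually, and ignoring barriers (this is not the real OopHandle class):

  struct OopHandleModel {
    oop* _obj;                              // stable native slot holding the mirror
    oop  resolve() const { return *_obj; }  // what the IN_NATIVE load above does
  };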
3174 
3175 //-----------------------load_klass_from_mirror_common-------------------------
3176 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
3177 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
3178 // and branch to the given path on the region.
3179 // If never_see_null, take an uncommon trap on null, so we can optimistically
3180 // compile for the non-null case.
3181 // If the region is NULL, force never_see_null = true.
3182 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,




 247   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 248   int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
 249   Node* make_unsafe_address(Node*& base, Node* offset, DecoratorSet decorators, BasicType type = T_ILLEGAL, bool can_cast = false);
 250 
 251   typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
 252   DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
 253   bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
 254   static bool klass_needs_init_guard(Node* kls);
 255   bool inline_unsafe_allocate();
 256   bool inline_unsafe_newArray(bool uninitialized);
 257   bool inline_unsafe_writeback0();
 258   bool inline_unsafe_writebackSync0(bool is_pre);
 259   bool inline_unsafe_copyMemory();
 260   bool inline_native_currentThread();
 261 
 262   bool inline_native_time_funcs(address method, const char* funcName);
 263 #ifdef JFR_HAVE_INTRINSICS
 264   bool inline_native_classID();
 265   bool inline_native_getEventWriter();
 266 #endif

 267   bool inline_native_Class_query(vmIntrinsics::ID id);
 268   bool inline_native_subtype_check();
 269   bool inline_native_getLength();
 270   bool inline_array_copyOf(bool is_copyOfRange);
 271   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 272   bool inline_preconditions_checkIndex();
 273   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
 274   bool inline_native_clone(bool is_virtual);
 275   bool inline_native_Reflection_getCallerClass();
 276   // Helper function for inlining native object hash method
 277   bool inline_native_hashcode(bool is_virtual, bool is_static);
 278   bool inline_native_getClass();
 279 
 280   // Helper functions for inlining arraycopy
 281   bool inline_arraycopy();
 282   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
 283                                                 RegionNode* slow_region);
 284   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
 285   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp,
 286                                       uint new_idx);


 734   case vmIntrinsics::_compareAndExchangeLongRelease:       return inline_unsafe_load_store(T_LONG,   LS_cmp_exchange,  Release);
 735 
 736   case vmIntrinsics::_getAndAddByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_add,       Volatile);
 737   case vmIntrinsics::_getAndAddShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_add,       Volatile);
 738   case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
 739   case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
 740 
 741   case vmIntrinsics::_getAndSetByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_set,       Volatile);
 742   case vmIntrinsics::_getAndSetShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_set,       Volatile);
 743   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 744   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 745   case vmIntrinsics::_getAndSetReference:               return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 746 
 747   case vmIntrinsics::_loadFence:
 748   case vmIntrinsics::_storeFence:
 749   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 750 
 751   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 752 
 753   case vmIntrinsics::_currentThread:            return inline_native_currentThread();

 754 
 755 #ifdef JFR_HAVE_INTRINSICS
 756   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
 757   case vmIntrinsics::_getClassId:               return inline_native_classID();
 758   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 759 #endif
 760   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 761   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 762   case vmIntrinsics::_writeback0:               return inline_unsafe_writeback0();
 763   case vmIntrinsics::_writebackPreSync0:        return inline_unsafe_writebackSync0(true);
 764   case vmIntrinsics::_writebackPostSync0:       return inline_unsafe_writebackSync0(false);
 765   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 766   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 767   case vmIntrinsics::_getLength:                return inline_native_getLength();
 768   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 769   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 770   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 771   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 772   case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
 773   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());


3019   result_val->init_req(_null_path, null());
3020 
3021   Node* jobj_is_not_null = _gvn.transform(new IfFalseNode(iff_jobj_null));
3022   set_control(jobj_is_not_null);
3023   Node* res = access_load(jobj, TypeInstPtr::NOTNULL, T_OBJECT,
3024                           IN_NATIVE | C2_CONTROL_DEPENDENT_LOAD);
3025   result_rgn->init_req(_normal_path, control());
3026   result_val->init_req(_normal_path, res);
3027 
3028   set_result(result_rgn, result_val);
3029 
3030   return true;
3031 }
3032 
3033 #endif // JFR_HAVE_INTRINSICS
3034 
3035 //------------------------inline_native_currentThread------------------
3036 bool LibraryCallKit::inline_native_currentThread() {
3037   Node* junk = NULL;
3038   set_result(generate_current_thread(junk));
3039   return true;
3040 }
3041 
3042 //---------------------------load_mirror_from_klass----------------------------
3043 // Given a klass oop, load its java mirror (a java.lang.Class oop).
3044 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
3045   Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
3046   Node* load = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3047   // mirror = ((OopHandle)mirror)->resolve();
3048   return access_load(load, TypeInstPtr::MIRROR, T_OBJECT, IN_NATIVE);
3049 }
3050 
3051 //-----------------------load_klass_from_mirror_common-------------------------
3052 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
3053 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
3054 // and branch to the given path on the region.
3055 // If never_see_null, take an uncommon trap on null, so we can optimistically
3056 // compile for the non-null case.
3057 // If the region is NULL, force never_see_null = true.
3058 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,

