246 // This returns Type::AnyPtr, RawPtr, or OopPtr.
247 int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
248 Node* make_unsafe_address(Node*& base, Node* offset, DecoratorSet decorators, BasicType type = T_ILLEGAL, bool can_cast = false);
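// Roughly: classify_unsafe_addr reports OopPtr for a non-null oop base plus
// offset (on-heap field/element access), RawPtr for a null/absent base with a
// raw address (off-heap access), and AnyPtr when the distinction cannot be
// made statically; make_unsafe_address then builds the matching address node.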
249
250 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
251 DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
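// mo_decorator_for_access_kind maps each AccessKind to the memory-ordering
// decorators consumed by the Access API (in the implementation:
// Relaxed -> MO_UNORDERED, Opaque -> MO_RELAXED, Acquire -> MO_ACQUIRE,
// Release -> MO_RELEASE, Volatile -> MO_SEQ_CST).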
252 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
253 static bool klass_needs_init_guard(Node* kls);
254 bool inline_unsafe_allocate();
255 bool inline_unsafe_newArray(bool uninitialized);
256 bool inline_unsafe_writeback0();
257 bool inline_unsafe_writebackSync0(bool is_pre);
258 bool inline_unsafe_copyMemory();
259 bool inline_native_currentThread();
260
261 bool inline_native_time_funcs(address method, const char* funcName);
262 #ifdef JFR_HAVE_INTRINSICS
263 bool inline_native_classID();
264 bool inline_native_getEventWriter();
265 #endif
266 bool inline_native_isInterrupted();
267 bool inline_native_Class_query(vmIntrinsics::ID id);
268 bool inline_native_subtype_check();
269 bool inline_native_getLength();
270 bool inline_array_copyOf(bool is_copyOfRange);
271 bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
272 bool inline_preconditions_checkIndex();
273 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
274 bool inline_native_clone(bool is_virtual);
275 bool inline_native_Reflection_getCallerClass();
276 // Helper function for inlining native object hash method
277 bool inline_native_hashcode(bool is_virtual, bool is_static);
278 bool inline_native_getClass();
279
280 // Helper functions for inlining arraycopy
281 bool inline_arraycopy();
282 AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
283 RegionNode* slow_region);
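// tightly_coupled_allocation recognizes an AllocateArrayNode for the copy's
// destination that immediately precedes the arraycopy; when found, the copy
// can be folded into the new array's initialization.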
284 JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
285 void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp,
286 uint new_idx);
734 case vmIntrinsics::_compareAndExchangeLongRelease: return inline_unsafe_load_store(T_LONG, LS_cmp_exchange, Release);
735
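// The getAndAdd/getAndSet families below are emitted as atomic load-store
// nodes (LS_get_add / LS_get_set) and always use Volatile, i.e. sequentially
// consistent, ordering.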
736 case vmIntrinsics::_getAndAddByte: return inline_unsafe_load_store(T_BYTE, LS_get_add, Volatile);
737 case vmIntrinsics::_getAndAddShort: return inline_unsafe_load_store(T_SHORT, LS_get_add, Volatile);
738 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_get_add, Volatile);
739 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
740
741 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
742 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
743 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
744 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
745 case vmIntrinsics::_getAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
746
747 case vmIntrinsics::_loadFence:
748 case vmIntrinsics::_storeFence:
749 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
750
751 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
752
753 case vmIntrinsics::_currentThread: return inline_native_currentThread();
754 case vmIntrinsics::_isInterrupted: return inline_native_isInterrupted();
755
756 #ifdef JFR_HAVE_INTRINSICS
757 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
758 case vmIntrinsics::_getClassId: return inline_native_classID();
759 case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter();
760 #endif
761 case vmIntrinsics::_currentTimeMillis: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
762 case vmIntrinsics::_nanoTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
763 case vmIntrinsics::_writeback0: return inline_unsafe_writeback0();
764 case vmIntrinsics::_writebackPreSync0: return inline_unsafe_writebackSync0(true);
765 case vmIntrinsics::_writebackPostSync0: return inline_unsafe_writebackSync0(false);
766 case vmIntrinsics::_allocateInstance: return inline_unsafe_allocate();
767 case vmIntrinsics::_copyMemory: return inline_unsafe_copyMemory();
768 case vmIntrinsics::_getLength: return inline_native_getLength();
769 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
770 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
771 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
772 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
773 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
774 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
3020 result_val->init_req(_null_path, null());
3021
3022 Node* jobj_is_not_null = _gvn.transform(new IfFalseNode(iff_jobj_null));
3023 set_control(jobj_is_not_null);
3024 Node* res = access_load(jobj, TypeInstPtr::NOTNULL, T_OBJECT,
3025 IN_NATIVE | C2_CONTROL_DEPENDENT_LOAD);
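// C2_CONTROL_DEPENDENT_LOAD pins this IN_NATIVE oop load under the preceding
// jobj null check (whose not-null projection is the current control), so the
// load cannot float above that check.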
3026 result_rgn->init_req(_normal_path, control());
3027 result_val->init_req(_normal_path, res);
3028
3029 set_result(result_rgn, result_val);
3030
3031 return true;
3032 }
3033
3034 #endif // JFR_HAVE_INTRINSICS
3035
3036 //------------------------inline_native_currentThread------------------
3037 bool LibraryCallKit::inline_native_currentThread() {
3038 Node* junk = NULL;
3039 set_result(generate_current_thread(junk));
3040 return true;
3041 }
3042
3043 //------------------------inline_native_isInterrupted------------------
3044 // private native boolean java.lang.Thread.isInterrupted(boolean ClearInterrupted);
3045 bool LibraryCallKit::inline_native_isInterrupted() {
3046 // Add a fast path to t.isInterrupted(clear_int):
3047 // (t == Thread.current() &&
3048 // (!TLS._osthread._interrupted || WINDOWS_ONLY(false) NOT_WINDOWS(!clear_int)))
3049 // ? TLS._osthread._interrupted : /*slow path:*/ t.isInterrupted(clear_int)
3050 // So, in the common case that the interrupt bit is false,
3051 // we avoid making a call into the VM. Even if the interrupt bit
3052 // is true, if the clear_int argument is false, we avoid the VM call.
3053 // However, if the receiver is not currentThread, we must call the VM,
3054 // because there must be some locking done around the operation.
3055
3056 // We only go to the fast case code if we pass two guards.
3057 // Paths which do not pass are accumulated in the slow_region.
3058
3059 enum {
3060 no_int_result_path = 1, // t == Thread.current() && !TLS._osthread._interrupted
3061 no_clear_result_path = 2, // t == Thread.current() && TLS._osthread._interrupted && !clear_int
3062 slow_result_path = 3, // slow path: t.isInterrupted(clear_int)
3063 PATH_LIMIT
3064 };
3065
3066 // Ensure that it's not possible to move the load of TLS._osthread._interrupted flag
3067 // out of the function.
3068 insert_mem_bar(Op_MemBarCPUOrder);
3069
3070 RegionNode* result_rgn = new RegionNode(PATH_LIMIT);
3071 PhiNode* result_val = new PhiNode(result_rgn, TypeInt::BOOL);
3072
3073 RegionNode* slow_region = new RegionNode(1);
3074 record_for_igvn(slow_region);
3075
3076 // (a) Receiving thread must be the current thread.
3077 Node* rec_thr = argument(0);
3078 Node* tls_ptr = NULL;
3079 Node* cur_thr = generate_current_thread(tls_ptr);
3080
3081 // Resolve oops to stable for CmpP below.
3082 cur_thr = access_resolve(cur_thr, 0);
3083 rec_thr = access_resolve(rec_thr, 0);
3084
3085 Node* cmp_thr = _gvn.transform(new CmpPNode(cur_thr, rec_thr));
3086 Node* bol_thr = _gvn.transform(new BoolNode(cmp_thr, BoolTest::ne));
3087
3088 generate_slow_guard(bol_thr, slow_region);
3089
3090 // (b) Interrupt bit on TLS must be false.
3091 Node* p = basic_plus_adr(top()/*!oop*/, tls_ptr, in_bytes(JavaThread::osthread_offset()));
3092 Node* osthread = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3093 p = basic_plus_adr(top()/*!oop*/, osthread, in_bytes(OSThread::interrupted_offset()));
3094
3095 // Set the control input on the field _interrupted read to prevent it floating up.
3096 Node* int_bit = make_load(control(), p, TypeInt::BOOL, T_INT, MemNode::unordered);
3097 Node* cmp_bit = _gvn.transform(new CmpINode(int_bit, intcon(0)));
3098 Node* bol_bit = _gvn.transform(new BoolNode(cmp_bit, BoolTest::ne));
3099
3100 IfNode* iff_bit = create_and_map_if(control(), bol_bit, PROB_UNLIKELY_MAG(3), COUNT_UNKNOWN);
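// PROB_UNLIKELY_MAG(3) marks the "interrupt bit set" outcome as very unlikely,
// so the common not-interrupted case stays on the straight-line fast path.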
3101
3102 // First fast path: if (!TLS._interrupted) return false;
3103 Node* false_bit = _gvn.transform(new IfFalseNode(iff_bit));
3104 result_rgn->init_req(no_int_result_path, false_bit);
3105 result_val->init_req(no_int_result_path, intcon(0));
3106
3107 // drop through to next case
3108 set_control( _gvn.transform(new IfTrueNode(iff_bit)));
3109
3110 #ifndef _WINDOWS
3111 // (c) Or, if interrupt bit is set and clear_int is false, use 2nd fast path.
3112 Node* clr_arg = argument(1);
3113 Node* cmp_arg = _gvn.transform(new CmpINode(clr_arg, intcon(0)));
3114 Node* bol_arg = _gvn.transform(new BoolNode(cmp_arg, BoolTest::ne));
3115 IfNode* iff_arg = create_and_map_if(control(), bol_arg, PROB_FAIR, COUNT_UNKNOWN);
3116
3117 // Second fast path: ... else if (!clear_int) return true;
3118 Node* false_arg = _gvn.transform(new IfFalseNode(iff_arg));
3119 result_rgn->init_req(no_clear_result_path, false_arg);
3120 result_val->init_req(no_clear_result_path, intcon(1));
3121
3122 // drop through to next case
3123 set_control( _gvn.transform(new IfTrueNode(iff_arg)));
3124 #else
3125 // To return true on Windows you must read the _interrupted field
3126 // and check the event state, i.e. take the slow path.
3127 #endif // _WINDOWS
3128
3129 // (d) Otherwise, go to the slow path.
3130 slow_region->add_req(control());
3131 set_control( _gvn.transform(slow_region));
3132
3133 if (stopped()) {
3134 // There is no slow path.
3135 result_rgn->init_req(slow_result_path, top());
3136 result_val->init_req(slow_result_path, top());
3137 } else {
3138 // non-virtual because it is a private non-static
3139 CallJavaNode* slow_call = generate_method_call(vmIntrinsics::_isInterrupted);
3140
3141 Node* slow_val = set_results_for_java_call(slow_call);
3142 // this->control() comes from set_results_for_java_call
3143
3144 Node* fast_io = slow_call->in(TypeFunc::I_O);
3145 Node* fast_mem = slow_call->in(TypeFunc::Memory);
3146
3147 // These two phis are pre-filled with copies of the fast IO and Memory
3148 PhiNode* result_mem = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3149 PhiNode* result_io = PhiNode::make(result_rgn, fast_io, Type::ABIO);
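// The slow call can change memory and i_o, so those states are merged at
// result_rgn as well: the phis are pre-initialized with the state entering the
// call (what the fast paths see, since they do not modify memory), and the
// slow-path slot is overwritten with the post-call state below.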
3150
3151 result_rgn->init_req(slow_result_path, control());
3152 result_io ->init_req(slow_result_path, i_o());
3153 result_mem->init_req(slow_result_path, reset_memory());
3154 result_val->init_req(slow_result_path, slow_val);
3155
3156 set_all_memory(_gvn.transform(result_mem));
3157 set_i_o( _gvn.transform(result_io));
3158 }
3159
3160 C->set_has_split_ifs(true); // Has chance for split-if optimization
3161 set_result(result_rgn, result_val);
3162 return true;
3163 }
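For reference, the guard structure built above corresponds to the following decision logic. This is a minimal, standalone C++ sketch of the non-Windows case; isInterruptedSlow is a hypothetical stand-in for the out-of-line t.isInterrupted(clear_int) call, not a HotSpot function:

#include <cstdio>

// Hypothetical placeholder for the out-of-line VM call t.isInterrupted(clear_int).
static bool isInterruptedSlow(bool interrupted_bit, bool clear_int) {
  (void)clear_int;            // the real call would also clear the bit if requested
  return interrupted_bit;
}

// Mirrors the fast/slow structure of the intrinsic (non-Windows variant).
static bool isInterruptedFast(bool receiver_is_current, bool interrupted_bit, bool clear_int) {
  if (!receiver_is_current) return isInterruptedSlow(interrupted_bit, clear_int); // (a) slow path
  if (!interrupted_bit)     return false;                                         // (b) first fast path
  if (!clear_int)           return true;                                          // (c) second fast path
  return isInterruptedSlow(interrupted_bit, clear_int);                           // (d) slow path
}

int main() {
  std::printf("%d %d %d\n",
              isInterruptedFast(true,  false, true),   // 0: bit clear
              isInterruptedFast(true,  true,  false),  // 1: bit set, clearing not requested
              isInterruptedFast(false, true,  true));  // 1: not the current thread -> slow path
  return 0;
}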
3164
3165 //---------------------------load_mirror_from_klass----------------------------
3166 // Given a klass oop, load its java mirror (a java.lang.Class oop).
3167 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
3168 Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
3169 Node* load = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3170 // mirror = ((OopHandle)mirror)->resolve();
3171 return access_load(load, TypeInstPtr::MIRROR, T_OBJECT, IN_NATIVE);
3172 }
3173
3174 //-----------------------load_klass_from_mirror_common-------------------------
3175 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
3176 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
3177 // and branch to the given path on the region.
3178 // If never_see_null, take an uncommon trap on null, so we can optimistically
3179 // compile for the non-null case.
3180 // If the region is NULL, force never_see_null = true.
3181 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,
246 // This returns Type::AnyPtr, RawPtr, or OopPtr.
247 int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
248 Node* make_unsafe_address(Node*& base, Node* offset, DecoratorSet decorators, BasicType type = T_ILLEGAL, bool can_cast = false);
249
250 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
251 DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
252 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
253 static bool klass_needs_init_guard(Node* kls);
254 bool inline_unsafe_allocate();
255 bool inline_unsafe_newArray(bool uninitialized);
256 bool inline_unsafe_writeback0();
257 bool inline_unsafe_writebackSync0(bool is_pre);
258 bool inline_unsafe_copyMemory();
259 bool inline_native_currentThread();
260
261 bool inline_native_time_funcs(address method, const char* funcName);
262 #ifdef JFR_HAVE_INTRINSICS
263 bool inline_native_classID();
264 bool inline_native_getEventWriter();
265 #endif
266 bool inline_native_Class_query(vmIntrinsics::ID id);
267 bool inline_native_subtype_check();
268 bool inline_native_getLength();
269 bool inline_array_copyOf(bool is_copyOfRange);
270 bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
271 bool inline_preconditions_checkIndex();
272 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
273 bool inline_native_clone(bool is_virtual);
274 bool inline_native_Reflection_getCallerClass();
275 // Helper function for inlining native object hash method
276 bool inline_native_hashcode(bool is_virtual, bool is_static);
277 bool inline_native_getClass();
278
279 // Helper functions for inlining arraycopy
280 bool inline_arraycopy();
281 AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
282 RegionNode* slow_region);
283 JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
284 void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp,
285 uint new_idx);
733 case vmIntrinsics::_compareAndExchangeLongRelease: return inline_unsafe_load_store(T_LONG, LS_cmp_exchange, Release);
734
735 case vmIntrinsics::_getAndAddByte: return inline_unsafe_load_store(T_BYTE, LS_get_add, Volatile);
736 case vmIntrinsics::_getAndAddShort: return inline_unsafe_load_store(T_SHORT, LS_get_add, Volatile);
737 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_get_add, Volatile);
738 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
739
740 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
741 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
742 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
743 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
744 case vmIntrinsics::_getAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
745
746 case vmIntrinsics::_loadFence:
747 case vmIntrinsics::_storeFence:
748 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
749
750 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
751
752 case vmIntrinsics::_currentThread: return inline_native_currentThread();
753
754 #ifdef JFR_HAVE_INTRINSICS
755 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
756 case vmIntrinsics::_getClassId: return inline_native_classID();
757 case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter();
758 #endif
759 case vmIntrinsics::_currentTimeMillis: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
760 case vmIntrinsics::_nanoTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
761 case vmIntrinsics::_writeback0: return inline_unsafe_writeback0();
762 case vmIntrinsics::_writebackPreSync0: return inline_unsafe_writebackSync0(true);
763 case vmIntrinsics::_writebackPostSync0: return inline_unsafe_writebackSync0(false);
764 case vmIntrinsics::_allocateInstance: return inline_unsafe_allocate();
765 case vmIntrinsics::_copyMemory: return inline_unsafe_copyMemory();
766 case vmIntrinsics::_getLength: return inline_native_getLength();
767 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
768 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
769 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
770 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
771 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
772 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
3018 result_val->init_req(_null_path, null());
3019
3020 Node* jobj_is_not_null = _gvn.transform(new IfFalseNode(iff_jobj_null));
3021 set_control(jobj_is_not_null);
3022 Node* res = access_load(jobj, TypeInstPtr::NOTNULL, T_OBJECT,
3023 IN_NATIVE | C2_CONTROL_DEPENDENT_LOAD);
3024 result_rgn->init_req(_normal_path, control());
3025 result_val->init_req(_normal_path, res);
3026
3027 set_result(result_rgn, result_val);
3028
3029 return true;
3030 }
3031
3032 #endif // JFR_HAVE_INTRINSICS
3033
3034 //------------------------inline_native_currentThread------------------
3035 bool LibraryCallKit::inline_native_currentThread() {
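// generate_current_thread loads the current JavaThread's threadObj oop (via a
// ThreadLocalNode); its out-parameter receives the raw thread pointer, which
// is not needed here, hence "junk".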
3036 Node* junk = NULL;
3037 set_result(generate_current_thread(junk));
3038 return true;
3039 }
3040
3041 //---------------------------load_mirror_from_klass----------------------------
3042 // Given a klass oop, load its java mirror (a java.lang.Class oop).
3043 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
3044 Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
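// Klass::_java_mirror is an OopHandle: the raw T_ADDRESS load below reads the
// handle (a pointer to a native oop slot), and the IN_NATIVE access_load then
// loads the Class oop from that slot.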
3045 Node* load = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3046 // mirror = ((OopHandle)mirror)->resolve();
3047 return access_load(load, TypeInstPtr::MIRROR, T_OBJECT, IN_NATIVE);
3048 }
3049
3050 //-----------------------load_klass_from_mirror_common-------------------------
3051 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
3052 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
3053 // and branch to the given path on the region.
3054 // If never_see_null, take an uncommon trap on null, so we can optimistically
3055 // compile for the non-null case.
3056 // If the region is NULL, force never_see_null = true.
3057 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,