242 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
243 // This returns Type::AnyPtr, RawPtr, or OopPtr.
244 int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
245 Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
246 // Helper for inline_unsafe_access.
247 // Generates the guards that check whether the result of
248 // Unsafe.getObject should be recorded in an SATB log buffer.
249 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
250
// Memory-ordering flavor requested for an Unsafe access; passed to
// inline_unsafe_access below.
251 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
252 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
253 static bool klass_needs_init_guard(Node* kls);
254 bool inline_unsafe_allocate();
255 bool inline_unsafe_newArray(bool uninitialized);
256 bool inline_unsafe_copyMemory();
257 bool inline_native_currentThread();
258
259 bool inline_native_time_funcs(address method, const char* funcName);
// Trace/JFR intrinsics are only compiled in when the tracing backend
// provides them.
260 #ifdef TRACE_HAVE_INTRINSICS
261 bool inline_native_classID();
262 bool inline_native_getBufferWriter();
263 #endif
264 bool inline_native_isInterrupted();
265 bool inline_native_Class_query(vmIntrinsics::ID id);
266 bool inline_native_subtype_check();
267 bool inline_native_getLength();
268 bool inline_array_copyOf(bool is_copyOfRange);
269 bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
270 bool inline_preconditions_checkIndex();
271 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
272 bool inline_native_clone(bool is_virtual);
273 bool inline_native_Reflection_getCallerClass();
274 // Helper function for inlining native object hash method
275 bool inline_native_hashcode(bool is_virtual, bool is_static);
276 bool inline_native_getClass();
277
278 // Helper functions for inlining arraycopy
279 bool inline_arraycopy();
// NOTE(review): presumably returns the AllocateArrayNode feeding 'ptr'
// when the allocation can be fused with the copy — confirm in the
// implementation.
280 AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
281 RegionNode* slow_region);
282 JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
// Partial view of the intrinsic dispatch switch: each case routes one
// vmIntrinsics ID to its dedicated inline_* handler and returns its result.
731 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
732
// Atomic get-and-set variants, all requested with Volatile ordering.
733 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
734 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
735 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
736 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
737 case vmIntrinsics::_getAndSetObject: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
738
// The three fence intrinsics share one inliner keyed by the intrinsic id.
739 case vmIntrinsics::_loadFence:
740 case vmIntrinsics::_storeFence:
741 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
742
743 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
744
745 case vmIntrinsics::_currentThread: return inline_native_currentThread();
746 case vmIntrinsics::_isInterrupted: return inline_native_isInterrupted();
747
// Trace/JFR intrinsics, present only when the tracing backend supplies them.
748 #ifdef TRACE_HAVE_INTRINSICS
749 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
750 case vmIntrinsics::_getClassId: return inline_native_classID();
751 case vmIntrinsics::_getBufferWriter: return inline_native_getBufferWriter();
752 #endif
753 case vmIntrinsics::_currentTimeMillis: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
754 case vmIntrinsics::_nanoTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
755 case vmIntrinsics::_allocateInstance: return inline_unsafe_allocate();
756 case vmIntrinsics::_copyMemory: return inline_unsafe_copyMemory();
757 case vmIntrinsics::_getLength: return inline_native_getLength();
758 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
759 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
760 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
761 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
762 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
763 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
764
765 case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
766 case vmIntrinsics::_newArray: return inline_unsafe_newArray(false);
767
768 case vmIntrinsics::_isAssignableFrom: return inline_native_subtype_check();
769
// These Class-query intrinsics fall through to a shared handler
// (continuation not visible in this chunk).
770 case vmIntrinsics::_isInstance:
771 case vmIntrinsics::_getModifiers:
3303
// Mark the class id as used by OR-ing in the low bit, then store the
// updated id back through 'insp' (plain 64-bit store, no ordering).
3304 Node* clsused = longcon(0x01l); // set the class bit
3305 Node* orl = _gvn.transform(new OrLNode(tvalue, clsused));
3306 const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
3307 store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
3308
// If the trace id carries meta bits, mask them out of the value
// returned to Java.
3309 #ifdef TRACE_ID_META_BITS
3310 Node* mbits = longcon(~TRACE_ID_META_BITS);
3311 tvalue = _gvn.transform(new AndLNode(tvalue, mbits));
3312 #endif
// If the class id is stored shifted, logical-shift it back down
// before returning it.
3313 #ifdef TRACE_ID_CLASS_SHIFT
3314 Node* cbits = intcon(TRACE_ID_CLASS_SHIFT);
3315 tvalue = _gvn.transform(new URShiftLNode(tvalue, cbits));
3316 #endif
3317
3318 set_result(tvalue);
3319 return true;
3320
3321 }
3322
// Inlines the trace getBufferWriter intrinsic: loads the thread-local
// writer jobject and branches on whether one has been installed.
// (Body continues past this chunk.)
3323 bool LibraryCallKit::inline_native_getBufferWriter() {
// Current thread (TLS) pointer.
3324 Node* tls_ptr = _gvn.transform(new ThreadLocalNode());
3325
// Address of the thread-local trace writer field within the thread.
3326 Node* jobj_ptr = basic_plus_adr(top(), tls_ptr,
3327 in_bytes(TRACE_THREAD_DATA_WRITER_OFFSET)
3328 );
3329
3330 Node* jobj = make_load(control(), jobj_ptr, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
3331
// Test the loaded handle against null; the eq-null outcome is given
// minimal probability (PROB_MIN).
3332 Node* jobj_cmp_null = _gvn.transform( new CmpPNode(jobj, null()) );
3333 Node* test_jobj_eq_null = _gvn.transform( new BoolNode(jobj_cmp_null, BoolTest::eq) );
3334
3335 IfNode* iff_jobj_null =
3336 create_and_map_if(control(), test_jobj_eq_null, PROB_MIN, COUNT_UNKNOWN);
3337
3338 enum { _normal_path = 1,
3339 _null_path = 2,
3340 PATH_LIMIT };
3341
// Region/phi that merge the two paths into one pointer result.
3342 RegionNode* result_rgn = new RegionNode(PATH_LIMIT);
3343 PhiNode* result_val = new PhiNode(result_rgn, TypePtr::BOTTOM);
|
242 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
243 // This returns Type::AnyPtr, RawPtr, or OopPtr.
244 int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
245 Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
246 // Helper for inline_unsafe_access.
247 // Generates the guards that check whether the result of
248 // Unsafe.getObject should be recorded in an SATB log buffer.
249 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
250
// Memory-ordering flavor requested for an Unsafe access; passed to
// inline_unsafe_access below.
251 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
252 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
253 static bool klass_needs_init_guard(Node* kls);
254 bool inline_unsafe_allocate();
255 bool inline_unsafe_newArray(bool uninitialized);
256 bool inline_unsafe_copyMemory();
257 bool inline_native_currentThread();
258
259 bool inline_native_time_funcs(address method, const char* funcName);
// Trace/JFR intrinsics are only compiled in when the tracing backend
// provides them.
260 #ifdef TRACE_HAVE_INTRINSICS
261 bool inline_native_classID();
262 bool inline_native_getEventWriter();
263 #endif
264 bool inline_native_isInterrupted();
265 bool inline_native_Class_query(vmIntrinsics::ID id);
266 bool inline_native_subtype_check();
267 bool inline_native_getLength();
268 bool inline_array_copyOf(bool is_copyOfRange);
269 bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
270 bool inline_preconditions_checkIndex();
271 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
272 bool inline_native_clone(bool is_virtual);
273 bool inline_native_Reflection_getCallerClass();
274 // Helper function for inlining native object hash method
275 bool inline_native_hashcode(bool is_virtual, bool is_static);
276 bool inline_native_getClass();
277
278 // Helper functions for inlining arraycopy
279 bool inline_arraycopy();
// NOTE(review): presumably returns the AllocateArrayNode feeding 'ptr'
// when the allocation can be fused with the copy — confirm in the
// implementation.
280 AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
281 RegionNode* slow_region);
282 JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
// Partial view of the intrinsic dispatch switch: each case routes one
// vmIntrinsics ID to its dedicated inline_* handler and returns its result.
731 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
732
// Atomic get-and-set variants, all requested with Volatile ordering.
733 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
734 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
735 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
736 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
737 case vmIntrinsics::_getAndSetObject: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
738
// The three fence intrinsics share one inliner keyed by the intrinsic id.
739 case vmIntrinsics::_loadFence:
740 case vmIntrinsics::_storeFence:
741 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
742
743 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
744
745 case vmIntrinsics::_currentThread: return inline_native_currentThread();
746 case vmIntrinsics::_isInterrupted: return inline_native_isInterrupted();
747
// Trace/JFR intrinsics, present only when the tracing backend supplies them.
748 #ifdef TRACE_HAVE_INTRINSICS
749 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
750 case vmIntrinsics::_getClassId: return inline_native_classID();
751 case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter();
752 #endif
753 case vmIntrinsics::_currentTimeMillis: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
754 case vmIntrinsics::_nanoTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
755 case vmIntrinsics::_allocateInstance: return inline_unsafe_allocate();
756 case vmIntrinsics::_copyMemory: return inline_unsafe_copyMemory();
757 case vmIntrinsics::_getLength: return inline_native_getLength();
758 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
759 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
760 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
761 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
762 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
763 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
764
765 case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
766 case vmIntrinsics::_newArray: return inline_unsafe_newArray(false);
767
768 case vmIntrinsics::_isAssignableFrom: return inline_native_subtype_check();
769
// These Class-query intrinsics fall through to a shared handler
// (continuation not visible in this chunk).
770 case vmIntrinsics::_isInstance:
771 case vmIntrinsics::_getModifiers:
3303
// Mark the class id as used by OR-ing in the low bit, then store the
// updated id back through 'insp' (plain 64-bit store, no ordering).
3304 Node* clsused = longcon(0x01l); // set the class bit
3305 Node* orl = _gvn.transform(new OrLNode(tvalue, clsused));
3306 const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
3307 store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
3308
// If the trace id carries meta bits, mask them out of the value
// returned to Java.
3309 #ifdef TRACE_ID_META_BITS
3310 Node* mbits = longcon(~TRACE_ID_META_BITS);
3311 tvalue = _gvn.transform(new AndLNode(tvalue, mbits));
3312 #endif
// If the class id is stored shifted, logical-shift it back down
// before returning it.
3313 #ifdef TRACE_ID_CLASS_SHIFT
3314 Node* cbits = intcon(TRACE_ID_CLASS_SHIFT);
3315 tvalue = _gvn.transform(new URShiftLNode(tvalue, cbits));
3316 #endif
3317
3318 set_result(tvalue);
3319 return true;
3320
3321 }
3322
// Inlines the trace getEventWriter intrinsic: loads the thread-local
// writer jobject and branches on whether one has been installed.
// (Body continues past this chunk.)
3323 bool LibraryCallKit::inline_native_getEventWriter() {
// Current thread (TLS) pointer.
3324 Node* tls_ptr = _gvn.transform(new ThreadLocalNode());
3325
// Address of the thread-local trace writer field within the thread.
3326 Node* jobj_ptr = basic_plus_adr(top(), tls_ptr,
3327 in_bytes(TRACE_THREAD_DATA_WRITER_OFFSET)
3328 );
3329
3330 Node* jobj = make_load(control(), jobj_ptr, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
3331
// Test the loaded handle against null; the eq-null outcome is given
// minimal probability (PROB_MIN).
3332 Node* jobj_cmp_null = _gvn.transform( new CmpPNode(jobj, null()) );
3333 Node* test_jobj_eq_null = _gvn.transform( new BoolNode(jobj_cmp_null, BoolTest::eq) );
3334
3335 IfNode* iff_jobj_null =
3336 create_and_map_if(control(), test_jobj_eq_null, PROB_MIN, COUNT_UNKNOWN);
3337
3338 enum { _normal_path = 1,
3339 _null_path = 2,
3340 PATH_LIMIT };
3341
// Region/phi that merge the two paths into one pointer result.
3342 RegionNode* result_rgn = new RegionNode(PATH_LIMIT);
3343 PhiNode* result_val = new PhiNode(result_rgn, TypePtr::BOTTOM);
|