src/share/vm/opto/library_call.cpp

rev 10065 : 8147844: new method j.l.Runtime.onSpinWait() and the corresponding x86 hotspot intrinsic
Summary: adds c1 & c2 x86 intrinsics for j.l.Runtime.onSpinWait() that utilize the 'pause' instruction
Contributed-by: ikrylov, ygaevsky
Reviewed-by: iveresov, vlivanov, kvn


  bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_objects_checkIndex();
  void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
  bool inline_native_clone(bool is_virtual);
  bool inline_native_Reflection_getCallerClass();
  // Helper function for inlining native object hash method
  bool inline_native_hashcode(bool is_virtual, bool is_static);
  bool inline_native_getClass();

  // Helper functions for inlining arraycopy
  bool inline_arraycopy();
  AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
                                                RegionNode* slow_region);
  JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
  void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp);

  typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
  bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind);
  bool inline_unsafe_ordered_store(BasicType type);
  bool inline_unsafe_fence(vmIntrinsics::ID id);
  bool inline_onspinwait();
  bool inline_fp_conversions(vmIntrinsics::ID id);
  bool inline_number_methods(vmIntrinsics::ID id);
  bool inline_reference_get();
  bool inline_Class_cast();
  bool inline_aescrypt_Block(vmIntrinsics::ID id);
  bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
  bool inline_counterMode_AESCrypt(vmIntrinsics::ID id);
  Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
  Node* inline_counterMode_AESCrypt_predicate();
  Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
  Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
  bool inline_ghash_processBlocks();
  bool inline_sha_implCompress(vmIntrinsics::ID id);
  bool inline_digestBase_implCompressMB(int predicate);
  bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
                                 bool long_state, address stubAddr, const char *stubName,
                                 Node* src_start, Node* ofs, Node* limit);
  Node* get_state_from_sha_object(Node *sha_object);
  Node* get_state_from_sha5_object(Node *sha_object);
  Node* inline_digestBase_implCompressMB_predicate(int predicate);
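
For orientation, the SHA helpers declared above back the intrinsified sun.security.provider.DigestBase/SHA* implCompress methods. A minimal sketch of Java code that can reach them on platforms where the SHA stubs are available (the class name ShaDemo and the loop count are illustrative only):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

class ShaDemo {
    public static void main(String[] args) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        // a hot digest loop gives C2 a chance to compile with the SHA stubs
        byte[] d = null;
        for (int i = 0; i < 100_000; i++) {
            d = md.digest("hello".getBytes(StandardCharsets.UTF_8));
        }
        System.out.println(d.length);  // 32 bytes for SHA-256
    }
}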


  case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, true);

  case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
  case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
  case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);

  case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
  case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
  case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);

  case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
  case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
  case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
  case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
  case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);

  case vmIntrinsics::_loadFence:
  case vmIntrinsics::_storeFence:
  case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());

  case vmIntrinsics::_onSpinWait:               return inline_onspinwait();

  case vmIntrinsics::_currentThread:            return inline_native_currentThread();
  case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();

#ifdef TRACE_HAVE_INTRINSICS
  case vmIntrinsics::_classID:                  return inline_native_classID();
  case vmIntrinsics::_threadID:                 return inline_native_threadID();
  case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
#endif
  case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
  case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
  case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
  case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
  case vmIntrinsics::_newArray:                 return inline_native_newArray();
  case vmIntrinsics::_getLength:                return inline_native_getLength();
  case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
  case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
  case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
  case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
  case vmIntrinsics::_Objects_checkIndex:       return inline_objects_checkIndex();
  case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
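
The LS_xadd / LS_xchg / LS_cmpxchg cases above correspond to the sun.misc.Unsafe atomics. A minimal sketch of the Java-level calls being intrinsified (the demo class name and the reflective Unsafe access are illustrative only):

import java.lang.reflect.Field;
import sun.misc.Unsafe;

class UnsafeLoadStoreDemo {
    volatile int counter;

    public static void main(String[] args) throws Exception {
        Field f = Unsafe.class.getDeclaredField("theUnsafe");
        f.setAccessible(true);
        Unsafe u = (Unsafe) f.get(null);

        UnsafeLoadStoreDemo d = new UnsafeLoadStoreDemo();
        long off = u.objectFieldOffset(UnsafeLoadStoreDemo.class.getDeclaredField("counter"));

        u.getAndAddInt(d, off, 1);                        // LS_xadd   (_getAndAddInt)
        int old = u.getAndSetInt(d, off, 42);             // LS_xchg   (_getAndSetInt)
        boolean ok = u.compareAndSwapInt(d, off, 42, 7);  // LS_cmpxchg (_compareAndSwapInt)
        System.out.println(old + " " + ok + " " + d.counter);
    }
}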


bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
  // Regardless of form, don't allow previous ld/st to move down,
  // then issue acquire, release, or volatile mem_bar.
  insert_mem_bar(Op_MemBarCPUOrder);
  switch(id) {
    case vmIntrinsics::_loadFence:
      insert_mem_bar(Op_LoadFence);
      return true;
    case vmIntrinsics::_storeFence:
      insert_mem_bar(Op_StoreFence);
      return true;
    case vmIntrinsics::_fullFence:
      insert_mem_bar(Op_MemBarVolatile);
      return true;
    default:
      fatal_unexpected_iid(id);
      return false;
  }
}
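
For reference, the three fence intrinsics map to sun.misc.Unsafe.loadFence/storeFence/fullFence. A hedged sketch of the calls (single-threaded, so it only illustrates the API and the ordering each fence provides, not a complete publication protocol; the class name FenceDemo is hypothetical):

import java.lang.reflect.Field;
import sun.misc.Unsafe;

class FenceDemo {
    static int payload;    // plain field published under explicit fences
    static boolean ready;  // plain flag; the fences supply the ordering here

    public static void main(String[] args) throws Exception {
        Field f = Unsafe.class.getDeclaredField("theUnsafe");
        f.setAccessible(true);
        Unsafe u = (Unsafe) f.get(null);

        payload = 42;
        u.storeFence();      // release: prior stores stay above (_storeFence -> Op_StoreFence)
        ready = true;

        if (ready) {
            u.loadFence();   // acquire: later loads stay below (_loadFence -> Op_LoadFence)
            System.out.println(payload);
        }
        u.fullFence();       // full two-way barrier (_fullFence -> Op_MemBarVolatile)
    }
}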

// Insert an OnSpinWait node: a spin-wait hint that the backend lowers to the
// x86 'pause' instruction (see the changeset summary above).
bool LibraryCallKit::inline_onspinwait() {
  insert_mem_bar(Op_OnSpinWait);
  return true;
}
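
As the changeset summary notes, the Java entry point in this revision is j.l.Runtime.onSpinWait() (the hint later shipped in JDK 9 as Thread.onSpinWait()). A minimal spin-loop sketch, assuming the static Runtime.onSpinWait() added by this change, so it compiles only against the patched JDK:

class SpinDemo implements Runnable {
    volatile boolean done;

    public void run() {
        while (!done) {            // busy-wait until another thread sets 'done'
            Runtime.onSpinWait();  // hint: lowered to x86 'pause' when intrinsified
        }
    }

    public static void main(String[] args) throws Exception {
        SpinDemo s = new SpinDemo();
        Thread t = new Thread(s);
        t.start();
        Thread.sleep(10);
        s.done = true;             // release the spinner
        t.join();
    }
}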

bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
  if (!kls->is_Con()) {
    return true;
  }
  const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
  if (klsptr == NULL) {
    return true;
  }
  ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
  // don't need a guard for a klass that is already initialized
  return !ik->is_initialized();
}

//----------------------------inline_unsafe_allocate---------------------------
// public native Object Unsafe.allocateInstance(Class<?> cls);
bool LibraryCallKit::inline_unsafe_allocate() {
  if (callee()->is_static())  return false;  // caller must have the capability!

  null_check_receiver();  // null-check, then ignore
  Node* cls = null_check(argument(1));
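
The init guard above exists because Unsafe.allocateInstance must ensure the class is initialized even though it bypasses constructors; for a constant, already-initialized klass the guard folds away. A small hedged sketch of the Java-level behavior (class names are illustrative):

import java.lang.reflect.Field;
import sun.misc.Unsafe;

class AllocDemo {
    static class Lazy {
        static { System.out.println("Lazy <clinit> ran"); } // the guard's job: ensure this has happened
        int x = 7;                                          // instance initializer is skipped
    }

    public static void main(String[] args) throws Exception {
        Field f = Unsafe.class.getDeclaredField("theUnsafe");
        f.setAccessible(true);
        Unsafe u = (Unsafe) f.get(null);

        Lazy l = (Lazy) u.allocateInstance(Lazy.class);  // triggers <clinit> if not yet run
        System.out.println(l.x);                         // 0: no constructor or field init ran
    }
}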