< prev index next >

src/hotspot/share/opto/library_call.cpp

Print this page




 235   bool inline_math_multiplyExactI();
 236   bool inline_math_multiplyExactL();
 237   bool inline_math_multiplyHigh();
 238   bool inline_math_negateExactI();
 239   bool inline_math_negateExactL();
 240   bool inline_math_subtractExactI(bool is_decrement);
 241   bool inline_math_subtractExactL(bool is_decrement);
 242   bool inline_min_max(vmIntrinsics::ID id);
 243   bool inline_notify(vmIntrinsics::ID id);
 244   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 245   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 246   int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
 247   Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
 248 
 249   typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
 250   DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
 251   bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
 252   static bool klass_needs_init_guard(Node* kls);
 253   bool inline_unsafe_allocate();
 254   bool inline_unsafe_newArray(bool uninitialized);


 255   bool inline_unsafe_copyMemory();
 256   bool inline_native_currentThread();
 257 
 258   bool inline_native_time_funcs(address method, const char* funcName);
 259 #ifdef JFR_HAVE_INTRINSICS
 260   bool inline_native_classID();
 261   bool inline_native_getEventWriter();
 262 #endif
 263   bool inline_native_isInterrupted();
 264   bool inline_native_Class_query(vmIntrinsics::ID id);
 265   bool inline_native_subtype_check();
 266   bool inline_native_getLength();
 267   bool inline_array_copyOf(bool is_copyOfRange);
 268   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 269   bool inline_preconditions_checkIndex();
 270   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
 271   bool inline_native_clone(bool is_virtual);
 272   bool inline_native_Reflection_getCallerClass();
 273   // Helper function for inlining native object hash method
 274   bool inline_native_hashcode(bool is_virtual, bool is_static);


 733   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 734   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 735   case vmIntrinsics::_getAndSetObject:                  return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 736 
 737   case vmIntrinsics::_loadFence:
 738   case vmIntrinsics::_storeFence:
 739   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 740 
 741   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 742 
 743   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 744   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 745 
 746 #ifdef JFR_HAVE_INTRINSICS
 747   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
 748   case vmIntrinsics::_getClassId:               return inline_native_classID();
 749   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 750 #endif
 751   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 752   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");



 753   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 754   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 755   case vmIntrinsics::_getLength:                return inline_native_getLength();
 756   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 757   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 758   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 759   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 760   case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
 761   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 762 
 763   case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
 764   case vmIntrinsics::_newArray:                   return inline_unsafe_newArray(false);
 765 
 766   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 767 
 768   case vmIntrinsics::_isInstance:
 769   case vmIntrinsics::_getModifiers:
 770   case vmIntrinsics::_isInterface:
 771   case vmIntrinsics::_isArray:
 772   case vmIntrinsics::_isPrimitive:


2735       return false;
2736   }
2737 }
2738 
     // Intrinsic for Thread.onSpinWait(): insert an OnSpinWait membar so the
     // backend can emit a platform spin-wait hint.  Always succeeds, so the
     // intrinsic is always considered inlined.
2739 bool LibraryCallKit::inline_onspinwait() {
2740   insert_mem_bar(Op_OnSpinWait);
2741   return true;
2742 }
2743 
     // Returns true when an explicit class-initialization guard must be emitted
     // before using 'kls'.  Only a compile-time-constant klass pointer whose
     // ciInstanceKlass is already initialized can safely skip the guard.
2744 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2745   if (!kls->is_Con()) {
2746     return true;  // not a compile-time constant: must guard
2747   }
2748   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2749   if (klsptr == NULL) {
2750     return true;  // constant, but not a klass pointer type: must guard
2751   }
2752   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2753   // don't need a guard for a klass that is already initialized
2754   return !ik->is_initialized();
2755 }
2756 
2757 //----------------------------inline_unsafe_allocate---------------------------
2758 // public native Object Unsafe.allocateInstance(Class<?> cls);
2759 bool LibraryCallKit::inline_unsafe_allocate() {
2760   if (callee()->is_static())  return false;  // caller must have the capability!
2761 
2762   null_check_receiver();  // null-check, then ignore
2763   Node* cls = null_check(argument(1));
2764   if (stopped())  return true;
2765 
2766   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2767   kls = null_check(kls);
2768   if (stopped())  return true;  // argument was like int.class
2769 
2770   Node* test = NULL;
2771   if (LibraryCallKit::klass_needs_init_guard(kls)) {
2772     // Note:  The argument might still be an illegal value like
2773     // Serializable.class or Object[].class.   The runtime will handle it.
2774     // But we must make an explicit check for initialization.




 235   bool inline_math_multiplyExactI();
 236   bool inline_math_multiplyExactL();
 237   bool inline_math_multiplyHigh();
 238   bool inline_math_negateExactI();
 239   bool inline_math_negateExactL();
 240   bool inline_math_subtractExactI(bool is_decrement);
 241   bool inline_math_subtractExactL(bool is_decrement);
 242   bool inline_min_max(vmIntrinsics::ID id);
 243   bool inline_notify(vmIntrinsics::ID id);
 244   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 245   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 246   int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
 247   Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
 248 
 249   typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
 250   DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
 251   bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
 252   static bool klass_needs_init_guard(Node* kls);
 253   bool inline_unsafe_allocate();
 254   bool inline_unsafe_newArray(bool uninitialized);
 255   bool inline_unsafe_writeback0();
 256   bool inline_unsafe_writebackSync0(bool isPre);
 257   bool inline_unsafe_copyMemory();
 258   bool inline_native_currentThread();
 259 
 260   bool inline_native_time_funcs(address method, const char* funcName);
 261 #ifdef JFR_HAVE_INTRINSICS
 262   bool inline_native_classID();
 263   bool inline_native_getEventWriter();
 264 #endif
 265   bool inline_native_isInterrupted();
 266   bool inline_native_Class_query(vmIntrinsics::ID id);
 267   bool inline_native_subtype_check();
 268   bool inline_native_getLength();
 269   bool inline_array_copyOf(bool is_copyOfRange);
 270   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 271   bool inline_preconditions_checkIndex();
 272   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
 273   bool inline_native_clone(bool is_virtual);
 274   bool inline_native_Reflection_getCallerClass();
 275   // Helper function for inlining native object hash method
 276   bool inline_native_hashcode(bool is_virtual, bool is_static);


 735   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 736   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 737   case vmIntrinsics::_getAndSetObject:                  return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 738 
 739   case vmIntrinsics::_loadFence:
 740   case vmIntrinsics::_storeFence:
 741   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 742 
 743   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 744 
 745   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 746   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 747 
 748 #ifdef JFR_HAVE_INTRINSICS
 749   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
 750   case vmIntrinsics::_getClassId:               return inline_native_classID();
 751   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 752 #endif
 753   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 754   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 755   case vmIntrinsics::_writeback0:               return inline_unsafe_writeback0();
 756   case vmIntrinsics::_writebackPreSync0:        return inline_unsafe_writebackSync0(true);
 757   case vmIntrinsics::_writebackPostSync0:       return inline_unsafe_writebackSync0(false);
 758   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 759   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 760   case vmIntrinsics::_getLength:                return inline_native_getLength();
 761   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 762   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 763   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 764   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 765   case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
 766   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 767 
 768   case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
 769   case vmIntrinsics::_newArray:                   return inline_unsafe_newArray(false);
 770 
 771   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 772 
 773   case vmIntrinsics::_isInstance:
 774   case vmIntrinsics::_getModifiers:
 775   case vmIntrinsics::_isInterface:
 776   case vmIntrinsics::_isArray:
 777   case vmIntrinsics::_isPrimitive:


2740       return false;
2741   }
2742 }
2743 
     // Intrinsic for Thread.onSpinWait(): insert an OnSpinWait membar so the
     // backend can emit a platform spin-wait hint.  Always succeeds, so the
     // intrinsic is always considered inlined.
2744 bool LibraryCallKit::inline_onspinwait() {
2745   insert_mem_bar(Op_OnSpinWait);
2746   return true;
2747 }
2748 
     // Returns true when an explicit class-initialization guard must be emitted
     // before using 'kls'.  Only a compile-time-constant klass pointer whose
     // ciInstanceKlass is already initialized can safely skip the guard.
2749 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2750   if (!kls->is_Con()) {
2751     return true;  // not a compile-time constant: must guard
2752   }
2753   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2754   if (klsptr == NULL) {
2755     return true;  // constant, but not a klass pointer type: must guard
2756   }
2757   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2758   // don't need a guard for a klass that is already initialized
2759   return !ik->is_initialized();
2760 }
2761 
2762 //----------------------------inline_unsafe_writeback0-------------------------
2763 // public native void Unsafe.writeback0(long address)
     // Emits a CacheWB node that writes back (flushes) the cache line
     // containing 'address'.  Refuses to inline (returns false, falling back to
     // the native method) when the backend has no match rule for Op_CacheWB.
2764 bool LibraryCallKit::inline_unsafe_writeback0() {
2765   if (!Matcher::has_match_rule(Op_CacheWB)) {
2766     return false;
2767   }
2768 #ifndef PRODUCT
     // A backend that matches CacheWB is expected to match both sync forms as
     // well, and the Java-side signature must pass the address as a long.
2769   assert(Matcher::has_match_rule(Op_CacheWBPreSync), "found match rule for CacheWB but not CacheWBPreSync");
2770   assert(Matcher::has_match_rule(Op_CacheWBPostSync), "found match rule for CacheWB but not CacheWBPostSync");
2771   ciSignature* sig = callee()->signature();
2772   assert(sig->type_at(0)->basic_type() == T_LONG, "Unsafe_writeback0 address is long!");
2773 #endif
2774   null_check_receiver();  // null-check, then ignore
     // Reinterpret the long argument as a raw pointer for the flush node.
2775   Node *addr = argument(1);
2776   addr = new CastX2PNode(addr);
2777   addr = _gvn.transform(addr);
2778   Node *flush = new CacheWBNode(control(), memory(TypeRawPtr::BOTTOM), addr);
2779   flush = _gvn.transform(flush);
     // Thread the flush through the raw memory slice so it stays ordered.
2780   set_memory(flush, TypeRawPtr::BOTTOM);
2781   return true;
2782 }
2783 
2784 //----------------------------inline_unsafe_writebackSync0---------------------
2785 // public native void Unsafe.writebackPreSync0()
     // public native void Unsafe.writebackPostSync0()
     // Emits the pre- or post-writeback synchronization node (selected by
     // 'isPre').  Refuses to inline (returns false) when the backend has no
     // match rule for the requested sync operation.
2786 bool LibraryCallKit::inline_unsafe_writebackSync0(bool isPre) {
2787   if (isPre && !Matcher::has_match_rule(Op_CacheWBPreSync)) {
2788     return false;
2789   }
2790   if (!isPre && !Matcher::has_match_rule(Op_CacheWBPostSync)) {
2791     return false;
2792   }
2793 #ifndef PRODUCT
     // A sync match rule is only expected on backends that also match CacheWB.
2794   assert(Matcher::has_match_rule(Op_CacheWB),
2795          (isPre ? "found match rule for CacheWBPreSync but not CacheWB"
2796                 : "found match rule for CacheWBPostSync but not CacheWB"));
2797 
2798 #endif
2799   null_check_receiver();  // null-check, then ignore
     // The sync node takes no address operand; it is threaded through the raw
     // memory slice — presumably to order it against surrounding CacheWB
     // flushes (confirm against the backend's CacheWBPre/PostSync semantics).
2800   Node *sync;
2801   if (isPre) {
2802     sync = new CacheWBPreSyncNode(control(), memory(TypeRawPtr::BOTTOM));
2803   } else {
2804     sync = new CacheWBPostSyncNode(control(), memory(TypeRawPtr::BOTTOM));
2805   }
2806   sync = _gvn.transform(sync);
2807   set_memory(sync, TypeRawPtr::BOTTOM);
2808   return true;
2809 }
2810 
2811 //----------------------------inline_unsafe_allocate---------------------------
2812 // public native Object Unsafe.allocateInstance(Class<?> cls);
2813 bool LibraryCallKit::inline_unsafe_allocate() {
2814   if (callee()->is_static())  return false;  // caller must have the capability!
2815 
2816   null_check_receiver();  // null-check, then ignore
2817   Node* cls = null_check(argument(1));
2818   if (stopped())  return true;
2819 
2820   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2821   kls = null_check(kls);
2822   if (stopped())  return true;  // argument was like int.class
2823 
2824   Node* test = NULL;
2825   if (LibraryCallKit::klass_needs_init_guard(kls)) {
2826     // Note:  The argument might still be an illegal value like
2827     // Serializable.class or Object[].class.   The runtime will handle it.
2828     // But we must make an explicit check for initialization.


< prev index next >