src/share/vm/opto/library_call.cpp

Old version:
 231   bool inline_math_multiplyExactI();
 232   bool inline_math_multiplyExactL();
 233   bool inline_math_negateExactI();
 234   bool inline_math_negateExactL();
 235   bool inline_math_subtractExactI(bool is_decrement);
 236   bool inline_math_subtractExactL(bool is_decrement);
 237   bool inline_min_max(vmIntrinsics::ID id);
 238   bool inline_notify(vmIntrinsics::ID id);
 239   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 240   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 241   int classify_unsafe_addr(Node* &base, Node* &offset);
 242   Node* make_unsafe_address(Node* base, Node* offset);
 243   // Helper for inline_unsafe_access.
 244   // Generates the guards that check whether the result of
 245   // Unsafe.getObject should be recorded in an SATB log buffer.
 246   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 247 
 248   typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
 249   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
 250   static bool klass_needs_init_guard(Node* kls);

 251   bool inline_unsafe_allocate();
 252   bool inline_unsafe_newArray(bool uninitialized);
 253   bool inline_unsafe_copyMemory();
 254   bool inline_native_currentThread();
 255 
 256   bool inline_native_time_funcs(address method, const char* funcName);
 257   bool inline_native_isInterrupted();
 258   bool inline_native_Class_query(vmIntrinsics::ID id);
 259   bool inline_native_subtype_check();
 260   bool inline_native_getLength();
 261   bool inline_array_copyOf(bool is_copyOfRange);
 262   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 263   bool inline_objects_checkIndex();
 264   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 265   bool inline_native_clone(bool is_virtual);
 266   bool inline_native_Reflection_getCallerClass();
 267   // Helper function for inlining native object hash method
 268   bool inline_native_hashcode(bool is_virtual, bool is_static);
 269   bool inline_native_getClass();
 270 


3124 }
3125 
3126 bool LibraryCallKit::inline_onspinwait() {
3127   insert_mem_bar(Op_OnSpinWait);
3128   return true;
3129 }
3130 
3131 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
3132   if (!kls->is_Con()) {
3133     return true;
3134   }
3135   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
3136   if (klsptr == NULL) {
3137     return true;
3138   }
3139   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
3140   // don't need a guard for a klass that is already initialized
3141   return !ik->is_initialized();
3142 }
3143 
3144 //----------------------------inline_unsafe_allocate---------------------------
3145 // public native Object Unsafe.allocateInstance(Class<?> cls);
3146 bool LibraryCallKit::inline_unsafe_allocate() {
3147   if (callee()->is_static())  return false;  // caller must have the capability!
3148 
3149   null_check_receiver();  // null-check, then ignore
3150   Node* cls = null_check(argument(1));
3151   if (stopped())  return true;
3152 
3153   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
3154   kls = null_check(kls);
3155   if (stopped())  return true;  // argument was like int.class
3156 
3157   Node* test = NULL;
3158   if (LibraryCallKit::klass_needs_init_guard(kls)) {
3159     // Note:  The argument might still be an illegal value like
3160     // Serializable.class or Object[].class.   The runtime will handle it.
3161     // But we must make an explicit check for initialization.
3162     Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
3163     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
3164     // can generate code to load it as unsigned byte.
3165     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
3166     Node* bits = intcon(InstanceKlass::fully_initialized);
3167     test = _gvn.transform(new SubINode(inst, bits));
3168     // The 'test' is non-zero if we need to take a slow path.
3169   }
 3170
3171   Node* obj = new_instance(kls, test);
3172   set_result(obj);
3173   return true;
3174 }
3175 
3176 //------------------------inline_native_time_funcs--------------
3177 // inline code for System.currentTimeMillis() and System.nanoTime()
3178 // these have the same type and signature
3179 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
3180   const TypeFunc* tf = OptoRuntime::void_long_Type();
3181   const TypePtr* no_memory_effects = NULL;
3182   Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
3183   Node* value = _gvn.transform(new ProjNode(time, TypeFunc::Parms+0));
3184 #ifdef ASSERT
3185   Node* value_top = _gvn.transform(new ProjNode(time, TypeFunc::Parms+1));
3186   assert(value_top == top(), "second value must be top");
3187 #endif
3188   set_result(value);
3189   return true;
 3190 }

New version:

 231   bool inline_math_multiplyExactI();
 232   bool inline_math_multiplyExactL();
 233   bool inline_math_negateExactI();
 234   bool inline_math_negateExactL();
 235   bool inline_math_subtractExactI(bool is_decrement);
 236   bool inline_math_subtractExactL(bool is_decrement);
 237   bool inline_min_max(vmIntrinsics::ID id);
 238   bool inline_notify(vmIntrinsics::ID id);
 239   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 240   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 241   int classify_unsafe_addr(Node* &base, Node* &offset);
 242   Node* make_unsafe_address(Node* base, Node* offset);
 243   // Helper for inline_unsafe_access.
 244   // Generates the guards that check whether the result of
 245   // Unsafe.getObject should be recorded in an SATB log buffer.
 246   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 247 
 248   typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
 249   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
 250   static bool klass_needs_init_guard(Node* kls);
 251   Node* make_init_guard(Node* kls);
 252   bool inline_unsafe_allocate();
 253   bool inline_unsafe_newArray(bool uninitialized);
 254   bool inline_unsafe_copyMemory();
 255   bool inline_native_currentThread();
 256 
 257   bool inline_native_time_funcs(address method, const char* funcName);
 258   bool inline_native_isInterrupted();
 259   bool inline_native_Class_query(vmIntrinsics::ID id);
 260   bool inline_native_subtype_check();
 261   bool inline_native_getLength();
 262   bool inline_array_copyOf(bool is_copyOfRange);
 263   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 264   bool inline_objects_checkIndex();
 265   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 266   bool inline_native_clone(bool is_virtual);
 267   bool inline_native_Reflection_getCallerClass();
 268   // Helper function for inlining native object hash method
 269   bool inline_native_hashcode(bool is_virtual, bool is_static);
 270   bool inline_native_getClass();
 271 


3125 }
3126 
3127 bool LibraryCallKit::inline_onspinwait() {
3128   insert_mem_bar(Op_OnSpinWait);
3129   return true;
3130 }
3131 
3132 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
3133   if (!kls->is_Con()) {
3134     return true;
3135   }
3136   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
3137   if (klsptr == NULL) {
3138     return true;
3139   }
3140   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
3141   // don't need a guard for a klass that is already initialized
3142   return !ik->is_initialized();
3143 }
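
For orientation, here is a simplified standalone model of the guard decision above (illustrative only; the enum, struct, and function names are invented for this sketch and are not HotSpot types): the guard can be elided only when the klass operand is a compile-time constant whose class is already known to be initialized.

    #include <cstddef>

    // Simplified model of klass_needs_init_guard (illustrative only, not
    // HotSpot code). A null pointer stands in for "not a compile-time constant".
    enum InitState { allocated, loaded, linked, being_initialized, fully_initialized };

    struct KlassModel { InitState init_state; };

    bool needs_init_guard(const KlassModel* constant_klass) {
      if (constant_klass == NULL) return true;                 // unknown at compile time: guard
      return constant_klass->init_state != fully_initialized;  // constant but uninitialized: guard
    }
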
3144 
3145 Node* LibraryCallKit::make_init_guard(Node* kls) {
 3146   if (!klass_needs_init_guard(kls))  return NULL;
 3147
3148   Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
3149   // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
3150   // can generate code to load it as unsigned byte.
3151   Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
 3152   Node* bits = intcon(InstanceKlass::fully_initialized);
3153 
3154   // The test is non-zero if we need to take a slow path.
3155   return _gvn.transform(new SubINode(inst, bits));
3156 
3157 }
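
Note the encoding chosen here, shown in a standalone sketch (assumptions: InstanceKlass::_init_state occupies a single byte and fully_initialized is its terminal value, as the in-code comments indicate): because the byte is loaded zero-extended (the T_BOOLEAN/UBYTE load), the subtraction yields 0 exactly when the class is fully initialized, so the entire guard folds into a single compare against zero.

    #include <cstddef>

    // Standalone sketch (not HotSpot code) of the zero/non-zero encoding that
    // make_init_guard returns. The unsigned-byte load mirrors the UBYTE load
    // above, so no sign extension can disturb the comparison.
    int init_guard_test(const unsigned char* klass_base,
                        size_t init_state_offset,
                        int fully_initialized) {
      int state = klass_base[init_state_offset];  // zero-extended, 0..255
      return state - fully_initialized;           // 0 => fully initialized (fast path)
    }
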
3158 
3159 //----------------------------inline_unsafe_allocate---------------------------
3160 // public native Object Unsafe.allocateInstance0(Class<?> cls) throws InstantiationException;
3161 bool LibraryCallKit::inline_unsafe_allocate() {
3162   null_check_receiver();  // null-check, then ignore
3163   Node* cls = null_check(argument(1));
3164   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
3165   Node* test = make_init_guard(kls);
3166   Node* obj = new_instance(kls, test);
3167   set_result(obj);
3168   return true;
3169 }
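
A minimal model of how the returned test steers allocation, assuming (consistent with the comment in make_init_guard) that new_instance treats a non-zero test as the slow-path condition and an absent (NULL) test as "fast path always"; the function names below are hypothetical:

    // Illustrative model only, not HotSpot code: a zero guard value keeps the
    // inlined fast-path allocation; a non-zero value falls back to the runtime,
    // which initializes the class before allocating.
    void* allocate_with_guard(int test,
                              void* (*fast_path_alloc)(),
                              void* (*runtime_slow_alloc)()) {
      return (test == 0) ? fast_path_alloc() : runtime_slow_alloc();
    }
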
3170 
3171 //------------------------inline_native_time_funcs--------------
3172 // inline code for System.currentTimeMillis() and System.nanoTime()
3173 // these have the same type and signature
3174 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
3175   const TypeFunc* tf = OptoRuntime::void_long_Type();
3176   const TypePtr* no_memory_effects = NULL;
3177   Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
3178   Node* value = _gvn.transform(new ProjNode(time, TypeFunc::Parms+0));
3179 #ifdef ASSERT
3180   Node* value_top = _gvn.transform(new ProjNode(time, TypeFunc::Parms+1));
3181   assert(value_top == top(), "second value must be top");
3182 #endif
3183   set_result(value);
3184   return true;
3185 }
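
Both time intrinsics funnel into this one helper, and in HotSpot the leaf entry points passed in are presumably os::javaTimeMillis and os::javaTimeNanos (an assumption; verify against the intrinsic dispatch in this file). The sketch below only models the shared "no arguments, returns jlong" shape that OptoRuntime::void_long_Type() describes; all names are invented for illustration.

    #include <cstdint>
    #include <chrono>

    // Standalone model (not HotSpot code) of the shared "jlong f()" signature:
    // both intrinsics bind an entry point of this shape, differing only in the
    // address and the debug name passed to make_runtime_call.
    typedef int64_t (*void_long_fn)();

    int64_t current_time_millis_model() {
      using namespace std::chrono;
      return duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
    }

    int64_t nano_time_model() {
      using namespace std::chrono;
      return duration_cast<nanoseconds>(steady_clock::now().time_since_epoch()).count();
    }

    // Both entry points share the one function type, as the comment above notes.
    void_long_fn entries[] = { current_time_millis_model, nano_time_model };
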

