< prev index next >

src/share/vm/opto/library_call.cpp

Print this page




 227   bool inline_math_addExactL(bool is_increment);
 228   bool inline_math_multiplyExactI();
 229   bool inline_math_multiplyExactL();
 230   bool inline_math_negateExactI();
 231   bool inline_math_negateExactL();
 232   bool inline_math_subtractExactI(bool is_decrement);
 233   bool inline_math_subtractExactL(bool is_decrement);
 234   bool inline_min_max(vmIntrinsics::ID id);
 235   bool inline_notify(vmIntrinsics::ID id);
 236   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 237   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 238   int classify_unsafe_addr(Node* &base, Node* &offset);
 239   Node* make_unsafe_address(Node* base, Node* offset);
 240   // Helper for inline_unsafe_access.
 241   // Generates the guards that check whether the result of
 242   // Unsafe.getObject should be recorded in an SATB log buffer.
 243   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 244   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 245   static bool klass_needs_init_guard(Node* kls);
 246   bool inline_unsafe_allocate();

 247   bool inline_unsafe_copyMemory();
 248   bool inline_native_currentThread();
 249 #ifdef TRACE_HAVE_INTRINSICS
 250   bool inline_native_classID();
 251   bool inline_native_threadID();
 252 #endif
 253   bool inline_native_time_funcs(address method, const char* funcName);
 254   bool inline_native_isInterrupted();
 255   bool inline_native_Class_query(vmIntrinsics::ID id);
 256   bool inline_native_subtype_check();
 257 
 258   bool inline_native_newArray();
 259   bool inline_native_getLength();
 260   bool inline_array_copyOf(bool is_copyOfRange);
 261   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 262   bool inline_objects_checkIndex();
 263   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 264   bool inline_native_clone(bool is_virtual);
 265   bool inline_native_Reflection_getCallerClass();
 266   // Helper function for inlining native object hash method
 267   bool inline_native_hashcode(bool is_virtual, bool is_static);
 268   bool inline_native_getClass();
 269 
 270   // Helper functions for inlining arraycopy
 271   bool inline_arraycopy();
 272   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
 273                                                 RegionNode* slow_region);
 274   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
 275   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp);
 276 
 277   typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
 278   bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind);


 633   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 634   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 635   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 636 
 637   case vmIntrinsics::_loadFence:
 638   case vmIntrinsics::_storeFence:
 639   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 640 
 641   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 642   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 643 
 644 #ifdef TRACE_HAVE_INTRINSICS
 645   case vmIntrinsics::_classID:                  return inline_native_classID();
 646   case vmIntrinsics::_threadID:                 return inline_native_threadID();
 647   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 648 #endif
 649   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 650   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 651   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 652   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 653   case vmIntrinsics::_newArray:                 return inline_native_newArray();

 654   case vmIntrinsics::_getLength:                return inline_native_getLength();
 655   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 656   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 657   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 658   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 659   case vmIntrinsics::_Objects_checkIndex:       return inline_objects_checkIndex();
 660   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 661 
 662   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 663 
 664   case vmIntrinsics::_isInstance:
 665   case vmIntrinsics::_getModifiers:
 666   case vmIntrinsics::_isInterface:
 667   case vmIntrinsics::_isArray:
 668   case vmIntrinsics::_isPrimitive:
 669   case vmIntrinsics::_getSuperclass:
 670   case vmIntrinsics::_getClassAccessFlags:      return inline_native_Class_query(intrinsic_id());
 671 
 672   case vmIntrinsics::_floatToRawIntBits:
 673   case vmIntrinsics::_floatToIntBits:


3562       set_control(top());
3563       return always_branch;
3564     }
3565   }
3566   // Now test the correct condition.
3567   jint  nval = (obj_array
3568                 ? ((jint)Klass::_lh_array_tag_type_value
3569                    <<    Klass::_lh_array_tag_shift)
3570                 : Klass::_lh_neutral_value);
3571   Node* cmp = _gvn.transform(new CmpINode(layout_val, intcon(nval)));
3572   BoolTest::mask btest = BoolTest::lt;  // correct for testing is_[obj]array
3573   // invert the test if we are looking for a non-array
3574   if (not_array)  btest = BoolTest(btest).negate();
3575   Node* bol = _gvn.transform(new BoolNode(cmp, btest));
3576   return generate_fair_guard(bol, region);
3577 }
3578 
3579 
3580 //-----------------------inline_native_newArray--------------------------
3581 // private static native Object java.lang.reflect.newArray(Class<?> componentType, int length);
       // Intrinsic for the reflective array constructor: try to allocate the
       // array inline using the array klass cached in the java.lang.Class
       // mirror, and fall back to a call of the native newArray method when
       // no cached array klass is available.  Returns true on every path
       // below the null check (the intrinsic always succeeds once entered).
3582 bool LibraryCallKit::inline_native_newArray() {
3583   Node* mirror    = argument(0);   // Class mirror of the component type
3584   Node* count_val = argument(1);   // requested array length








3585 
3586   mirror = null_check(mirror);
3587   // If mirror is dead (always null), only the null path is taken.
3588   if (stopped())  return true;
3589 
       // Merge point for the fast (inline allocation) and slow (runtime
       // call) paths: control, result value, i/o and memory each get a phi.
3590   enum { _normal_path = 1, _slow_path = 2, PATH_LIMIT };
3591   RegionNode* result_reg = new RegionNode(PATH_LIMIT);
3592   PhiNode*    result_val = new PhiNode(result_reg, TypeInstPtr::NOTNULL);
3593   PhiNode*    result_io  = new PhiNode(result_reg, Type::ABIO);
3594   PhiNode*    result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
3595 
       // Only speculate that the mirror's array klass is non-null if null
       // checks have not already caused too many deoptimizations here.
3596   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
       // Loads the cached array klass from the mirror; any failing control
       // is wired by the callee into result_reg->in(_slow_path).
3597   Node* klass_node = load_array_klass_from_mirror(mirror, never_see_null,
3598                                                   result_reg, _slow_path);
3599   Node* normal_ctl   = control();
3600   Node* no_array_ctl = result_reg->in(_slow_path);
3601 
3602   // Generate code for the slow case.  We make a call to newArray().
3603   set_control(no_array_ctl);
3604   if (!stopped()) {


       // PreserveJVMState restores the kit's JVM state when this scope
       // exits, so the slow-path call does not disturb the normal path.
3609     PreserveJVMState pjvms(this);
3610     CallJavaNode* slow_call = generate_method_call_static(vmIntrinsics::_newArray);
3611     Node* slow_result = set_results_for_java_call(slow_call);
3612     // this->control() comes from set_results_for_java_call
       // set_req (not init_req): slot _slow_path may already have been
       // filled in by load_array_klass_from_mirror above; overwrite it.
3613     result_reg->set_req(_slow_path, control());
3614     result_val->set_req(_slow_path, slow_result);
3615     result_io ->set_req(_slow_path, i_o());
3616     result_mem->set_req(_slow_path, reset_memory());
3617   }
3618 
3619   set_control(normal_ctl);
3620   if (!stopped()) {
3621     // Normal case:  The array type has been cached in the java.lang.Class.
3622     // The following call works fine even if the array type is polymorphic.
3623     // It could be a dynamic mix of int[], boolean[], Object[], etc.
3624     Node* obj = new_array(klass_node, count_val, 0);  // no arguments to push
3625     result_reg->init_req(_normal_path, control());
3626     result_val->init_req(_normal_path, obj);
3627     result_io ->init_req(_normal_path, i_o());
3628     result_mem->init_req(_normal_path, reset_memory());






3629   }
3630 
3631   // Return the combined state.
3632   set_i_o(        _gvn.transform(result_io)  );
3633   set_all_memory( _gvn.transform(result_mem));
3634 
3635   C->set_has_split_ifs(true); // Has chance for split-if optimization
3636   set_result(result_reg, result_val);
3637   return true;
3638 }
3639 
3640 //----------------------inline_native_getLength--------------------------
3641 // public static native int java.lang.reflect.Array.getLength(Object array);
3642 bool LibraryCallKit::inline_native_getLength() {
3643   if (too_many_traps(Deoptimization::Reason_intrinsic))  return false;
3644 
3645   Node* array = null_check(argument(0));
3646   // If array is dead, only null-path is taken.
3647   if (stopped())  return true;
3648 




 227   bool inline_math_addExactL(bool is_increment);
 228   bool inline_math_multiplyExactI();
 229   bool inline_math_multiplyExactL();
 230   bool inline_math_negateExactI();
 231   bool inline_math_negateExactL();
 232   bool inline_math_subtractExactI(bool is_decrement);
 233   bool inline_math_subtractExactL(bool is_decrement);
 234   bool inline_min_max(vmIntrinsics::ID id);
 235   bool inline_notify(vmIntrinsics::ID id);
 236   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 237   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 238   int classify_unsafe_addr(Node* &base, Node* &offset);
 239   Node* make_unsafe_address(Node* base, Node* offset);
 240   // Helper for inline_unsafe_access.
 241   // Generates the guards that check whether the result of
 242   // Unsafe.getObject should be recorded in an SATB log buffer.
 243   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 244   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 245   static bool klass_needs_init_guard(Node* kls);
 246   bool inline_unsafe_allocate();
 247   bool inline_unsafe_newArray(bool uninit);
 248   bool inline_unsafe_copyMemory();
 249   bool inline_native_currentThread();
 250 #ifdef TRACE_HAVE_INTRINSICS
 251   bool inline_native_classID();
 252   bool inline_native_threadID();
 253 #endif
 254   bool inline_native_time_funcs(address method, const char* funcName);
 255   bool inline_native_isInterrupted();
 256   bool inline_native_Class_query(vmIntrinsics::ID id);
 257   bool inline_native_subtype_check();


 258   bool inline_native_getLength();
 259   bool inline_array_copyOf(bool is_copyOfRange);
 260   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 261   bool inline_objects_checkIndex();
 262   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 263   bool inline_native_clone(bool is_virtual);
 264   bool inline_native_Reflection_getCallerClass();
 265   // Helper function for inlining native object hash method
 266   bool inline_native_hashcode(bool is_virtual, bool is_static);
 267   bool inline_native_getClass();
 268 
 269   // Helper functions for inlining arraycopy
 270   bool inline_arraycopy();
 271   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
 272                                                 RegionNode* slow_region);
 273   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
 274   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp);
 275 
 276   typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
 277   bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind);


 632   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 633   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 634   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 635 
 636   case vmIntrinsics::_loadFence:
 637   case vmIntrinsics::_storeFence:
 638   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 639 
 640   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 641   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 642 
 643 #ifdef TRACE_HAVE_INTRINSICS
 644   case vmIntrinsics::_classID:                  return inline_native_classID();
 645   case vmIntrinsics::_threadID:                 return inline_native_threadID();
 646   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 647 #endif
 648   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 649   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 650   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 651   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 652   case vmIntrinsics::_allocateArrayUninit:      return inline_unsafe_newArray(true);
 653   case vmIntrinsics::_newArray:                 return inline_unsafe_newArray(false);
 654   case vmIntrinsics::_getLength:                return inline_native_getLength();
 655   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 656   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 657   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 658   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 659   case vmIntrinsics::_Objects_checkIndex:       return inline_objects_checkIndex();
 660   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 661 
 662   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 663 
 664   case vmIntrinsics::_isInstance:
 665   case vmIntrinsics::_getModifiers:
 666   case vmIntrinsics::_isInterface:
 667   case vmIntrinsics::_isArray:
 668   case vmIntrinsics::_isPrimitive:
 669   case vmIntrinsics::_getSuperclass:
 670   case vmIntrinsics::_getClassAccessFlags:      return inline_native_Class_query(intrinsic_id());
 671 
 672   case vmIntrinsics::_floatToRawIntBits:
 673   case vmIntrinsics::_floatToIntBits:


3562       set_control(top());
3563       return always_branch;
3564     }
3565   }
3566   // Now test the correct condition.
3567   jint  nval = (obj_array
3568                 ? ((jint)Klass::_lh_array_tag_type_value
3569                    <<    Klass::_lh_array_tag_shift)
3570                 : Klass::_lh_neutral_value);
3571   Node* cmp = _gvn.transform(new CmpINode(layout_val, intcon(nval)));
3572   BoolTest::mask btest = BoolTest::lt;  // correct for testing is_[obj]array
3573   // invert the test if we are looking for a non-array
3574   if (not_array)  btest = BoolTest(btest).negate();
3575   Node* bol = _gvn.transform(new BoolNode(cmp, btest));
3576   return generate_fair_guard(bol, region);
3577 }
3578 
3579 
3580 //-----------------------inline_unsafe_newArray--------------------------
3581 // private static native Object java.lang.reflect.newArray(Class<?> componentType, int length);
3582 // private        native Object Unsafe.allocateArrayUninit0(Class<?> cls, int size);
       // Shared intrinsic body for both array-creation intrinsics above.
       // uninit == false: java.lang.reflect newArray (zeroed array).
       // uninit == true:  Unsafe.allocateArrayUninit0, identical except that
       // the fast-path allocation is marked so element zeroing is skipped.
3583 bool LibraryCallKit::inline_unsafe_newArray(bool uninit) {
3584   Node* mirror;
3585   Node* count_val;
3586   if (uninit) {
       // Unsafe.allocateArrayUninit0 is an instance method: argument(0) is
       // the Unsafe receiver, so the real arguments start at slot 1.
3587     mirror    = argument(1);
3588     count_val = argument(2);
3589   } else {
       // Static reflective entry point: arguments start at slot 0.
3590     mirror    = argument(0);
3591     count_val = argument(1);
3592   }
3593 
3594   mirror = null_check(mirror);
3595   // If mirror is dead (always null), only the null path is taken.
3596   if (stopped())  return true;
3597 
       // Merge point for the fast (inline allocation) and slow (runtime
       // call) paths: control, result value, i/o and memory each get a phi.
3598   enum { _normal_path = 1, _slow_path = 2, PATH_LIMIT };
3599   RegionNode* result_reg = new RegionNode(PATH_LIMIT);
3600   PhiNode*    result_val = new PhiNode(result_reg, TypeInstPtr::NOTNULL);
3601   PhiNode*    result_io  = new PhiNode(result_reg, Type::ABIO);
3602   PhiNode*    result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
3603 
       // Only speculate that the mirror's array klass is non-null if null
       // checks have not already caused too many deoptimizations here.
3604   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
       // Loads the cached array klass from the mirror; any failing control
       // is wired by the callee into result_reg->in(_slow_path).
3605   Node* klass_node = load_array_klass_from_mirror(mirror, never_see_null,
3606                                                   result_reg, _slow_path);
3607   Node* normal_ctl   = control();
3608   Node* no_array_ctl = result_reg->in(_slow_path);
3609 
3610   // Generate code for the slow case.  We make a call to newArray().
       // NOTE(review): even when uninit is true the fallback calls the
       // zeroing _newArray method — presumably intentional, since a zeroed
       // array is a valid result for allocateArrayUninit0; confirm.
3611   set_control(no_array_ctl);
3612   if (!stopped()) {


       // PreserveJVMState restores the kit's JVM state when this scope
       // exits, so the slow-path call does not disturb the normal path.
3617     PreserveJVMState pjvms(this);
3618     CallJavaNode* slow_call = generate_method_call_static(vmIntrinsics::_newArray);
3619     Node* slow_result = set_results_for_java_call(slow_call);
3620     // this->control() comes from set_results_for_java_call
       // set_req (not init_req): slot _slow_path may already have been
       // filled in by load_array_klass_from_mirror above; overwrite it.
3621     result_reg->set_req(_slow_path, control());
3622     result_val->set_req(_slow_path, slow_result);
3623     result_io ->set_req(_slow_path, i_o());
3624     result_mem->set_req(_slow_path, reset_memory());
3625   }
3626 
3627   set_control(normal_ctl);
3628   if (!stopped()) {
3629     // Normal case:  The array type has been cached in the java.lang.Class.
3630     // The following call works fine even if the array type is polymorphic.
3631     // It could be a dynamic mix of int[], boolean[], Object[], etc.
3632     Node* obj = new_array(klass_node, count_val, 0);  // no arguments to push
3633     result_reg->init_req(_normal_path, control());
3634     result_val->init_req(_normal_path, obj);
3635     result_io ->init_req(_normal_path, i_o());
3636     result_mem->init_req(_normal_path, reset_memory());
3637 
3638     if (uninit) {
3639       // Mark the allocation so that zeroing is skipped
       // NOTE(review): assumes the AllocateArrayNode emitted by new_array()
       // just above is always found, i.e. Ideal_array_allocation cannot
       // return NULL here — otherwise this dereference crashes; confirm.
3640       AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(obj, &_gvn);
3641       alloc->maybe_set_complete(&_gvn);
3642     }
3643   }
3644 
3645   // Return the combined state.
3646   set_i_o(        _gvn.transform(result_io)  );
3647   set_all_memory( _gvn.transform(result_mem));
3648 
3649   C->set_has_split_ifs(true); // Has chance for split-if optimization
3650   set_result(result_reg, result_val);
3651   return true;
3652 }
3653 
3654 //----------------------inline_native_getLength--------------------------
3655 // public static native int java.lang.reflect.Array.getLength(Object array);
3656 bool LibraryCallKit::inline_native_getLength() {
3657   if (too_many_traps(Deoptimization::Reason_intrinsic))  return false;
3658 
3659   Node* array = null_check(argument(0));
3660   // If array is dead, only null-path is taken.
3661   if (stopped())  return true;
3662 


< prev index next >