208 Node* round_double_node(Node* n);
// Expand a math intrinsic as a leaf runtime call to funcAddr ('funcName' is
// for diagnostics); returns true if the expansion succeeded.
209 bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
210 bool inline_math_native(vmIntrinsics::ID id);
211 bool inline_trig(vmIntrinsics::ID id);
212 bool inline_math(vmIntrinsics::ID id);
// Shared expander for the overflow-checked Math.*Exact intrinsics; the
// concrete overflow node type is supplied via the OverflowOp template arg.
213 template <typename OverflowOp>
214 bool inline_math_overflow(Node* arg1, Node* arg2);
215 void inline_math_mathExact(Node* math, Node* test);
216 bool inline_math_addExactI(bool is_increment);
217 bool inline_math_addExactL(bool is_increment);
218 bool inline_math_multiplyExactI();
219 bool inline_math_multiplyExactL();
220 bool inline_math_negateExactI();
221 bool inline_math_negateExactL();
222 bool inline_math_subtractExactI(bool is_decrement);
223 bool inline_math_subtractExactL(bool is_decrement);
224 bool inline_exp();
225 bool inline_pow();
226 Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
227 bool inline_min_max(vmIntrinsics::ID id);
228 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
229 // This returns Type::AnyPtr, RawPtr, or OopPtr.
230 int classify_unsafe_addr(Node* &base, Node* &offset);
231 Node* make_unsafe_address(Node* base, Node* offset);
232 // Helper for inline_unsafe_access.
233 // Generates the guards that check whether the result of
234 // Unsafe.getObject should be recorded in an SATB log buffer.
235 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
236 bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
237 static bool klass_needs_init_guard(Node* kls);
238 bool inline_unsafe_allocate();
239 bool inline_unsafe_copyMemory();
240 bool inline_native_currentThread();
241 #ifdef TRACE_HAVE_INTRINSICS
242 bool inline_native_classID();
243 bool inline_native_threadID();
244 #endif
245 bool inline_native_time_funcs(address method, const char* funcName);
246 bool inline_native_isInterrupted();
247 bool inline_native_Class_query(vmIntrinsics::ID id);
743
// Dispatch on the current intrinsic id: each case expands the intrinsic's
// IR inline, returning true on success and false to fall back to a regular
// (non-intrinsic) call. (Enclosing function continues outside this chunk.)
744 switch (intrinsic_id()) {
745 case vmIntrinsics::_hashCode: return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
746 case vmIntrinsics::_identityHashCode: return inline_native_hashcode(/*!virtual*/ false, is_static);
747 case vmIntrinsics::_getClass: return inline_native_getClass();
748
// All double-precision math intrinsics funnel into one expander keyed on id.
749 case vmIntrinsics::_dsin:
750 case vmIntrinsics::_dcos:
751 case vmIntrinsics::_dtan:
752 case vmIntrinsics::_dabs:
753 case vmIntrinsics::_datan2:
754 case vmIntrinsics::_dsqrt:
755 case vmIntrinsics::_dexp:
756 case vmIntrinsics::_dlog:
757 case vmIntrinsics::_dlog10:
758 case vmIntrinsics::_dpow: return inline_math_native(intrinsic_id());
759
760 case vmIntrinsics::_min:
761 case vmIntrinsics::_max: return inline_min_max(intrinsic_id());
762
// Math.*Exact family: the boolean argument distinguishes the inc/dec forms
// from plain add/subtract (see comments at each call site).
763 case vmIntrinsics::_addExactI: return inline_math_addExactI(false /* add */);
764 case vmIntrinsics::_addExactL: return inline_math_addExactL(false /* add */);
765 case vmIntrinsics::_decrementExactI: return inline_math_subtractExactI(true /* decrement */);
766 case vmIntrinsics::_decrementExactL: return inline_math_subtractExactL(true /* decrement */);
767 case vmIntrinsics::_incrementExactI: return inline_math_addExactI(true /* increment */);
768 case vmIntrinsics::_incrementExactL: return inline_math_addExactL(true /* increment */);
769 case vmIntrinsics::_multiplyExactI: return inline_math_multiplyExactI();
770 case vmIntrinsics::_multiplyExactL: return inline_math_multiplyExactL();
771 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
772 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
773 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
774 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
775
776 case vmIntrinsics::_arraycopy: return inline_arraycopy();
777
778 case vmIntrinsics::_compareTo: return inline_string_compareTo();
779 case vmIntrinsics::_indexOf: return inline_string_indexOf();
780 case vmIntrinsics::_equals: return inline_string_equals();
781
782 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, !is_volatile);
2037 #undef FN_PTR
2038
2039 // These intrinsics are not yet correctly implemented
2040 case vmIntrinsics::_datan2:
2041 return false;
2042
// Reaching here means the intrinsic tables handed us an id this switch
// does not know about -- treat it as a VM bug, not a soft failure.
2043 default:
2044 fatal_unexpected_iid(id);
2045 return false;
2046 }
2047 }
2048
2049 static bool is_simple_name(Node* n) {
2050 return (n->req() == 1 // constant
2051 || (n->is_Type() && n->as_Type()->type()->singleton())
2052 || n->is_Proj() // parameter or return value
2053 || n->is_Phi() // local of some sort
2054 );
2055 }
2056
2057 //----------------------------inline_min_max-----------------------------------
2058 bool LibraryCallKit::inline_min_max(vmIntrinsics::ID id) {
2059 set_result(generate_min_max(id, argument(0), argument(1)));
2060 return true;
2061 }
2062
// Split control on the overflow flag produced by 'test': the no-overflow
// projection is the fast path; the overflow projection is prepared to
// re-execute the bytecode (body continues past this chunk).
2063 void LibraryCallKit::inline_math_mathExact(Node* math, Node *test) {
2064 Node* bol = _gvn.transform( new BoolNode(test, BoolTest::overflow) );
2065 IfNode* check = create_and_map_if(control(), bol, PROB_UNLIKELY_MAG(3), COUNT_UNKNOWN);
2066 Node* fast_path = _gvn.transform( new IfFalseNode(check));
2067 Node* slow_path = _gvn.transform( new IfTrueNode(check) );
2068
2069 {
2070 PreserveJVMState pjvms(this);
2071 PreserveReexecuteState preexecs(this);
// Arrange for the interpreter to re-run the whole bytecode on deopt.
2072 jvms()->set_should_reexecute(true);
2073
2074 set_control(slow_path);
2075 set_i_o(i_o());
2076
|
208 Node* round_double_node(Node* n);
// Expand a math intrinsic as a leaf runtime call to funcAddr ('funcName' is
// for diagnostics); returns true if the expansion succeeded.
209 bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
210 bool inline_math_native(vmIntrinsics::ID id);
211 bool inline_trig(vmIntrinsics::ID id);
212 bool inline_math(vmIntrinsics::ID id);
// Shared expander for the overflow-checked Math.*Exact intrinsics; the
// concrete overflow node type is supplied via the OverflowOp template arg.
213 template <typename OverflowOp>
214 bool inline_math_overflow(Node* arg1, Node* arg2);
215 void inline_math_mathExact(Node* math, Node* test);
216 bool inline_math_addExactI(bool is_increment);
217 bool inline_math_addExactL(bool is_increment);
218 bool inline_math_multiplyExactI();
219 bool inline_math_multiplyExactL();
220 bool inline_math_negateExactI();
221 bool inline_math_negateExactL();
222 bool inline_math_subtractExactI(bool is_decrement);
223 bool inline_math_subtractExactL(bool is_decrement);
224 bool inline_exp();
225 bool inline_pow();
226 Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
227 bool inline_min_max(vmIntrinsics::ID id);
// Expands Object.notify/notifyAll via a runtime call (id selects which).
228 bool inline_notify(vmIntrinsics::ID id);
229 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
230 // This returns Type::AnyPtr, RawPtr, or OopPtr.
231 int classify_unsafe_addr(Node* &base, Node* &offset);
232 Node* make_unsafe_address(Node* base, Node* offset);
233 // Helper for inline_unsafe_access.
234 // Generates the guards that check whether the result of
235 // Unsafe.getObject should be recorded in an SATB log buffer.
236 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
237 bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
238 static bool klass_needs_init_guard(Node* kls);
239 bool inline_unsafe_allocate();
240 bool inline_unsafe_copyMemory();
241 bool inline_native_currentThread();
242 #ifdef TRACE_HAVE_INTRINSICS
243 bool inline_native_classID();
244 bool inline_native_threadID();
245 #endif
246 bool inline_native_time_funcs(address method, const char* funcName);
247 bool inline_native_isInterrupted();
248 bool inline_native_Class_query(vmIntrinsics::ID id);
744
// Dispatch on the current intrinsic id: each case expands the intrinsic's
// IR inline, returning true on success and false to fall back to a regular
// (non-intrinsic) call. (Enclosing function continues outside this chunk.)
745 switch (intrinsic_id()) {
746 case vmIntrinsics::_hashCode: return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
747 case vmIntrinsics::_identityHashCode: return inline_native_hashcode(/*!virtual*/ false, is_static);
748 case vmIntrinsics::_getClass: return inline_native_getClass();
749
// All double-precision math intrinsics funnel into one expander keyed on id.
750 case vmIntrinsics::_dsin:
751 case vmIntrinsics::_dcos:
752 case vmIntrinsics::_dtan:
753 case vmIntrinsics::_dabs:
754 case vmIntrinsics::_datan2:
755 case vmIntrinsics::_dsqrt:
756 case vmIntrinsics::_dexp:
757 case vmIntrinsics::_dlog:
758 case vmIntrinsics::_dlog10:
759 case vmIntrinsics::_dpow: return inline_math_native(intrinsic_id());
760
761 case vmIntrinsics::_min:
762 case vmIntrinsics::_max: return inline_min_max(intrinsic_id());
763
// Object.notify/notifyAll are only intrinsified when InlineNotify is on;
// otherwise fall back to the normal call.
764 case vmIntrinsics::_notify:
765 case vmIntrinsics::_notifyAll:
766 if (InlineNotify) {
767 return inline_notify(intrinsic_id());
768 }
769 return false;
770
// Math.*Exact family: the boolean argument distinguishes the inc/dec forms
// from plain add/subtract (see comments at each call site).
771 case vmIntrinsics::_addExactI: return inline_math_addExactI(false /* add */);
772 case vmIntrinsics::_addExactL: return inline_math_addExactL(false /* add */);
773 case vmIntrinsics::_decrementExactI: return inline_math_subtractExactI(true /* decrement */);
774 case vmIntrinsics::_decrementExactL: return inline_math_subtractExactL(true /* decrement */);
775 case vmIntrinsics::_incrementExactI: return inline_math_addExactI(true /* increment */);
776 case vmIntrinsics::_incrementExactL: return inline_math_addExactL(true /* increment */);
777 case vmIntrinsics::_multiplyExactI: return inline_math_multiplyExactI();
778 case vmIntrinsics::_multiplyExactL: return inline_math_multiplyExactL();
779 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
780 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
781 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
782 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
783
784 case vmIntrinsics::_arraycopy: return inline_arraycopy();
785
786 case vmIntrinsics::_compareTo: return inline_string_compareTo();
787 case vmIntrinsics::_indexOf: return inline_string_indexOf();
788 case vmIntrinsics::_equals: return inline_string_equals();
789
790 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, !is_volatile);
2045 #undef FN_PTR
2046
2047 // These intrinsics are not yet correctly implemented
2048 case vmIntrinsics::_datan2:
2049 return false;
2050
// Reaching here means the intrinsic tables handed us an id this switch
// does not know about -- treat it as a VM bug, not a soft failure.
2051 default:
2052 fatal_unexpected_iid(id);
2053 return false;
2054 }
2055 }
2056
2057 static bool is_simple_name(Node* n) {
2058 return (n->req() == 1 // constant
2059 || (n->is_Type() && n->as_Type()->type()->singleton())
2060 || n->is_Proj() // parameter or return value
2061 || n->is_Phi() // local of some sort
2062 );
2063 }
2064
2065 //----------------------------inline_notify-----------------------------------*
2066 bool LibraryCallKit::inline_notify(vmIntrinsics::ID id) {
2067 const TypeFunc* ftype = OptoRuntime::monitor_notify_Type();
2068 address func;
2069 if (id == vmIntrinsics::_notify) {
2070 func = OptoRuntime::monitor_notify_Java();
2071 } else {
2072 func = OptoRuntime::monitor_notifyAll_Java();
2073 }
2074 Node* call = make_runtime_call(RC_NO_LEAF, ftype, func, NULL, TypeRawPtr::BOTTOM, argument(0));
2075 make_slow_call_ex(call, env()->Throwable_klass(), false);
2076 return true;
2077 }
2078
2079
2080 //----------------------------inline_min_max-----------------------------------
2081 bool LibraryCallKit::inline_min_max(vmIntrinsics::ID id) {
2082 set_result(generate_min_max(id, argument(0), argument(1)));
2083 return true;
2084 }
2085
// Split control on the overflow flag produced by 'test': the no-overflow
// projection is the fast path; the overflow projection is prepared to
// re-execute the bytecode (body continues past this chunk).
2086 void LibraryCallKit::inline_math_mathExact(Node* math, Node *test) {
2087 Node* bol = _gvn.transform( new BoolNode(test, BoolTest::overflow) );
2088 IfNode* check = create_and_map_if(control(), bol, PROB_UNLIKELY_MAG(3), COUNT_UNKNOWN);
2089 Node* fast_path = _gvn.transform( new IfFalseNode(check));
2090 Node* slow_path = _gvn.transform( new IfTrueNode(check) );
2091
2092 {
2093 PreserveJVMState pjvms(this);
2094 PreserveReexecuteState preexecs(this);
// Arrange for the interpreter to re-run the whole bytecode on deopt.
2095 jvms()->set_should_reexecute(true);
2096
2097 set_control(slow_path);
2098 set_i_o(i_o());
2099
|