251 bool inline_array_copyOf(bool is_copyOfRange);
252 bool inline_array_equals();
253 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
254 bool inline_native_clone(bool is_virtual);
255 bool inline_native_Reflection_getCallerClass();
256 // Helper function for inlining native object hash method
257 bool inline_native_hashcode(bool is_virtual, bool is_static);
258 bool inline_native_getClass();
259
260 // Helper functions for inlining arraycopy
261 bool inline_arraycopy();
262 AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
263 RegionNode* slow_region);
264 typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
265 bool inline_unsafe_load_store(BasicType type, LoadStoreKind kind);
266 bool inline_unsafe_ordered_store(BasicType type);
267 bool inline_unsafe_fence(vmIntrinsics::ID id);
268 bool inline_fp_conversions(vmIntrinsics::ID id);
269 bool inline_number_methods(vmIntrinsics::ID id);
270 bool inline_reference_get();
271 bool inline_aescrypt_Block(vmIntrinsics::ID id);
272 bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
273 Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
274 Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
275 Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
276 bool inline_sha_implCompress(vmIntrinsics::ID id);
277 bool inline_digestBase_implCompressMB(int predicate);
278 bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
279 bool long_state, address stubAddr, const char *stubName,
280 Node* src_start, Node* ofs, Node* limit);
281 Node* get_state_from_sha_object(Node *sha_object);
282 Node* get_state_from_sha5_object(Node *sha_object);
283 Node* inline_digestBase_implCompressMB_predicate(int predicate);
284 bool inline_encodeISOArray();
285 bool inline_updateCRC32();
286 bool inline_updateBytesCRC32();
287 bool inline_updateByteBufferCRC32();
288 bool inline_multiplyToLen();
289 };
290
852 case vmIntrinsics::_intBitsToFloat:
853 case vmIntrinsics::_doubleToRawLongBits:
854 case vmIntrinsics::_doubleToLongBits:
855 case vmIntrinsics::_longBitsToDouble: return inline_fp_conversions(intrinsic_id());
856
857 case vmIntrinsics::_numberOfLeadingZeros_i:
858 case vmIntrinsics::_numberOfLeadingZeros_l:
859 case vmIntrinsics::_numberOfTrailingZeros_i:
860 case vmIntrinsics::_numberOfTrailingZeros_l:
861 case vmIntrinsics::_bitCount_i:
862 case vmIntrinsics::_bitCount_l:
863 case vmIntrinsics::_reverseBytes_i:
864 case vmIntrinsics::_reverseBytes_l:
865 case vmIntrinsics::_reverseBytes_s:
866 case vmIntrinsics::_reverseBytes_c: return inline_number_methods(intrinsic_id());
867
868 case vmIntrinsics::_getCallerClass: return inline_native_Reflection_getCallerClass();
869
870 case vmIntrinsics::_Reference_get: return inline_reference_get();
871
872 case vmIntrinsics::_aescrypt_encryptBlock:
873 case vmIntrinsics::_aescrypt_decryptBlock: return inline_aescrypt_Block(intrinsic_id());
874
875 case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
876 case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
877 return inline_cipherBlockChaining_AESCrypt(intrinsic_id());
878
879 case vmIntrinsics::_sha_implCompress:
880 case vmIntrinsics::_sha2_implCompress:
881 case vmIntrinsics::_sha5_implCompress:
882 return inline_sha_implCompress(intrinsic_id());
883
884 case vmIntrinsics::_digestBase_implCompressMB:
885 return inline_digestBase_implCompressMB(predicate);
886
887 case vmIntrinsics::_multiplyToLen:
888 return inline_multiplyToLen();
889
890 case vmIntrinsics::_encodeISOArray:
891 return inline_encodeISOArray();
3529
3530 case vmIntrinsics::_getClassAccessFlags:
3531 p = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3532 query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3533 break;
3534
3535 default:
3536 fatal_unexpected_iid(id);
3537 break;
3538 }
3539
3540 // Fall-through is the normal case of a query to a real class.
3541 phi->init_req(1, query_value);
3542 region->init_req(1, control());
3543
3544 C->set_has_split_ifs(true); // Has chance for split-if optimization
3545 set_result(region, phi);
3546 return true;
3547 }
3548
3549 //--------------------------inline_native_subtype_check------------------------
3550 // This intrinsic takes the JNI calls out of the heart of
3551 // UnsafeFieldAccessorImpl.set, which improves Field.set, readObject, etc.
3552 bool LibraryCallKit::inline_native_subtype_check() {
3553 // Pull both arguments off the stack.
3554 Node* args[2]; // two java.lang.Class mirrors: superc, subc
3555 args[0] = argument(0);
3556 args[1] = argument(1);
3557 Node* klasses[2]; // corresponding Klasses: superk, subk
3558 klasses[0] = klasses[1] = top();
3559
3560 enum {
3561 // A full decision tree on {superc is prim, subc is prim}:
3562 _prim_0_path = 1, // {P,N} => false
3563 // {P,P} & superc!=subc => false
3564 _prim_same_path, // {P,P} & superc==subc => true
3565 _prim_1_path, // {N,P} => false
3566 _ref_subtype_path, // {N,N} & subtype check wins => true
3567 _both_ref_path, // {N,N} & subtype check loses => false
3568 PATH_LIMIT
|
251 bool inline_array_copyOf(bool is_copyOfRange);
252 bool inline_array_equals();
253 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
254 bool inline_native_clone(bool is_virtual);
255 bool inline_native_Reflection_getCallerClass();
256 // Helper function for inlining native object hash method
257 bool inline_native_hashcode(bool is_virtual, bool is_static);
258 bool inline_native_getClass();
259
260 // Helper functions for inlining arraycopy
261 bool inline_arraycopy();
262 AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
263 RegionNode* slow_region);
264 typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
265 bool inline_unsafe_load_store(BasicType type, LoadStoreKind kind);
266 bool inline_unsafe_ordered_store(BasicType type);
267 bool inline_unsafe_fence(vmIntrinsics::ID id);
268 bool inline_fp_conversions(vmIntrinsics::ID id);
269 bool inline_number_methods(vmIntrinsics::ID id);
270 bool inline_reference_get();
271 bool inline_Class_cast();
272 bool inline_aescrypt_Block(vmIntrinsics::ID id);
273 bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
274 Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
275 Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
276 Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
277 bool inline_sha_implCompress(vmIntrinsics::ID id);
278 bool inline_digestBase_implCompressMB(int predicate);
279 bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
280 bool long_state, address stubAddr, const char *stubName,
281 Node* src_start, Node* ofs, Node* limit);
282 Node* get_state_from_sha_object(Node *sha_object);
283 Node* get_state_from_sha5_object(Node *sha_object);
284 Node* inline_digestBase_implCompressMB_predicate(int predicate);
285 bool inline_encodeISOArray();
286 bool inline_updateCRC32();
287 bool inline_updateBytesCRC32();
288 bool inline_updateByteBufferCRC32();
289 bool inline_multiplyToLen();
290 };
291
853 case vmIntrinsics::_intBitsToFloat:
854 case vmIntrinsics::_doubleToRawLongBits:
855 case vmIntrinsics::_doubleToLongBits:
856 case vmIntrinsics::_longBitsToDouble: return inline_fp_conversions(intrinsic_id());
857
858 case vmIntrinsics::_numberOfLeadingZeros_i:
859 case vmIntrinsics::_numberOfLeadingZeros_l:
860 case vmIntrinsics::_numberOfTrailingZeros_i:
861 case vmIntrinsics::_numberOfTrailingZeros_l:
862 case vmIntrinsics::_bitCount_i:
863 case vmIntrinsics::_bitCount_l:
864 case vmIntrinsics::_reverseBytes_i:
865 case vmIntrinsics::_reverseBytes_l:
866 case vmIntrinsics::_reverseBytes_s:
867 case vmIntrinsics::_reverseBytes_c: return inline_number_methods(intrinsic_id());
868
869 case vmIntrinsics::_getCallerClass: return inline_native_Reflection_getCallerClass();
870
871 case vmIntrinsics::_Reference_get: return inline_reference_get();
872
873 case vmIntrinsics::_Class_cast: return inline_Class_cast();
874
875 case vmIntrinsics::_aescrypt_encryptBlock:
876 case vmIntrinsics::_aescrypt_decryptBlock: return inline_aescrypt_Block(intrinsic_id());
877
878 case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
879 case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
880 return inline_cipherBlockChaining_AESCrypt(intrinsic_id());
881
882 case vmIntrinsics::_sha_implCompress:
883 case vmIntrinsics::_sha2_implCompress:
884 case vmIntrinsics::_sha5_implCompress:
885 return inline_sha_implCompress(intrinsic_id());
886
887 case vmIntrinsics::_digestBase_implCompressMB:
888 return inline_digestBase_implCompressMB(predicate);
889
890 case vmIntrinsics::_multiplyToLen:
891 return inline_multiplyToLen();
892
893 case vmIntrinsics::_encodeISOArray:
894 return inline_encodeISOArray();
3532
3533 case vmIntrinsics::_getClassAccessFlags:
3534 p = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3535 query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3536 break;
3537
3538 default:
3539 fatal_unexpected_iid(id);
3540 break;
3541 }
3542
3543 // Fall-through is the normal case of a query to a real class.
3544 phi->init_req(1, query_value);
3545 region->init_req(1, control());
3546
3547 C->set_has_split_ifs(true); // Has chance for split-if optimization
3548 set_result(region, phi);
3549 return true;
3550 }
3551
3552 //-------------------------inline_Class_cast-------------------
// Intrinsic expansion of java.lang.Class.cast(Object).
// Returns true when the call was inlined here (either folded statically via
// static_subtype_check, or replaced by dynamic checkcast-style checks with an
// uncommon trap on failure); returns false to bail out so the call is handled
// by normal (non-intrinsic) compilation/inlining.
3553 bool LibraryCallKit::inline_Class_cast() {
3554 Node* mirror = argument(0); // Class
3555 Node* obj = argument(1);
3556 const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3557 if (mirror_con == NULL) {
3558 return false; // dead path (mirror->is_top()).
3559 }
3560 if (obj == NULL || obj->is_top()) {
3561 return false; // dead path
3562 }
// Static (GVN-known) type of the object being cast, if any.
3563 const TypeOopPtr* tp = _gvn.type(obj)->isa_oopptr();
3564
3565 // First, see if Class.cast() can be folded statically.
3566 // java_mirror_type() returns non-null for compile-time Class constants.
3567 ciType* tm = mirror_con->java_mirror_type();
3568 if (tm != NULL && tm->is_klass() &&
3569 tp != NULL && tp->klass() != NULL) {
3570 if (!tp->klass()->is_loaded()) {
3571 // Don't use intrinsic when class is not loaded.
3572 return false;
3573 } else {
// Ask the compiler whether tp's klass is always/never a subtype of the
// constant mirror's klass; fold the cast if the answer is definite.
3574 int static_res = C->static_subtype_check(tm->as_klass(), tp->klass());
3575 if (static_res == Compile::SSC_always_true) {
3576 // isInstance() is true - fold the code.
3577 set_result(obj);
3578 return true;
3579 } else if (static_res == Compile::SSC_always_false) {
3580 // Don't use intrinsic, have to throw ClassCastException.
3581 // If the reference is null, the non-intrinsic bytecode will
3582 // be optimized appropriately.
3583 return false;
3584 }
3585 }
3586 }
3587
3588 // Bailout intrinsic and do normal inlining if exception path is frequent.
3589 if (too_many_traps(Deoptimization::Reason_intrinsic)) {
3590 return false;
3591 }
3592
3593 // Generate dynamic checks.
3594 // Class.cast() is java implementation of _checkcast bytecode.
3595 // Do checkcast (Parse::do_checkcast()) optimizations here.
3596
3597 mirror = null_check(mirror);
3598 // If mirror is dead, only null-path is taken.
3599 if (stopped()) {
3600 return true;
3601 }
3602
3603 // Not-subtype or the mirror's klass ptr is NULL (in case it is a primitive).
3604 enum { _bad_type_path = 1, _prim_path = 2, PATH_LIMIT };
3605 RegionNode* region = new RegionNode(PATH_LIMIT);
3606 record_for_igvn(region);
3607
3608 // Now load the mirror's klass metaobject, and null-check it.
3609 // If kls is null, we have a primitive mirror and
3610 // nothing is an instance of a primitive type.
// NOTE(review): the 'false' argument presumably means the mirror may be seen
// as a primitive (null klass) here -- confirm against load_klass_from_mirror.
3611 Node* kls = load_klass_from_mirror(mirror, false, region, _prim_path);
3612
3613 Node* res = top();
3614 if (!stopped()) {
3615 Node* bad_type_ctrl = top();
3616 // Do checkcast optimizations.
3617 res = gen_checkcast(obj, kls, &bad_type_ctrl);
// Route the failed-check control into the trap region.
3618 region->init_req(_bad_type_path, bad_type_ctrl);
3619 }
// If either failure path (primitive mirror, or failed subtype check) is
// reachable, deoptimize there: the interpreter re-executes the cast and
// throws the ClassCastException with the proper message.
3620 if (region->in(_prim_path) != top() ||
3621 region->in(_bad_type_path) != top()) {
3622 // Let Interpreter throw ClassCastException.
3623 PreserveJVMState pjvms(this);
3624 set_control(_gvn.transform(region));
3625 uncommon_trap(Deoptimization::Reason_intrinsic,
3626 Deoptimization::Action_maybe_recompile);
3627 }
3628 if (!stopped()) {
3629 set_result(res);
3630 }
3631 return true;
3632 }
3633
3634
3635 //--------------------------inline_native_subtype_check------------------------
3636 // This intrinsic takes the JNI calls out of the heart of
3637 // UnsafeFieldAccessorImpl.set, which improves Field.set, readObject, etc.
3638 bool LibraryCallKit::inline_native_subtype_check() {
3639 // Pull both arguments off the stack.
3640 Node* args[2]; // two java.lang.Class mirrors: superc, subc
3641 args[0] = argument(0);
3642 args[1] = argument(1);
3643 Node* klasses[2]; // corresponding Klasses: superk, subk
3644 klasses[0] = klasses[1] = top();
3645
3646 enum {
3647 // A full decision tree on {superc is prim, subc is prim}:
3648 _prim_0_path = 1, // {P,N} => false
3649 // {P,P} & superc!=subc => false
3650 _prim_same_path, // {P,P} & superc==subc => true
3651 _prim_1_path, // {N,P} => false
3652 _ref_subtype_path, // {N,N} & subtype check wins => true
3653 _both_ref_path, // {N,N} & subtype check loses => false
3654 PATH_LIMIT
|