778 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
779 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
780 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
781 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
782 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
783 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
784
785 case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
786 case vmIntrinsics::_newArray: return inline_unsafe_newArray(false);
787
788 case vmIntrinsics::_isAssignableFrom: return inline_native_subtype_check();
789
790 case vmIntrinsics::_isInstance:
791 case vmIntrinsics::_getModifiers:
792 case vmIntrinsics::_isInterface:
793 case vmIntrinsics::_isArray:
794 case vmIntrinsics::_isPrimitive:
795 case vmIntrinsics::_getSuperclass:
796 case vmIntrinsics::_getClassAccessFlags: return inline_native_Class_query(intrinsic_id());
797
798 case vmIntrinsics::_asValueType:
799 case vmIntrinsics::_asBoxType: return inline_value_Class_conversion(intrinsic_id());
800
801 case vmIntrinsics::_floatToRawIntBits:
802 case vmIntrinsics::_floatToIntBits:
803 case vmIntrinsics::_intBitsToFloat:
804 case vmIntrinsics::_doubleToRawLongBits:
805 case vmIntrinsics::_doubleToLongBits:
806 case vmIntrinsics::_longBitsToDouble: return inline_fp_conversions(intrinsic_id());
807
808 case vmIntrinsics::_numberOfLeadingZeros_i:
809 case vmIntrinsics::_numberOfLeadingZeros_l:
810 case vmIntrinsics::_numberOfTrailingZeros_i:
811 case vmIntrinsics::_numberOfTrailingZeros_l:
812 case vmIntrinsics::_bitCount_i:
813 case vmIntrinsics::_bitCount_l:
814 case vmIntrinsics::_reverseBytes_i:
815 case vmIntrinsics::_reverseBytes_l:
816 case vmIntrinsics::_reverseBytes_s:
817 case vmIntrinsics::_reverseBytes_c: return inline_number_methods(intrinsic_id());
818
819 case vmIntrinsics::_getCallerClass: return inline_native_Reflection_getCallerClass();
3475 case vmIntrinsics::_getClassAccessFlags:
3476 p = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3477 query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3478 break;
3479
3480 default:
3481 fatal_unexpected_iid(id);
3482 break;
3483 }
3484
3485 // Fall-through is the normal case of a query to a real class.
3486 phi->init_req(1, query_value);
3487 region->init_req(1, control());
3488
3489 C->set_has_split_ifs(true); // Has chance for split-if optimization
3490 set_result(region, phi);
3491 return true;
3492 }
3493
3494 //-------------------------inline_value_Class_conversion-------------------
3495 // public Class<T> java.lang.Class.asBoxType();
3496 // public Class<T> java.lang.Class.asValueType();
3497 bool LibraryCallKit::inline_value_Class_conversion(vmIntrinsics::ID id) {
3498 Node* mirror = argument(0); // Receiver Class
3499 const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3500 if (mirror_con == NULL) {
3501 return false;
3502 }
3503
3504 bool is_val_type = false;
3505 ciType* tm = mirror_con->java_mirror_type(&is_val_type);
3506 if (tm != NULL && tm->is_valuetype()) {
3507 Node* result = mirror;
3508 if (id == vmIntrinsics::_asValueType && !is_val_type) {
3509 result = _gvn.makecon(TypeInstPtr::make(tm->as_value_klass()->value_mirror_instance()));
3510 } else if (id == vmIntrinsics::_asBoxType && is_val_type) {
3511 result = _gvn.makecon(TypeInstPtr::make(tm->as_value_klass()->box_mirror_instance()));
3512 }
3513 set_result(result);
3514 return true;
3515 }
3516 return false;
3517 }
3518
3519 //-------------------------inline_Class_cast-------------------
3520 bool LibraryCallKit::inline_Class_cast() {
3521 Node* mirror = argument(0); // Class
3522 Node* obj = argument(1);
3523 const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3524 if (mirror_con == NULL) {
3525 return false; // dead path (mirror->is_top()).
3526 }
3527 if (obj == NULL || obj->is_top()) {
3528 return false; // dead path
3529 }
3530
3531 ciKlass* obj_klass = NULL;
3576 mirror = null_check(mirror);
3577 // If mirror is dead, only null-path is taken.
3578 if (stopped()) {
3579 return true;
3580 }
3581
3582 // Not-subtype or the mirror's klass ptr is NULL (in case it is a primitive).
3583 enum { _bad_type_path = 1, _prim_path = 2, _npe_path = 3, PATH_LIMIT };
3584 RegionNode* region = new RegionNode(PATH_LIMIT);
3585 record_for_igvn(region);
3586
3587 // Now load the mirror's klass metaobject, and null-check it.
3588 // If kls is null, we have a primitive mirror and
3589 // nothing is an instance of a primitive type.
3590 Node* kls = load_klass_from_mirror(mirror, false, region, _prim_path);
3591
3592 Node* res = top();
3593 if (!stopped()) {
3594 // TODO move this into do_checkcast?
3595 if (EnableValhalla && !obj->is_ValueType() && !is_val_type) {
3596 // Check if (mirror == value_mirror && obj == null)
3597 RegionNode* r = new RegionNode(3);
3598 Node* p = basic_plus_adr(mirror, java_lang_Class::value_mirror_offset_in_bytes());
3599 Node* value_mirror = access_load_at(mirror, p, _gvn.type(p)->is_ptr(), TypeInstPtr::MIRROR->cast_to_ptr_type(TypePtr::BotPTR), T_OBJECT, IN_HEAP);
3600 Node* cmp = _gvn.transform(new CmpPNode(mirror, value_mirror));
3601 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
3602 Node* if_ne = generate_fair_guard(bol, NULL);
3603 r->init_req(1, if_ne);
3604
3605 // Casting to .val, check for null
3606 Node* null_ctr = top();
3607 null_check_oop(obj, &null_ctr);
3608 region->init_req(_npe_path, null_ctr);
3609 r->init_req(2, control());
3610
3611 set_control(_gvn.transform(r));
3612 }
3613
3614 Node* bad_type_ctrl = top();
3615 // Do checkcast optimizations.
3616 res = gen_checkcast(obj, kls, &bad_type_ctrl);
3617 region->init_req(_bad_type_path, bad_type_ctrl);
3618 }
3619 if (region->in(_prim_path) != top() ||
3620 region->in(_bad_type_path) != top() ||
|
778 case vmIntrinsics::_copyOf: return inline_array_copyOf(false);
779 case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
780 case vmIntrinsics::_equalsB: return inline_array_equals(StrIntrinsicNode::LL);
781 case vmIntrinsics::_equalsC: return inline_array_equals(StrIntrinsicNode::UU);
782 case vmIntrinsics::_Preconditions_checkIndex: return inline_preconditions_checkIndex();
783 case vmIntrinsics::_clone: return inline_native_clone(intrinsic()->is_virtual());
784
785 case vmIntrinsics::_allocateUninitializedArray: return inline_unsafe_newArray(true);
786 case vmIntrinsics::_newArray: return inline_unsafe_newArray(false);
787
788 case vmIntrinsics::_isAssignableFrom: return inline_native_subtype_check();
789
790 case vmIntrinsics::_isInstance:
791 case vmIntrinsics::_getModifiers:
792 case vmIntrinsics::_isInterface:
793 case vmIntrinsics::_isArray:
794 case vmIntrinsics::_isPrimitive:
795 case vmIntrinsics::_getSuperclass:
796 case vmIntrinsics::_getClassAccessFlags: return inline_native_Class_query(intrinsic_id());
797
798 case vmIntrinsics::_asPrimaryType:
799 case vmIntrinsics::_asNullableType: return inline_value_Class_conversion(intrinsic_id());
800
801 case vmIntrinsics::_floatToRawIntBits:
802 case vmIntrinsics::_floatToIntBits:
803 case vmIntrinsics::_intBitsToFloat:
804 case vmIntrinsics::_doubleToRawLongBits:
805 case vmIntrinsics::_doubleToLongBits:
806 case vmIntrinsics::_longBitsToDouble: return inline_fp_conversions(intrinsic_id());
807
808 case vmIntrinsics::_numberOfLeadingZeros_i:
809 case vmIntrinsics::_numberOfLeadingZeros_l:
810 case vmIntrinsics::_numberOfTrailingZeros_i:
811 case vmIntrinsics::_numberOfTrailingZeros_l:
812 case vmIntrinsics::_bitCount_i:
813 case vmIntrinsics::_bitCount_l:
814 case vmIntrinsics::_reverseBytes_i:
815 case vmIntrinsics::_reverseBytes_l:
816 case vmIntrinsics::_reverseBytes_s:
817 case vmIntrinsics::_reverseBytes_c: return inline_number_methods(intrinsic_id());
818
819 case vmIntrinsics::_getCallerClass: return inline_native_Reflection_getCallerClass();
3475 case vmIntrinsics::_getClassAccessFlags:
3476 p = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3477 query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3478 break;
3479
3480 default:
3481 fatal_unexpected_iid(id);
3482 break;
3483 }
3484
3485 // Fall-through is the normal case of a query to a real class.
3486 phi->init_req(1, query_value);
3487 region->init_req(1, control());
3488
3489 C->set_has_split_ifs(true); // Has chance for split-if optimization
3490 set_result(region, phi);
3491 return true;
3492 }
3493
3494 //-------------------------inline_value_Class_conversion-------------------
3495 // public Class<T> java.lang.Class.asPrimaryType();
3496 // public Class<T> java.lang.Class.asNullableType();
bool LibraryCallKit::inline_value_Class_conversion(vmIntrinsics::ID id) {
  // Intrinsify Class.asPrimaryType()/Class.asNullableType(): if the receiver
  // is a compile-time constant value-class mirror, fold the conversion to the
  // appropriate mirror constant. Returns false (no intrinsification, regular
  // call is used) when the receiver mirror is not a constant value-type mirror.
  Node* mirror = argument(0); // Receiver Class
  const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
  if (mirror_con == NULL) {
    // Receiver type is unknown (e.g. top/dead path); bail out.
    return false;
  }

  bool is_val_type = false;
  // Constant type of the mirror, if the mirror is a compile-time constant.
  // NOTE(review): is_val_type appears to be set when the mirror is already the
  // value (null-free) projection — inferred from the branch logic below; confirm
  // against ciTypeInstPtr::java_mirror_type.
  ciType* tm = mirror_con->java_mirror_type(&is_val_type);
  if (tm != NULL && tm->is_valuetype()) {
    // Conversion is a no-op unless it switches between the two projections.
    Node* result = mirror;
    if (id == vmIntrinsics::_asPrimaryType && !is_val_type) {
      result = _gvn.makecon(TypeInstPtr::make(tm->as_value_klass()->value_mirror_instance()));
    } else if (id == vmIntrinsics::_asNullableType && is_val_type) {
      result = _gvn.makecon(TypeInstPtr::make(tm->as_value_klass()->nullable_mirror_instance()));
    }
    set_result(result);
    return true;
  }
  // Not a constant value-class mirror; leave the call alone.
  return false;
}
3518
3519 //-------------------------inline_Class_cast-------------------
3520 bool LibraryCallKit::inline_Class_cast() {
3521 Node* mirror = argument(0); // Class
3522 Node* obj = argument(1);
3523 const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3524 if (mirror_con == NULL) {
3525 return false; // dead path (mirror->is_top()).
3526 }
3527 if (obj == NULL || obj->is_top()) {
3528 return false; // dead path
3529 }
3530
3531 ciKlass* obj_klass = NULL;
3576 mirror = null_check(mirror);
3577 // If mirror is dead, only null-path is taken.
3578 if (stopped()) {
3579 return true;
3580 }
3581
3582 // Not-subtype or the mirror's klass ptr is NULL (in case it is a primitive).
3583 enum { _bad_type_path = 1, _prim_path = 2, _npe_path = 3, PATH_LIMIT };
3584 RegionNode* region = new RegionNode(PATH_LIMIT);
3585 record_for_igvn(region);
3586
3587 // Now load the mirror's klass metaobject, and null-check it.
3588 // If kls is null, we have a primitive mirror and
3589 // nothing is an instance of a primitive type.
3590 Node* kls = load_klass_from_mirror(mirror, false, region, _prim_path);
3591
3592 Node* res = top();
3593 if (!stopped()) {
3594 // TODO move this into do_checkcast?
3595 if (EnableValhalla && !obj->is_ValueType() && !is_val_type) {
3596 // Check if (mirror == inline_mirror && obj == null)
3597 RegionNode* r = new RegionNode(3);
3598 Node* p = basic_plus_adr(mirror, java_lang_Class::inline_mirror_offset_in_bytes());
3599 Node* inline_mirror = access_load_at(mirror, p, _gvn.type(p)->is_ptr(), TypeInstPtr::MIRROR, T_OBJECT, IN_HEAP);
3600 Node* cmp = _gvn.transform(new CmpPNode(mirror, inline_mirror));
3601 Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
3602 Node* if_ne = generate_fair_guard(bol, NULL);
3603 r->init_req(1, if_ne);
3604
3605 // Casting to .val, check for null
3606 Node* null_ctr = top();
3607 null_check_oop(obj, &null_ctr);
3608 region->init_req(_npe_path, null_ctr);
3609 r->init_req(2, control());
3610
3611 set_control(_gvn.transform(r));
3612 }
3613
3614 Node* bad_type_ctrl = top();
3615 // Do checkcast optimizations.
3616 res = gen_checkcast(obj, kls, &bad_type_ctrl);
3617 region->init_req(_bad_type_path, bad_type_ctrl);
3618 }
3619 if (region->in(_prim_path) != top() ||
3620 region->in(_bad_type_path) != top() ||
|