3666 generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT, 3667 original, start, newcopy, intcon(0), moved, 3668 disjoint_bases, length_never_negative); 3669 } 3670 } // original reexecute is set back here 3671 3672 C->set_has_split_ifs(true); // Has chance for split-if optimization 3673 if (!stopped()) { 3674 set_result(newcopy); 3675 } 3676 return true; 3677 } 3678 3679 3680 //----------------------generate_virtual_guard--------------------------- 3681 // Helper for hashCode and clone. Peeks inside the vtable to avoid a call. 3682 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass, 3683 RegionNode* slow_region) { 3684 ciMethod* method = callee(); 3685 int vtable_index = method->vtable_index(); 3686 // Get the Method* out of the appropriate vtable entry. 3687 int entry_offset = (InstanceKlass::vtable_start_offset() + 3688 vtable_index*vtableEntry::size()) * wordSize + 3689 vtableEntry::method_offset_in_bytes(); 3690 Node* entry_addr = basic_plus_adr(obj_klass, entry_offset); 3691 Node* target_call = make_load(NULL, entry_addr, TypePtr::NOTNULL, T_ADDRESS); 3692 3693 // Compare the target method with the expected method (e.g., Object.hashCode). 3694 const TypePtr* native_call_addr = TypeMetadataPtr::make(method); 3695 3696 Node* native_call = makecon(native_call_addr); 3697 Node* chk_native = _gvn.transform(new(C) CmpPNode(target_call, native_call)); 3698 Node* test_native = _gvn.transform(new(C) BoolNode(chk_native, BoolTest::ne)); 3699 3700 return generate_slow_guard(test_native, slow_region); 3701 } 3702 3703 //-----------------------generate_method_call---------------------------- 3704 // Use generate_method_call to make a slow-call to the real 3705 // method if the fast path fails. 
An alternative would be to 3716 // ensure the JVMS we have will be correct for this call 3717 guarantee(method_id == method->intrinsic_id(), "must match"); 3718 3719 const TypeFunc* tf = TypeFunc::make(method); 3720 CallJavaNode* slow_call; 3721 if (is_static) { 3722 assert(!is_virtual, ""); 3723 slow_call = new(C) CallStaticJavaNode(C, tf, 3724 SharedRuntime::get_resolve_static_call_stub(), 3725 method, bci()); 3726 } else if (is_virtual) { 3727 null_check_receiver(); 3728 int vtable_index = Method::invalid_vtable_index; 3729 if (UseInlineCaches) { 3730 // Suppress the vtable call 3731 } else { 3732 // hashCode and clone are not a miranda methods, 3733 // so the vtable index is fixed. 3734 // No need to use the linkResolver to get it. 3735 vtable_index = method->vtable_index(); 3736 } 3737 slow_call = new(C) CallDynamicJavaNode(tf, 3738 SharedRuntime::get_resolve_virtual_call_stub(), 3739 method, vtable_index, bci()); 3740 } else { // neither virtual nor static: opt_virtual 3741 null_check_receiver(); 3742 slow_call = new(C) CallStaticJavaNode(C, tf, 3743 SharedRuntime::get_resolve_opt_virtual_call_stub(), 3744 method, bci()); 3745 slow_call->set_optimized_virtual(true); 3746 } 3747 set_arguments_for_java_call(slow_call); 3748 set_edges_for_java_call(slow_call); 3749 return slow_call; 3750 } 3751 3752 3753 //------------------------------inline_native_hashcode-------------------- 3754 // Build special case code for calls to hashCode on an object. 
/* NOTE(review): this line is extraction residue — the right-hand column of a
   side-by-side diff, duplicating the code above but already containing the
   vtable_index sanity assert (original lines 3686-3687). Kept verbatim. */
3755 bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) { | 3666 generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT, 3667 original, start, newcopy, intcon(0), moved, 3668 disjoint_bases, length_never_negative); 3669 } 3670 } // original reexecute is set back here 3671 3672 C->set_has_split_ifs(true); // Has chance for split-if optimization 3673 if (!stopped()) { 3674 set_result(newcopy); 3675 } 3676 return true; 3677 } 3678 3679 3680 //----------------------generate_virtual_guard--------------------------- 3681 // Helper for hashCode and clone. Peeks inside the vtable to avoid a call. 3682 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass, 3683 RegionNode* slow_region) { 3684 ciMethod* method = callee(); 3685 int vtable_index = method->vtable_index(); 3686 assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index, 3687 err_msg_res("bad index %d", vtable_index)); 3688 // Get the Method* out of the appropriate vtable entry. 3689 int entry_offset = (InstanceKlass::vtable_start_offset() + 3690 vtable_index*vtableEntry::size()) * wordSize + 3691 vtableEntry::method_offset_in_bytes(); 3692 Node* entry_addr = basic_plus_adr(obj_klass, entry_offset); 3693 Node* target_call = make_load(NULL, entry_addr, TypePtr::NOTNULL, T_ADDRESS); 3694 3695 // Compare the target method with the expected method (e.g., Object.hashCode). 3696 const TypePtr* native_call_addr = TypeMetadataPtr::make(method); 3697 3698 Node* native_call = makecon(native_call_addr); 3699 Node* chk_native = _gvn.transform(new(C) CmpPNode(target_call, native_call)); 3700 Node* test_native = _gvn.transform(new(C) BoolNode(chk_native, BoolTest::ne)); 3701 3702 return generate_slow_guard(test_native, slow_region); 3703 } 3704 3705 //-----------------------generate_method_call---------------------------- 3706 // Use generate_method_call to make a slow-call to the real 3707 // method if the fast path fails. 
/* NOTE(review): this line is extraction residue — the right-hand column of a
   side-by-side diff, duplicating generate_method_call above but already
   containing the vtable_index sanity assert (original lines 3738-3739).
   Kept verbatim. */
An alternative would be to 3718 // ensure the JVMS we have will be correct for this call 3719 guarantee(method_id == method->intrinsic_id(), "must match"); 3720 3721 const TypeFunc* tf = TypeFunc::make(method); 3722 CallJavaNode* slow_call; 3723 if (is_static) { 3724 assert(!is_virtual, ""); 3725 slow_call = new(C) CallStaticJavaNode(C, tf, 3726 SharedRuntime::get_resolve_static_call_stub(), 3727 method, bci()); 3728 } else if (is_virtual) { 3729 null_check_receiver(); 3730 int vtable_index = Method::invalid_vtable_index; 3731 if (UseInlineCaches) { 3732 // Suppress the vtable call 3733 } else { 3734 // hashCode and clone are not a miranda methods, 3735 // so the vtable index is fixed. 3736 // No need to use the linkResolver to get it. 3737 vtable_index = method->vtable_index(); 3738 assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index, 3739 err_msg_res("bad index %d", vtable_index)); 3740 } 3741 slow_call = new(C) CallDynamicJavaNode(tf, 3742 SharedRuntime::get_resolve_virtual_call_stub(), 3743 method, vtable_index, bci()); 3744 } else { // neither virtual nor static: opt_virtual 3745 null_check_receiver(); 3746 slow_call = new(C) CallStaticJavaNode(C, tf, 3747 SharedRuntime::get_resolve_opt_virtual_call_stub(), 3748 method, bci()); 3749 slow_call->set_optimized_virtual(true); 3750 } 3751 set_arguments_for_java_call(slow_call); 3752 set_edges_for_java_call(slow_call); 3753 return slow_call; 3754 } 3755 3756 3757 //------------------------------inline_native_hashcode-------------------- 3758 // Build special case code for calls to hashCode on an object. 3759 bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) { |