871 if (cg != NULL) { 872 if (!delayed_forbidden && AlwaysIncrementalInline) { 873 return CallGenerator::for_late_inline(callee, cg); 874 } else { 875 return cg; 876 } 877 } 878 int bci = jvms->bci(); 879 ciCallProfile profile = caller->call_profile_at_bci(bci); 880 int call_site_count = caller->scale_count(profile.count()); 881 882 if (IncrementalInline && (AlwaysIncrementalInline || 883 (call_site_count > 0 && (input_not_const || !C->inlining_incrementally() || C->over_inlining_cutoff())))) { 884 return CallGenerator::for_mh_late_inline(caller, callee, input_not_const); 885 } else { 886 // Out-of-line call. 887 return CallGenerator::for_direct_call(callee); 888 } 889 } 890 891 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const, bool delayed_forbidden) { 892 GraphKit kit(jvms); 893 PhaseGVN& gvn = kit.gvn(); 894 Compile* C = kit.C; 895 vmIntrinsics::ID iid = callee->intrinsic_id(); 896 input_not_const = true; 897 switch (iid) { 898 case vmIntrinsics::_invokeBasic: 899 { 900 // Get MethodHandle receiver: 901 Node* receiver = kit.argument(0); 902 if (receiver->Opcode() == Op_ConP) { 903 input_not_const = false; 904 const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr(); 905 ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget(); 906 const int vtable_index = Method::invalid_vtable_index; 907 908 if (!ciMethod::is_consistent_info(callee, target)) { 909 print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(), 910 "signatures mismatch"); 936 Node* member_name = kit.argument(callee->arg_size() - 1); 937 if (member_name->Opcode() == Op_ConP) { 938 input_not_const = false; 939 const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr(); 940 ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget(); 941 942 if (!ciMethod::is_consistent_info(callee, target)) { 943 print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(), 944 
"signatures mismatch"); 945 return NULL; 946 } 947 948 // In lambda forms we erase signature types to avoid resolving issues 949 // involving class loaders. When we optimize a method handle invoke 950 // to a direct call we must cast the receiver and arguments to its 951 // actual types. 952 ciSignature* signature = target->signature(); 953 const int receiver_skip = target->is_static() ? 0 : 1; 954 // Cast receiver to its type. 955 if (!target->is_static()) { 956 Node* arg = kit.argument(0); 957 const TypeOopPtr* arg_type = arg->bottom_type()->isa_oopptr(); 958 const Type* sig_type = TypeOopPtr::make_from_klass(signature->accessing_klass()); 959 if (arg_type != NULL && !arg_type->higher_equal(sig_type)) { 960 Node* cast_obj = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type)); 961 kit.set_argument(0, cast_obj); 962 } 963 } 964 // Cast reference arguments to its type. 965 for (int i = 0, j = 0; i < signature->count(); i++) { 966 ciType* t = signature->type_at(i); 967 if (t->is_klass()) { 968 Node* arg = kit.argument(receiver_skip + j); 969 const TypeOopPtr* arg_type = arg->bottom_type()->isa_oopptr(); 970 const Type* sig_type = TypeOopPtr::make_from_klass(t->as_klass()); 971 if (arg_type != NULL && !arg_type->higher_equal(sig_type)) { 972 Node* cast_obj = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type)); 973 kit.set_argument(receiver_skip + j, cast_obj); 974 } 975 } 976 j += t->size(); // long and double take two slots 977 } 978 979 // Try to get the most accurate receiver type 980 const bool is_virtual = (iid == vmIntrinsics::_linkToVirtual); 981 const bool is_virtual_or_interface = (is_virtual || iid == vmIntrinsics::_linkToInterface); 982 int vtable_index = Method::invalid_vtable_index; 983 bool call_does_dispatch = false; 984 985 ciKlass* speculative_receiver_type = NULL; 986 if (is_virtual_or_interface) { 987 ciInstanceKlass* klass = target->holder(); 988 Node* receiver_node = kit.argument(0); 989 const TypeOopPtr* receiver_type = 
gvn.type(receiver_node)->isa_oopptr(); 990 // call_does_dispatch and vtable_index are out-parameters. They might be changed. 991 // optimize_virtual_call() takes 2 different holder 992 // arguments for a corner case that doesn't apply here (see 993 // Parse::do_call()) 994 target = C->optimize_virtual_call(caller, jvms->bci(), klass, klass, | 871 if (cg != NULL) { 872 if (!delayed_forbidden && AlwaysIncrementalInline) { 873 return CallGenerator::for_late_inline(callee, cg); 874 } else { 875 return cg; 876 } 877 } 878 int bci = jvms->bci(); 879 ciCallProfile profile = caller->call_profile_at_bci(bci); 880 int call_site_count = caller->scale_count(profile.count()); 881 882 if (IncrementalInline && (AlwaysIncrementalInline || 883 (call_site_count > 0 && (input_not_const || !C->inlining_incrementally() || C->over_inlining_cutoff())))) { 884 return CallGenerator::for_mh_late_inline(caller, callee, input_not_const); 885 } else { 886 // Out-of-line call. 887 return CallGenerator::for_direct_call(callee); 888 } 889 } 890
// Cast the argument in slot 'arg_nb' to the type 't' taken from the target's
// signature. Lambda forms erase signature types to avoid class-loader
// resolution issues, so when a method handle invoke is optimized to a direct
// call the receiver/arguments may be typed more weakly than the signature
// requires; this narrows them back with a CheckCastPP. Value-type arguments
// are additionally rewrapped as a ValueTypeNode after the cast.
static void cast_argument(int arg_nb, ciType* t, GraphKit& kit) {
  PhaseGVN& gvn = kit.gvn();
  Node* arg = kit.argument(arg_nb);
  const Type* arg_type = arg->bottom_type();
  const Type* sig_type = TypeOopPtr::make_from_klass(t->as_klass());
  if (t->is_valuetype()) {
    // An argument of the special __Value klass must not arrive as an unboxed
    // value type node (see assert message): it has to stay a pointer.
    assert(!(arg_type->isa_valuetype() && t == kit.C->env()->___Value_klass()), "need a pointer to the value type");
    if (arg_type->isa_valuetypeptr() && t != kit.C->env()->___Value_klass()) {
      // Narrow the oop to the declared value klass, then build the
      // ValueTypeNode representation from the cast pointer.
      // NOTE: the original declared a second, shadowing 'sig_type' here with
      // the identical value; the outer one is used directly instead.
      Node* cast = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type));
      Node* vt = ValueTypeNode::make(gvn, kit.merged_memory(), cast);
      kit.set_argument(arg_nb, vt);
    }
  } else if (arg_type->isa_oopptr() && !arg_type->higher_equal(sig_type)) {
    // Reference argument: cast only when its current type is not already at
    // least as precise as the signature type.
    Node* cast_obj = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type));
    kit.set_argument(arg_nb, cast_obj);
  }
}
911 912 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const, bool delayed_forbidden) { 913 GraphKit kit(jvms); 914 PhaseGVN& gvn = kit.gvn(); 915 Compile* C = kit.C; 916 vmIntrinsics::ID iid = callee->intrinsic_id(); 917 input_not_const = true; 918 switch (iid) { 919 case vmIntrinsics::_invokeBasic: 920 { 921 // Get MethodHandle receiver: 922 Node* receiver = kit.argument(0); 923 if (receiver->Opcode() == Op_ConP) { 924 input_not_const = false; 925 const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr(); 926 ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget(); 927 const int vtable_index = Method::invalid_vtable_index; 928 929 if (!ciMethod::is_consistent_info(callee, target)) { 930 print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(), 931 "signatures mismatch"); 957 Node* member_name = kit.argument(callee->arg_size() - 1); 958 if (member_name->Opcode() == Op_ConP) { 959 input_not_const = false; 960 const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr(); 961 ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget(); 962 963 if (!ciMethod::is_consistent_info(callee, target)) { 964 print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(), 965 "signatures mismatch"); 966 return NULL; 967 } 968 969 // In lambda forms we erase signature types to avoid resolving issues 970 // involving class loaders. When we optimize a method handle invoke 971 // to a direct call we must cast the receiver and arguments to its 972 // actual types. 973 ciSignature* signature = target->signature(); 974 const int receiver_skip = target->is_static() ? 0 : 1; 975 // Cast receiver to its type. 976 if (!target->is_static()) { 977 cast_argument(0, signature->accessing_klass(), kit); 978 } 979 // Cast reference arguments to its type. 
980 for (int i = 0, j = 0; i < signature->count(); i++) { 981 ciType* t = signature->type_at(i); 982 if (t->is_klass()) { 983 cast_argument(receiver_skip + j, t, kit); 984 } 985 j += t->size(); // long and double take two slots 986 } 987 988 // Try to get the most accurate receiver type 989 const bool is_virtual = (iid == vmIntrinsics::_linkToVirtual); 990 const bool is_virtual_or_interface = (is_virtual || iid == vmIntrinsics::_linkToInterface); 991 int vtable_index = Method::invalid_vtable_index; 992 bool call_does_dispatch = false; 993 994 ciKlass* speculative_receiver_type = NULL; 995 if (is_virtual_or_interface) { 996 ciInstanceKlass* klass = target->holder(); 997 Node* receiver_node = kit.argument(0); 998 const TypeOopPtr* receiver_type = gvn.type(receiver_node)->isa_oopptr(); 999 // call_does_dispatch and vtable_index are out-parameters. They might be changed. 1000 // optimize_virtual_call() takes 2 different holder 1001 // arguments for a corner case that doesn't apply here (see 1002 // Parse::do_call()) 1003 target = C->optimize_virtual_call(caller, jvms->bci(), klass, klass, |