src/share/vm/opto/callGenerator.cpp
*** old/src/share/vm/opto/callGenerator.cpp	Fri Feb  3 17:38:08 2017
--- new/src/share/vm/opto/callGenerator.cpp	Fri Feb  3 17:38:08 2017

*** 44,54 ****
--- 44,54 ----
  // Utility function.
  const TypeFunc* CallGenerator::tf() const {
    return TypeFunc::make(method());
  }
  
! bool CallGenerator::is_inlined_mh_linker(JVMState* jvms, ciMethod* callee) {
! bool CallGenerator::is_inlined_method_handle_intrinsic(JVMState* jvms, ciMethod* callee) {
    ciMethod* symbolic_info = jvms->method()->get_method_at_bci(jvms->bci());
    return symbolic_info->is_method_handle_intrinsic() && !callee->is_method_handle_intrinsic();
  }
  
  //-----------------------------ParseGenerator---------------------------------
*** 140,150 ****
--- 140,150 ----
    if (kit.C->log() != NULL) {
      kit.C->log()->elem("direct_call bci='%d'", jvms->bci());
    }
  
    CallStaticJavaNode *call = new CallStaticJavaNode(kit.C, tf(), target, method(), kit.bci());
!   if (is_inlined_mh_linker(jvms, method())) {
!   if (is_inlined_method_handle_intrinsic(jvms, method())) {
      // To be able to issue a direct call and skip a call to MH.linkTo*/invokeBasic adapter,
      // additional information about the method being invoked should be attached
      // to the call site to make resolution logic work
      // (see SharedRuntime::resolve_static_call_C).
      call->set_override_symbolic_info(true);
*** 239,249 ****
--- 239,249 ----
    assert(_vtable_index == Method::invalid_vtable_index || !UseInlineCaches,
           "no vtable calls if +UseInlineCaches ");
    address target = SharedRuntime::get_resolve_virtual_call_stub();
    // Normal inline cache used for call
    CallDynamicJavaNode *call = new CallDynamicJavaNode(tf(), target, method(), _vtable_index, kit.bci());
!   if (is_inlined_mh_linker(jvms, method())) {
!   if (is_inlined_method_handle_intrinsic(jvms, method())) {
      // To be able to issue a direct call (optimized virtual or virtual)
      // and skip a call to MH.linkTo*/invokeBasic adapter, additional information
      // about the method being invoked should be attached to the call site to
      // make resolution logic work (see SharedRuntime::resolve_{virtual,opt_virtual}_call_C).
      call->set_override_symbolic_info(true);
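
Note: the two hunks above attach the concrete target to the call node and set override_symbolic_info so that runtime call resolution can skip the MH.linkTo*/invokeBasic adapter. For orientation only, the decision the comments describe looks roughly like the sketch below; the types and helper names (CallSite, attached_method, symbolic_method, resolve_callee) are hypothetical stand-ins, not HotSpot's real SharedRuntime API.

#include <cstddef>

// Hypothetical, simplified stand-ins for illustration; not HotSpot types.
struct Method   { const char* name; };
struct CallSite {
  bool    override_symbolic_info;  // set via call->set_override_symbolic_info(true)
  Method* attached_method;         // concrete target recorded by C2 at compile time
  Method* symbolic_method;         // what the bytecode names (the MH linker/adapter)
};

// Sketch of the choice described in the comments above
// (see SharedRuntime::resolve_static_call_C for the real logic).
static Method* resolve_callee(const CallSite& site) {
  if (site.override_symbolic_info && site.attached_method != NULL) {
    return site.attached_method;   // trust the attached target, skip the adapter
  }
  return site.symbolic_method;     // normal path: use the symbolic reference
}
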
*** 783,794 ****
--- 783,793 ----
    return kit.transfer_exceptions_into_jvms();
  }
  
  CallGenerator* CallGenerator::for_method_handle_call(JVMState* jvms, ciMethod* caller, ciMethod* callee,
                                                       bool delayed_forbidden) {
!   assert(callee->is_method_handle_intrinsic() || callee->is_compiled_lambda_form(),
!          "for_method_handle_call mismatch");
!   assert(callee->is_method_handle_intrinsic(), "for_method_handle_call mismatch");
    bool input_not_const;
    CallGenerator* cg = CallGenerator::for_method_handle_inline(jvms, caller, callee, input_not_const);
    Compile* C = Compile::current();
    if (cg != NULL) {
      if (!delayed_forbidden && AlwaysIncrementalInline) {
*** 808,817 ****
--- 807,891 ----
      // Out-of-line call.
      return CallGenerator::for_direct_call(callee);
    }
  }
  
+ static BasicType erase_to_word_type(BasicType bt) {
+   if (is_subword_type(bt)) return T_INT;
+   if (bt == T_ARRAY)       return T_OBJECT;
+   return bt;
+ }
+ 
+ static bool basic_types_match(ciType* t1, ciType* t2) {
+   if (t1 == t2) return true;
+   return erase_to_word_type(t1->basic_type()) == erase_to_word_type(t2->basic_type());
+ }
+ 
+ bool CallGenerator::ensure_mh_intrinsic_matches_target_method(ciMethod* linker, ciMethod* target) {
+   assert(linker->is_method_handle_intrinsic(), "sanity");
+   assert(!target->is_method_handle_intrinsic(), "sanity");
+ 
+   // Linkers have appendix argument which is not passed to callee.
+   int has_appendix = MethodHandles::has_member_arg(linker->intrinsic_id()) ? 1 : 0;
+   if (linker->arg_size() != (target->arg_size() + has_appendix)) {
+     return false; // argument slot count mismatch
+   }
+ 
+   ciSignature* linker_sig = linker->signature();
+   ciSignature* target_sig = target->signature();
+ 
+   if (linker_sig->count() + (linker->is_static() ? 0 : 1) !=
+       target_sig->count() + (target->is_static() ? 0 : 1) + has_appendix) {
+     return false; // argument count mismatch
+   }
+ 
+   int sbase = 0, rbase = 0;
+   switch (linker->intrinsic_id()) {
+     case vmIntrinsics::_linkToVirtual:
+     case vmIntrinsics::_linkToInterface:
+     case vmIntrinsics::_linkToSpecial: {
+       if (target->is_static()) {
+         return false;
+       }
+       if (linker_sig->type_at(0)->is_primitive_type()) {
+         return false;  // receiver should be an oop
+       }
+       sbase = 1;       // skip receiver
+       break;
+     }
+     case vmIntrinsics::_linkToStatic: {
+       if (!target->is_static()) {
+         return false;
+       }
+       break;
+     }
+     case vmIntrinsics::_invokeBasic: {
+       if (target->is_static()) {
+         if (target_sig->type_at(0)->is_primitive_type()) {
+           return false;  // receiver should be an oop
+         }
+         rbase = 1;       // skip receiver
+       }
+       break;
+     }
+   }
+   assert(target_sig->count() - rbase == linker_sig->count() - sbase - has_appendix, "argument count mismatch");
+   int arg_count = target_sig->count() - rbase;
+   for (int i = 0; i < arg_count; i++) {
+     if (!basic_types_match(linker_sig->type_at(sbase + i), target_sig->type_at(rbase + i))) {
+       return false;
+     }
+   }
+   // Only check the return type if the symbolic info has non-void return type.
+   // I.e. the return value of the resolved method can be dropped.
+   if (!linker->return_type()->is_void() &&
+       !basic_types_match(linker->return_type(), target->return_type())) {
+     return false;
+   }
+   return true; // no mismatch found
+ }
+ 
  CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const) {
    GraphKit kit(jvms);
    PhaseGVN& gvn = kit.gvn();
    Compile* C = kit.C;
    vmIntrinsics::ID iid = callee->intrinsic_id();
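
Note: the matching rule added above compares erased word types rather than exact signatures: subword types (boolean, byte, char, short) all count as int, and arrays count as Object. Below is a self-contained sketch of just that rule, using a plain enum as a simplified stand-in for HotSpot's BasicType; it is illustration only, not HotSpot code.

#include <cassert>

// Simplified stand-in for HotSpot's BasicType; illustration only.
enum WordType { T_BOOLEAN, T_BYTE, T_CHAR, T_SHORT, T_INT, T_LONG,
                T_FLOAT, T_DOUBLE, T_OBJECT, T_ARRAY };

static WordType erase_to_word_type(WordType bt) {
  // Subword integral types are passed as int words.
  if (bt == T_BOOLEAN || bt == T_BYTE || bt == T_CHAR || bt == T_SHORT) return T_INT;
  // Arrays are just oops for the purpose of this check.
  if (bt == T_ARRAY) return T_OBJECT;
  return bt;
}

static bool basic_types_match(WordType t1, WordType t2) {
  return erase_to_word_type(t1) == erase_to_word_type(t2);
}

int main() {
  assert(basic_types_match(T_INT, T_SHORT));     // short erases to int
  assert(basic_types_match(T_OBJECT, T_ARRAY));  // array erases to Object
  assert(!basic_types_match(T_INT, T_LONG));     // long is a different word kind
  return 0;
}
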
*** 824,843 ****
--- 898,923 ----
      if (receiver->Opcode() == Op_ConP) {
        input_not_const = false;
        const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
        ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
        const int vtable_index = Method::invalid_vtable_index;
+ 
+       if (!ensure_mh_intrinsic_matches_target_method(callee, target)) {
+         print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(),
+                                "signatures mismatch");
+         return NULL;
+       }
+ 
        CallGenerator* cg = C->call_generator(target, vtable_index,
                                              false /* call_does_dispatch */,
                                              jvms,
                                              true /* allow_inline */,
                                              PROB_ALWAYS);
        return cg;
      } else {
!       const char* msg = "receiver not constant";
!       if (PrintInlining)  C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);
!       C->log_inline_failure(msg);
!       print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(),
!                              "receiver not constant");
      }
    }
    break;
  
  case vmIntrinsics::_linkToVirtual:
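
Note: the new failure paths above and below call a print_inlining_failure helper whose definition is not part of this hunk. Judging from the code it replaces, it presumably folds the PrintInlining-guarded C->print_inlining call and C->log_inline_failure into one call; the body below is only an inferred sketch of its shape, not part of the actual change.

// Assumed shape of the helper used above; its definition is not shown in this
// webrev, so this body is inferred from the code it replaces.
static void print_inlining_failure(Compile* C, ciMethod* callee,
                                   int inline_depth, int bci, const char* msg) {
  if (PrintInlining) {
    C->print_inlining(callee, inline_depth, bci, msg);
  }
  C->log_inline_failure(msg);
}
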
*** 850,859 ****
--- 930,945 ----
      if (member_name->Opcode() == Op_ConP) {
        input_not_const = false;
        const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr();
        ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget();
  
+       if (!ensure_mh_intrinsic_matches_target_method(callee, target)) {
+         print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(),
+                                "signatures mismatch");
+         return NULL;
+       }
+ 
        // In lambda forms we erase signature types to avoid resolving issues
        // involving class loaders. When we optimize a method handle invoke
        // to a direct call we must cast the receiver and arguments to its
        // actual types.
        ciSignature* signature = target->signature();
*** 910,922 ****
--- 996,1007 ----
                                              true /* allow_inline */,
                                              PROB_ALWAYS,
                                              speculative_receiver_type);
        return cg;
      } else {
!       const char* msg = "member_name not constant";
!       if (PrintInlining)  C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);
!       C->log_inline_failure(msg);
!       print_inlining_failure(C, callee, jvms->depth() - 1, jvms->bci(),
!                              "member_name not constant");
      }
    }
    break;
  
  default:
