src/hotspot/share/opto/callGenerator.cpp


 169     }
 170     if (kit.stopped()) {
 171       // And dump it back to the caller, decorated with any exceptions:
 172       return kit.transfer_exceptions_into_jvms();
 173     }
 174     // Mark the call node as virtual, sort of:
 175     call->set_optimized_virtual(true);
 176     if (method()->is_method_handle_intrinsic() ||
 177         method()->is_compiled_lambda_form()) {
 178       call->set_method_handle_invoke(true);
 179     }
 180   }
 181   kit.set_arguments_for_java_call(call);
 182   kit.set_edges_for_java_call(call, false, _separate_io_proj);
 183   Node* ret = kit.set_results_for_java_call(call, _separate_io_proj);
 184   // Check if return value is a value type pointer
 185   const TypeValueTypePtr* vtptr = gvn.type(ret)->isa_valuetypeptr();
 186   if (vtptr != NULL) {
 187     if (!vtptr->is__Value()) {
 188       // Create ValueTypeNode from the oop and replace the return value
 189       Node* vt = ValueTypeNode::make(&kit, ret);
 190       kit.push_node(T_VALUETYPE, vt);
 191     } else {
 192       kit.push_node(T_VALUETYPE, ret);
 193     }
 194   } else {
 195     kit.push_node(method()->return_type()->basic_type(), ret);
 196   }
 197   return kit.transfer_exceptions_into_jvms();
 198 }
 199 
 200 //--------------------------VirtualCallGenerator------------------------------
 201 // Internal class handling all out-of-line calls that check the receiver type.
 202 class VirtualCallGenerator : public CallGenerator {
 203 private:
 204   int _vtable_index;
 205 public:
 206   VirtualCallGenerator(ciMethod* method, int vtable_index)
 207     : CallGenerator(method), _vtable_index(vtable_index)
 208   {
 209     assert(vtable_index == Method::invalid_vtable_index ||


 263   assert(!method()->is_private(), "virtual call should not be to private");
 264   assert(_vtable_index == Method::invalid_vtable_index || !UseInlineCaches,
 265          "no vtable calls if +UseInlineCaches ");
 266   address target = SharedRuntime::get_resolve_virtual_call_stub();
 267   // Normal inline cache used for call
 268   CallDynamicJavaNode *call = new CallDynamicJavaNode(tf(), target, method(), _vtable_index, kit.bci());
 269   if (is_inlined_method_handle_intrinsic(jvms, method())) {
 270     // To be able to issue a direct call (optimized virtual or virtual)
 271     // and skip a call to the MH.linkTo*/invokeBasic adapter, additional information
 272     // about the method being invoked must be attached to the call site to
 273     // make the resolution logic work (see SharedRuntime::resolve_{virtual,opt_virtual}_call_C).
 274     call->set_override_symbolic_info(true);
 275   }
 276   kit.set_arguments_for_java_call(call);
 277   kit.set_edges_for_java_call(call);
 278   Node* ret = kit.set_results_for_java_call(call);
 279   // Check if return value is a value type pointer
 280   if (gvn.type(ret)->isa_valuetypeptr()) {
 281     // Create ValueTypeNode from the oop and replace the return value
 282     Node* ctl = kit.control();
 283     Node* vt = ValueTypeNode::make(&kit, ret);
 284     kit.set_control(ctl);
 285     kit.push_node(T_VALUETYPE, vt);
 286   } else {
 287     kit.push_node(method()->return_type()->basic_type(), ret);
 288   }
 289 
 290   // Represent the effect of an implicit receiver null_check
 291   // as part of this call.  Since we share a map with the caller,
 292   // its JVMS gets adjusted.
 293   kit.cast_not_null(receiver);
 294   return kit.transfer_exceptions_into_jvms();
 295 }
 296 
 297 CallGenerator* CallGenerator::for_inline(ciMethod* m, float expected_uses) {
 298   if (InlineTree::check_can_parse(m) != NULL)  return NULL;
 299   return new ParseGenerator(m, expected_uses);
 300 }
 301 
 302 // As a special case, the JVMS passed to this CallGenerator is
 303 // for the method execution already in progress, not just the JVMS


 423   Node* top = C->top();
 424   for (uint i1 = TypeFunc::Parms; i1 < call->_tf->domain_cc()->cnt(); i1++) {
 425     map->set_req(i1, top);
 426   }
 427   jvms->set_map(map);
 428 
 429   // Make enough space in the expression stack to transfer
 430   // the incoming arguments and return value.
 431   map->ensure_stack(jvms, jvms->method()->max_stack());
 432   const TypeTuple *domain_sig = call->_tf->domain_sig();
 433   uint nargs = method()->arg_size();
 434   assert(domain_sig->cnt() - TypeFunc::Parms == nargs, "inconsistent signature");
 435 
 436   uint j = TypeFunc::Parms;
 437   for (uint i1 = 0; i1 < nargs; i1++) {
 438     const Type* t = domain_sig->field_at(TypeFunc::Parms + i1);
 439     if (!ValueTypePassFieldsAsArgs) {
 440       Node* arg = call->in(TypeFunc::Parms + i1);
 441       if (t->isa_valuetypeptr()) {
 442         Node* ctl = map->control();
 443         arg = ValueTypeNode::make(gvn, ctl, map->memory(), arg);
 444         map->set_control(ctl);
 445       }
 446       map->set_argument(jvms, i1, arg);
 447     } else {
 448       if (t->isa_valuetypeptr() && !t->is_valuetypeptr()->is__Value()) {
 449         ciValueKlass* vk = t->is_valuetypeptr()->value_type()->value_klass();
 450         Node* ctl = map->control();
 451         Node* vt = ValueTypeNode::make(gvn, ctl, map->memory(), call, vk, j, true);
 452         map->set_control(ctl);
 453         map->set_argument(jvms, i1, gvn.transform(vt));
 454         j += vk->value_arg_slots();
 455       } else {
 456         map->set_argument(jvms, i1, call->in(j));
 457         j++;
 458       }
 459     }
 460   }
 461 
 462   C->print_inlining_assert_ready();
 463 
 464   C->print_inlining_move_to(this);
 465 
 466   C->log_late_inline(this);
 467 
 468   // This check is done here because the for_method_handle_inline() method
 469   // needs the jvms for the inlined state.
 470   if (!do_late_inline_check(jvms)) {
 471     map->disconnect_inputs(NULL, C);
 472     return;
 473   }


 489   GraphKit kit(new_jvms);
 490 
 491   // Find the result object
 492   Node* result = C->top();
 493   ciType* return_type = _inline_cg->method()->return_type();
 494   int result_size = return_type->size();
 495   if (result_size != 0 && !kit.stopped()) {
 496     result = (result_size == 1) ? kit.pop() : kit.pop_pair();
 497   }
 498 
 499   C->set_has_loops(C->has_loops() || _inline_cg->method()->has_loops());
 500   C->env()->notice_inlined_method(_inline_cg->method());
 501   C->set_inlining_progress(true);
 502 
 503   if (return_type->is_valuetype()) {
 504     const Type* vt_t = call->_tf->range_sig()->field_at(TypeFunc::Parms);
 505     bool returned_as_fields = call->tf()->returns_value_type_as_fields();
 506     if (result->is_ValueType()) {
 507       ValueTypeNode* vt = result->as_ValueType();
 508       if (!returned_as_fields) {
 509         result = vt->allocate(&kit)->get_oop();
 510         result = gvn.transform(new ValueTypePtrNode(vt, result, C));
 511       } else {
 512         // Return of multiple values (the fields of a value type)
 513         vt->replace_call_results(&kit, call, C);
 514         if (gvn.type(vt->get_oop()) == TypePtr::NULL_PTR) {
 515           result = vt->tagged_klass(gvn);
 516         } else {
 517           result = vt->get_oop();
 518         }
 519       }
 520     } else if (gvn.type(result)->isa_valuetypeptr() && returned_as_fields) {
 521       assert(!vt_t->is_valuetypeptr()->is__Value(), "__Value not supported");
 522       Node* cast = new CheckCastPPNode(NULL, result, vt_t);
 523       gvn.record_for_igvn(cast);
 524       Node* ctl = kit.control();
 525       ValueTypePtrNode* vtptr = ValueTypePtrNode::make(gvn, ctl, kit.merged_memory(), gvn.transform(cast));
 526       kit.set_control(ctl);
 527       vtptr->replace_call_results(&kit, call, C);
 528       result = cast;
 529     } else if (!return_type->is__Value()) {
 530       assert(result->is_top(), "what else?");
 531       for (DUIterator_Fast imax, i = call->fast_outs(imax); i < imax; i++) {
 532         ProjNode *pn = call->fast_out(i)->as_Proj();
 533         uint con = pn->_con;
 534         if (con >= TypeFunc::Parms) {
 535           gvn.hash_delete(pn);
 536           pn->set_req(0, C->top());
 537           --i; --imax;
 538         }
 539       }
 540     }
 541   }
 542 
 543   kit.replace_call(call, result, true);
 544 }
 545 


 901   ciCallProfile profile = caller->call_profile_at_bci(bci);
 902   int call_site_count = caller->scale_count(profile.count());
 903 
 904   if (IncrementalInline && (AlwaysIncrementalInline ||
 905                             (call_site_count > 0 && (input_not_const || !C->inlining_incrementally() || C->over_inlining_cutoff())))) {
 906     return CallGenerator::for_mh_late_inline(caller, callee, input_not_const);
 907   } else {
 908     // Out-of-line call.
 909     return CallGenerator::for_direct_call(callee);
 910   }
 911 }
 912 
 913 static void cast_argument(int arg_nb, ciType* t, GraphKit& kit) {
 914   PhaseGVN& gvn = kit.gvn();
 915   Node* arg = kit.argument(arg_nb);
 916   const Type* arg_type = arg->bottom_type();
 917   const Type* sig_type = TypeOopPtr::make_from_klass(t->as_klass());
 918   if (t->is_valuetype()) {
 919     assert(!(arg_type->isa_valuetype() && t->is__Value()), "need a pointer to the value type");
 920     if (arg_type->isa_valuetypeptr() && !t->is__Value()) {


 921       Node* cast = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type));
 922       Node* vt = ValueTypeNode::make(&kit, cast);
 923       kit.set_argument(arg_nb, vt);
 924     } else {
 925       assert(t->is__Value() || arg->is_ValueType(), "inconsistent argument");
 926     }
 927   } else {
 928     if (arg_type->isa_oopptr() && !arg_type->higher_equal(sig_type)) {
 929       Node* cast_obj = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type));
 930       kit.set_argument(arg_nb, cast_obj);
 931     }
 932   }
 933 }
 934 
 935 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const, bool delayed_forbidden) {
 936   GraphKit kit(jvms);
 937   PhaseGVN& gvn = kit.gvn();
 938   Compile* C = kit.C;
 939   vmIntrinsics::ID iid = callee->intrinsic_id();
 940   input_not_const = true;
 941   switch (iid) {
 942   case vmIntrinsics::_invokeBasic:
 943     {
 944       // Get MethodHandle receiver:
 945       Node* receiver = kit.argument(0);
 946       if (receiver->Opcode() == Op_ConP) {
 947         input_not_const = false;
 948         const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
 949         ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
 950         const int vtable_index = Method::invalid_vtable_index;
 951 




 169     }
 170     if (kit.stopped()) {
 171       // And dump it back to the caller, decorated with any exceptions:
 172       return kit.transfer_exceptions_into_jvms();
 173     }
 174     // Mark the call node as virtual, sort of:
 175     call->set_optimized_virtual(true);
 176     if (method()->is_method_handle_intrinsic() ||
 177         method()->is_compiled_lambda_form()) {
 178       call->set_method_handle_invoke(true);
 179     }
 180   }
 181   kit.set_arguments_for_java_call(call);
 182   kit.set_edges_for_java_call(call, false, _separate_io_proj);
 183   Node* ret = kit.set_results_for_java_call(call, _separate_io_proj);
 184   // Check if return value is a value type pointer
 185   const TypeValueTypePtr* vtptr = gvn.type(ret)->isa_valuetypeptr();
 186   if (vtptr != NULL) {
 187     if (!vtptr->is__Value()) {
 188       // Create ValueTypeNode from the oop and replace the return value
 189       ValueTypeNode* vt = ValueTypeNode::make_from_oop(&kit, ret);
 190       kit.push_node(T_VALUETYPE, vt);
 191     } else {
 192       kit.push_node(T_VALUETYPE, ret);
 193     }
 194   } else {
 195     kit.push_node(method()->return_type()->basic_type(), ret);
 196   }
 197   return kit.transfer_exceptions_into_jvms();
 198 }
 199 
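
The hunk above switches the direct-call return path from ValueTypeNode::make to
ValueTypeNode::make_from_oop: when gvn typing shows the callee returns a value
type pointer (other than __Value), the raw oop is scalarized into a value type
node before being pushed for the caller. A minimal self-contained sketch of that
wrap-on-return idea, using toy Node/OopNode/ValueNode classes rather than
HotSpot's types:

    #include <cassert>

    struct Node { virtual ~Node() {} };
    struct OopNode : Node { int payload; explicit OopNode(int p) : payload(p) {} };
    // Stand-in for ValueTypeNode::make_from_oop: scalarize the oop's fields.
    struct ValueNode : Node {
      int field0;
      explicit ValueNode(const OopNode& oop) : field0(oop.payload) {}
    };

    // Analogue of the push logic above: a value type result is scalarized
    // before the caller sees it; any other result flows through unchanged.
    Node* wrap_return(OopNode* ret, bool returns_value_type) {
      if (returns_value_type) return new ValueNode(*ret);
      return ret;
    }

    int main() {
      OopNode raw(42);
      Node* n = wrap_return(&raw, /*returns_value_type=*/true);
      assert(dynamic_cast<ValueNode*>(n)->field0 == 42);
      delete n;
      return 0;
    }
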
 200 //--------------------------VirtualCallGenerator------------------------------
 201 // Internal class handling all out-of-line calls that check the receiver type.
 202 class VirtualCallGenerator : public CallGenerator {
 203 private:
 204   int _vtable_index;
 205 public:
 206   VirtualCallGenerator(ciMethod* method, int vtable_index)
 207     : CallGenerator(method), _vtable_index(vtable_index)
 208   {
 209     assert(vtable_index == Method::invalid_vtable_index ||


 263   assert(!method()->is_private(), "virtual call should not be to private");
 264   assert(_vtable_index == Method::invalid_vtable_index || !UseInlineCaches,
 265          "no vtable calls if +UseInlineCaches ");
 266   address target = SharedRuntime::get_resolve_virtual_call_stub();
 267   // Normal inline cache used for call
 268   CallDynamicJavaNode *call = new CallDynamicJavaNode(tf(), target, method(), _vtable_index, kit.bci());
 269   if (is_inlined_method_handle_intrinsic(jvms, method())) {
 270     // To be able to issue a direct call (optimized virtual or virtual)
 271     // and skip a call to the MH.linkTo*/invokeBasic adapter, additional information
 272     // about the method being invoked must be attached to the call site to
 273     // make the resolution logic work (see SharedRuntime::resolve_{virtual,opt_virtual}_call_C).
 274     call->set_override_symbolic_info(true);
 275   }
 276   kit.set_arguments_for_java_call(call);
 277   kit.set_edges_for_java_call(call);
 278   Node* ret = kit.set_results_for_java_call(call);
 279   // Check if return value is a value type pointer
 280   if (gvn.type(ret)->isa_valuetypeptr()) {
 281     // Create ValueTypeNode from the oop and replace the return value
 282     Node* ctl = kit.control();
 283     ValueTypeNode* vt = ValueTypeNode::make_from_oop(&kit, ret);
 284     kit.set_control(ctl);
 285     kit.push_node(T_VALUETYPE, vt);
 286   } else {
 287     kit.push_node(method()->return_type()->basic_type(), ret);
 288   }
 289 
 290   // Represent the effect of an implicit receiver null_check
 291   // as part of this call.  Since we share a map with the caller,
 292   // its JVMS gets adjusted.
 293   kit.cast_not_null(receiver);
 294   return kit.transfer_exceptions_into_jvms();
 295 }
 296 
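
Per the asserts above, vtable-indexed dispatch is only used when inline caches
are disabled; otherwise the CallDynamicJavaNode goes through the resolve stub
and a per-call-site inline cache. As a rough illustration of what a monomorphic
inline cache buys, here is a toy version that caches the last-seen receiver
type via typeid (illustrative only, nothing like HotSpot's stub mechanism):

    #include <iostream>
    #include <typeinfo>

    struct Animal { virtual const char* speak() const { return "..."; } virtual ~Animal() {} };
    struct Dog : Animal { const char* speak() const override { return "woof"; } };
    struct Cat : Animal { const char* speak() const override { return "meow"; } };

    const char* cached_speak(const Animal& a) {
      static const std::type_info* cached = nullptr;  // the inline-cache slot
      if (cached == nullptr || *cached != typeid(a)) {
        cached = &typeid(a);                          // miss: re-resolve, re-cache
      }
      // On a hit a real JIT jumps straight to the compiled target; the
      // C++ virtual call below stands in for that dispatch.
      return a.speak();
    }

    int main() {
      Dog d; Cat c;
      std::cout << cached_speak(d) << ' ' << cached_speak(d) << ' ' << cached_speak(c) << '\n';
      return 0;
    }
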
 297 CallGenerator* CallGenerator::for_inline(ciMethod* m, float expected_uses) {
 298   if (InlineTree::check_can_parse(m) != NULL)  return NULL;
 299   return new ParseGenerator(m, expected_uses);
 300 }
 301 
 302 // As a special case, the JVMS passed to this CallGenerator is
 303 // for the method execution already in progress, not just the JVMS


 423   Node* top = C->top();
 424   for (uint i1 = TypeFunc::Parms; i1 < call->_tf->domain_cc()->cnt(); i1++) {
 425     map->set_req(i1, top);
 426   }
 427   jvms->set_map(map);
 428 
 429   // Make enough space in the expression stack to transfer
 430   // the incoming arguments and return value.
 431   map->ensure_stack(jvms, jvms->method()->max_stack());
 432   const TypeTuple *domain_sig = call->_tf->domain_sig();
 433   uint nargs = method()->arg_size();
 434   assert(domain_sig->cnt() - TypeFunc::Parms == nargs, "inconsistent signature");
 435 
 436   uint j = TypeFunc::Parms;
 437   for (uint i1 = 0; i1 < nargs; i1++) {
 438     const Type* t = domain_sig->field_at(TypeFunc::Parms + i1);
 439     if (!ValueTypePassFieldsAsArgs) {
 440       Node* arg = call->in(TypeFunc::Parms + i1);
 441       if (t->isa_valuetypeptr()) {
 442         Node* ctl = map->control();
 443         arg = ValueTypeNode::make_from_oop(gvn, ctl, map->memory(), arg);
 444         map->set_control(ctl);
 445       }
 446       map->set_argument(jvms, i1, arg);
 447     } else {
 448       if (t->isa_valuetypeptr() && !t->is_valuetypeptr()->is__Value()) {
 449         ciValueKlass* vk = t->is_valuetypeptr()->value_klass();
 450         Node* ctl = map->control();
 451         ValueTypeNode* vt = ValueTypeNode::make_from_multi(gvn, ctl, map->memory(), call, vk, j, true);
 452         map->set_control(ctl);
 453         map->set_argument(jvms, i1, vt);
 454         j += vk->value_arg_slots();
 455       } else {
 456         map->set_argument(jvms, i1, call->in(j));
 457         j++;
 458       }
 459     }
 460   }
 461 
 462   C->print_inlining_assert_ready();
 463 
 464   C->print_inlining_move_to(this);
 465 
 466   C->log_late_inline(this);
 467 
 468   // This check is done here because the for_method_handle_inline() method
 469   // needs the jvms for the inlined state.
 470   if (!do_late_inline_check(jvms)) {
 471     map->disconnect_inputs(NULL, C);
 472     return;
 473   }
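
In the loop at lines 437-460 above, a scalarized value type argument
(ValueTypePassFieldsAsArgs) consumes value_arg_slots() incoming call edges and
is reassembled with ValueTypeNode::make_from_multi, while every other argument
consumes exactly one edge; j tracks the next unconsumed slot. The same
slot-walking arithmetic, reduced to a standalone sketch around a hypothetical
Arg record (in the toy, one slot per field; the real value_arg_slots() counts
Java argument slots, so longs and doubles take two):

    #include <iostream>
    #include <vector>

    struct Arg { bool is_value_type; int field_count; };

    int count_incoming_slots(const std::vector<Arg>& signature) {
      int j = 0;
      for (const Arg& a : signature) {
        // Scalarized value types spread across several slots;
        // ordinary arguments take exactly one.
        j += a.is_value_type ? a.field_count : 1;
      }
      return j;
    }

    int main() {
      std::vector<Arg> sig = { {false, 0}, {true, 3}, {false, 0} };
      std::cout << count_incoming_slots(sig) << '\n';  // prints 5
      return 0;
    }
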


 489   GraphKit kit(new_jvms);
 490 
 491   // Find the result object
 492   Node* result = C->top();
 493   ciType* return_type = _inline_cg->method()->return_type();
 494   int result_size = return_type->size();
 495   if (result_size != 0 && !kit.stopped()) {
 496     result = (result_size == 1) ? kit.pop() : kit.pop_pair();
 497   }
 498 
 499   C->set_has_loops(C->has_loops() || _inline_cg->method()->has_loops());
 500   C->env()->notice_inlined_method(_inline_cg->method());
 501   C->set_inlining_progress(true);
 502 
 503   if (return_type->is_valuetype()) {
 504     const Type* vt_t = call->_tf->range_sig()->field_at(TypeFunc::Parms);
 505     bool returned_as_fields = call->tf()->returns_value_type_as_fields();
 506     if (result->is_ValueType()) {
 507       ValueTypeNode* vt = result->as_ValueType();
 508       if (!returned_as_fields) {
 509         vt = vt->allocate(&kit)->as_ValueType();
 510         result = ValueTypePtrNode::make_from_value_type(gvn, vt);
 511       } else {
 512         // Return of multiple values (the fields of a value type)
 513         vt->replace_call_results(&kit, call, C);
 514         if (gvn.type(vt->get_oop()) == TypePtr::NULL_PTR) {
 515           result = vt->tagged_klass(gvn);
 516         } else {
 517           result = vt->get_oop();
 518         }
 519       }
 520     } else if (gvn.type(result)->isa_valuetypeptr() && returned_as_fields) {
 521       assert(!vt_t->is_valuetypeptr()->is__Value(), "__Value not supported");
 522       Node* cast = new CheckCastPPNode(NULL, result, vt_t);
 523       gvn.record_for_igvn(cast);
 524       Node* ctl = kit.control();
 525       ValueTypePtrNode* vtptr = ValueTypePtrNode::make_from_oop(gvn, ctl, kit.merged_memory(), gvn.transform(cast));
 526       kit.set_control(ctl);
 527       vtptr->replace_call_results(&kit, call, C);
 528       result = cast;
 529     } else if (!return_type->is__Value()) {
 530       assert(result->is_top(), "what else?");
 531       for (DUIterator_Fast imax, i = call->fast_outs(imax); i < imax; i++) {
 532         ProjNode *pn = call->fast_out(i)->as_Proj();
 533         uint con = pn->_con;
 534         if (con >= TypeFunc::Parms) {
 535           gvn.hash_delete(pn);
 536           pn->set_req(0, C->top());
 537           --i; --imax;
 538         }
 539       }
 540     }
 541   }
 542 
 543   kit.replace_call(call, result, true);
 544 }
 545 
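
Two return conventions meet in the hunk above: a value type can come back as a
single oop, or as multiple values (its fields in registers) when
returns_value_type_as_fields() is true. In the latter case, if the oop
projection is the constant NULL, the result slot carries vt->tagged_klass(gvn)
instead, so the runtime can tell the two encodings apart. A toy version of that
tagging trick, assuming only that pointer alignment leaves the low bit free
(names are hypothetical):

    #include <cassert>
    #include <cstdint>

    const uintptr_t TAG = 1;  // low bit marks "this word is a klass, not an oop"

    uintptr_t tag_klass(const void* klass) {
      return reinterpret_cast<uintptr_t>(klass) | TAG;
    }

    bool is_tagged_klass(uintptr_t word) { return (word & TAG) != 0; }

    int main() {
      static int dummy_klass;  // stands in for a Klass*
      uintptr_t word = tag_klass(&dummy_klass);
      assert(is_tagged_klass(word));
      assert(reinterpret_cast<int*>(word & ~TAG) == &dummy_klass);
      return 0;
    }
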


 901   ciCallProfile profile = caller->call_profile_at_bci(bci);
 902   int call_site_count = caller->scale_count(profile.count());
 903 
 904   if (IncrementalInline && (AlwaysIncrementalInline ||
 905                             (call_site_count > 0 && (input_not_const || !C->inlining_incrementally() || C->over_inlining_cutoff())))) {
 906     return CallGenerator::for_mh_late_inline(caller, callee, input_not_const);
 907   } else {
 908     // Out-of-line call.
 909     return CallGenerator::for_direct_call(callee);
 910   }
 911 }
 912 
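
The predicate at lines 904-905 decides between registering a late (incremental)
inline for the method-handle call and emitting an eager out-of-line direct
call. Restated as a standalone function over plain bools (same logic,
hypothetical signature) so the flag interplay is easier to read:

    #include <iostream>

    bool use_late_inline(bool incremental_inline, bool always_incremental,
                         int call_site_count, bool input_not_const,
                         bool inlining_incrementally, bool over_cutoff) {
      return incremental_inline &&
             (always_incremental ||
              (call_site_count > 0 &&
               (input_not_const || !inlining_incrementally || over_cutoff)));
    }

    int main() {
      // A constant MethodHandle at a hot call site, while already inlining
      // incrementally and under the cutoff: fall back to a direct call.
      std::cout << use_late_inline(true, false, 100, false, true, false) << '\n';  // 0
      return 0;
    }
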
 913 static void cast_argument(int arg_nb, ciType* t, GraphKit& kit) {
 914   PhaseGVN& gvn = kit.gvn();
 915   Node* arg = kit.argument(arg_nb);
 916   const Type* arg_type = arg->bottom_type();
 917   const Type* sig_type = TypeOopPtr::make_from_klass(t->as_klass());
 918   if (t->is_valuetype()) {
 919     assert(!(arg_type->isa_valuetype() && t->is__Value()), "need a pointer to the value type");
 920     if (arg_type->isa_valuetypeptr() && !t->is__Value()) {
 921       // Value type arguments cannot be NULL
 922       sig_type = sig_type->join_speculative(TypePtr::NOTNULL);
 923       Node* cast = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type));
 924       ValueTypeNode* vt = ValueTypeNode::make_from_oop(&kit, cast);
 925       kit.set_argument(arg_nb, vt);
 926     } else {
 927       assert(t->is__Value() || arg->is_ValueType(), "inconsistent argument");
 928     }
 929   } else if (arg_type->isa_oopptr() && !arg_type->higher_equal(sig_type)) {

 930     Node* cast_obj = gvn.transform(new CheckCastPPNode(kit.control(), arg, sig_type));
 931     kit.set_argument(arg_nb, cast_obj);

 932   }
 933 }
 934 
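
cast_argument narrows the signature type with TypePtr::NOTNULL before emitting
the CheckCastPP because a value type argument can never be null. A deliberately
tiny toy lattice showing just that null-ness join (this is not C2's type
system):

    #include <cassert>

    struct ToyType { bool maybe_null; };

    // Meet of the null-ness fact: the joined type allows null only if both
    // inputs do. join_speculative is far richer; this is the one bit the
    // hunk above relies on.
    ToyType join(ToyType a, ToyType b) { return { a.maybe_null && b.maybe_null }; }

    int main() {
      ToyType sig_type = { true };   // declared signature: could be null
      ToyType not_null = { false };  // analogue of TypePtr::NOTNULL
      assert(!join(sig_type, not_null).maybe_null);  // value types never null
      return 0;
    }
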
 935 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const, bool delayed_forbidden) {
 936   GraphKit kit(jvms);
 937   PhaseGVN& gvn = kit.gvn();
 938   Compile* C = kit.C;
 939   vmIntrinsics::ID iid = callee->intrinsic_id();
 940   input_not_const = true;
 941   switch (iid) {
 942   case vmIntrinsics::_invokeBasic:
 943     {
 944       // Get MethodHandle receiver:
 945       Node* receiver = kit.argument(0);
 946       if (receiver->Opcode() == Op_ConP) {
 947         input_not_const = false;
 948         const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
 949         ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
 950         const int vtable_index = Method::invalid_vtable_index;
 951 
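
When the invokeBasic receiver is a constant (Op_ConP), the generator
devirtualizes: input_not_const is cleared and the call is bound to the
MethodHandle's vmtarget. A toy analogue of that constant-receiver fast path
(all names hypothetical):

    #include <cassert>
    #include <functional>

    struct MethodHandle { int (*vmtarget)(int); };

    std::function<int(int)> bind_call(const MethodHandle* receiver_const) {
      if (receiver_const != nullptr) {
        // input_not_const = false case: bind directly to the known target.
        return receiver_const->vmtarget;
      }
      // Non-constant receiver: keep the generic (slow) dispatch.
      return [](int) { return -1; };
    }

    int twice(int x) { return 2 * x; }

    int main() {
      MethodHandle mh = { &twice };
      assert(bind_call(&mh)(21) == 42);
      return 0;
    }
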

