469 Compile::current()->inc_number_of_mh_late_inlines();
470 CallGenerator* cg = new LateInlineMHCallGenerator(caller, callee, input_not_const);
471 return cg;
472 }
473
474 class LateInlineStringCallGenerator : public LateInlineCallGenerator {
475
476 public:
477 LateInlineStringCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
478 LateInlineCallGenerator(method, inline_cg) {}
479
480 virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
481 Compile *C = Compile::current();
482 C->print_inlining_skip(this);
483
484 C->add_string_late_inline(this);
485
486 JVMState* new_jvms = DirectCallGenerator::generate(jvms, parent_parser);
487 return new_jvms;
488 }
489 };
490
491 CallGenerator* CallGenerator::for_string_late_inline(ciMethod* method, CallGenerator* inline_cg) {
492 return new LateInlineStringCallGenerator(method, inline_cg);
493 }
494
495 class LateInlineBoxingCallGenerator : public LateInlineCallGenerator {
496
497 public:
498 LateInlineBoxingCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
499 LateInlineCallGenerator(method, inline_cg) {}
500
501 virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
502 Compile *C = Compile::current();
503 C->print_inlining_skip(this);
504
505 C->add_boxing_late_inline(this);
506
507 JVMState* new_jvms = DirectCallGenerator::generate(jvms, parent_parser);
508 return new_jvms;
756 }
757 }
758
// Try to inline a method-handle invoker/linker intrinsic at parse time.
// For _invokeBasic the MethodHandle receiver, and for the _linkTo* linkers
// the trailing MemberName argument, must be a compile-time constant
// (Op_ConP); only then can the concrete target method be resolved and
// handed to C->call_generator() for inlining.  Returns NULL when no
// inline-capable generator was produced.  'input_not_const' is an
// out-parameter: cleared only when the relevant argument was constant.
// NOTE(review): this listing jumps from line 796 to 812, so the
// declarations of 'signature' and 'receiver_skip' used below are not
// visible here -- verify against the full file.
759 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const) {
760 GraphKit kit(jvms);
761 PhaseGVN& gvn = kit.gvn();
762 Compile* C = kit.C;
763 vmIntrinsics::ID iid = callee->intrinsic_id();
764 input_not_const = true;
765 switch (iid) {
766 case vmIntrinsics::_invokeBasic:
767 {
768 // Get MethodHandle receiver:
769 Node* receiver = kit.argument(0);
770 if (receiver->Opcode() == Op_ConP) {
771 input_not_const = false;
772 const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
// Constant MethodHandle: resolve its vmtarget and call it directly.
773 ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
774 guarantee(!target->is_method_handle_intrinsic(), "should not happen"); // XXX remove
775 const int vtable_index = Method::invalid_vtable_index;
776 CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS, true, true);
// NOTE(review): 'cg' is dereferenced by this assert before the NULL check
// two lines down -- presumably call_generator() never returns NULL; confirm.
777 assert(!cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
778 if (cg != NULL && cg->is_inline())
779 return cg;
780 }
781 }
782 break;
783
784 case vmIntrinsics::_linkToVirtual:
785 case vmIntrinsics::_linkToStatic:
786 case vmIntrinsics::_linkToSpecial:
787 case vmIntrinsics::_linkToInterface:
788 {
789 // Get MemberName argument:
790 Node* member_name = kit.argument(callee->arg_size() - 1);
791 if (member_name->Opcode() == Op_ConP) {
792 input_not_const = false;
793 const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr();
794 ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget();
795
796 // In lambda forms we erase signature types to avoid resolving issues
// [listing gap: lines 797-811 not shown here]
812 // Cast reference arguments to its type.
813 for (int i = 0; i < signature->count(); i++) {
814 ciType* t = signature->type_at(i);
815 if (t->is_klass()) {
816 Node* arg = kit.argument(receiver_skip + i);
817 const TypeOopPtr* arg_type = arg->bottom_type()->isa_oopptr();
818 const Type* sig_type = TypeOopPtr::make_from_klass(t->as_klass());
819 if (arg_type != NULL && !arg_type->higher_equal(sig_type)) {
// Insert a checked cast so the argument carries its declared signature type.
820 Node* cast_obj = gvn.transform(new (C) CheckCastPPNode(kit.control(), arg, sig_type));
821 kit.set_argument(receiver_skip + i, cast_obj);
822 }
823 }
824 }
825
826 // Try to get the most accurate receiver type
827 const bool is_virtual = (iid == vmIntrinsics::_linkToVirtual);
828 const bool is_virtual_or_interface = (is_virtual || iid == vmIntrinsics::_linkToInterface);
829 int vtable_index = Method::invalid_vtable_index;
830 bool call_does_dispatch = false;
831
832 if (is_virtual_or_interface) {
833 ciInstanceKlass* klass = target->holder();
834 Node* receiver_node = kit.argument(0);
835 const TypeOopPtr* receiver_type = gvn.type(receiver_node)->isa_oopptr();
836 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
837 target = C->optimize_virtual_call(caller, jvms->bci(), klass, target, receiver_type,
838 is_virtual,
839 call_does_dispatch, vtable_index); // out-parameters
840 }
841
842 CallGenerator* cg = C->call_generator(target, vtable_index, call_does_dispatch, jvms, true, PROB_ALWAYS, true, true);
// NOTE(review): same assert-before-NULL-check pattern as the _invokeBasic arm.
843 assert(!cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
844 if (cg != NULL && cg->is_inline())
845 return cg;
846 }
847 }
848 break;
849
850 default:
// Any other intrinsic id reaching here is a compiler bug.
851 fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
852 break;
853 }
854 return NULL;
855 }
856
857
858 //------------------------PredictedIntrinsicGenerator------------------------------
859 // Internal class which handles all predicted Intrinsic calls.
860 class PredictedIntrinsicGenerator : public CallGenerator {
861 CallGenerator* _intrinsic;
862 CallGenerator* _cg;
|
469 Compile::current()->inc_number_of_mh_late_inlines();
470 CallGenerator* cg = new LateInlineMHCallGenerator(caller, callee, input_not_const);
471 return cg;
472 }
473
474 class LateInlineStringCallGenerator : public LateInlineCallGenerator {
475
476 public:
477 LateInlineStringCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
478 LateInlineCallGenerator(method, inline_cg) {}
479
480 virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
481 Compile *C = Compile::current();
482 C->print_inlining_skip(this);
483
484 C->add_string_late_inline(this);
485
486 JVMState* new_jvms = DirectCallGenerator::generate(jvms, parent_parser);
487 return new_jvms;
488 }
489
490 virtual bool is_string_late_inline() const { return true; }
491 };
492
493 CallGenerator* CallGenerator::for_string_late_inline(ciMethod* method, CallGenerator* inline_cg) {
494 return new LateInlineStringCallGenerator(method, inline_cg);
495 }
496
497 class LateInlineBoxingCallGenerator : public LateInlineCallGenerator {
498
499 public:
500 LateInlineBoxingCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
501 LateInlineCallGenerator(method, inline_cg) {}
502
503 virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
504 Compile *C = Compile::current();
505 C->print_inlining_skip(this);
506
507 C->add_boxing_late_inline(this);
508
509 JVMState* new_jvms = DirectCallGenerator::generate(jvms, parent_parser);
510 return new_jvms;
758 }
759 }
760
// Try to inline a method-handle invoker/linker intrinsic at parse time.
// For _invokeBasic the MethodHandle receiver, and for the _linkTo* linkers
// the trailing MemberName argument, must be a compile-time constant
// (Op_ConP); only then can the concrete target method be resolved and
// handed to C->call_generator() for inlining.  This revision additionally
// forwards a speculative receiver type to call_generator().  Returns NULL
// when no inline-capable generator was produced.  'input_not_const' is an
// out-parameter: cleared only when the relevant argument was constant.
// NOTE(review): this listing jumps from line 798 to 814, so the
// declarations of 'signature' and 'receiver_skip' used below are not
// visible here -- verify against the full file.
761 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const) {
762 GraphKit kit(jvms);
763 PhaseGVN& gvn = kit.gvn();
764 Compile* C = kit.C;
765 vmIntrinsics::ID iid = callee->intrinsic_id();
766 input_not_const = true;
767 switch (iid) {
768 case vmIntrinsics::_invokeBasic:
769 {
770 // Get MethodHandle receiver:
771 Node* receiver = kit.argument(0);
772 if (receiver->Opcode() == Op_ConP) {
773 input_not_const = false;
774 const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
// Constant MethodHandle: resolve its vmtarget and call it directly.
775 ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
776 guarantee(!target->is_method_handle_intrinsic(), "should not happen"); // XXX remove
777 const int vtable_index = Method::invalid_vtable_index;
// NULL here is the speculative receiver type: none is available for invokeBasic.
778 CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS, NULL, true, true);
// NOTE(review): 'cg' is dereferenced by this assert before the NULL check
// two lines down -- presumably call_generator() never returns NULL; confirm.
779 assert(!cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
780 if (cg != NULL && cg->is_inline())
781 return cg;
782 }
783 }
784 break;
785
786 case vmIntrinsics::_linkToVirtual:
787 case vmIntrinsics::_linkToStatic:
788 case vmIntrinsics::_linkToSpecial:
789 case vmIntrinsics::_linkToInterface:
790 {
791 // Get MemberName argument:
792 Node* member_name = kit.argument(callee->arg_size() - 1);
793 if (member_name->Opcode() == Op_ConP) {
794 input_not_const = false;
795 const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr();
796 ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget();
797
798 // In lambda forms we erase signature types to avoid resolving issues
// [listing gap: lines 799-813 not shown here]
814 // Cast reference arguments to its type.
815 for (int i = 0; i < signature->count(); i++) {
816 ciType* t = signature->type_at(i);
817 if (t->is_klass()) {
818 Node* arg = kit.argument(receiver_skip + i);
819 const TypeOopPtr* arg_type = arg->bottom_type()->isa_oopptr();
820 const Type* sig_type = TypeOopPtr::make_from_klass(t->as_klass());
821 if (arg_type != NULL && !arg_type->higher_equal(sig_type)) {
// Insert a checked cast so the argument carries its declared signature type.
822 Node* cast_obj = gvn.transform(new (C) CheckCastPPNode(kit.control(), arg, sig_type));
823 kit.set_argument(receiver_skip + i, cast_obj);
824 }
825 }
826 }
827
828 // Try to get the most accurate receiver type
829 const bool is_virtual = (iid == vmIntrinsics::_linkToVirtual);
830 const bool is_virtual_or_interface = (is_virtual || iid == vmIntrinsics::_linkToInterface);
831 int vtable_index = Method::invalid_vtable_index;
832 bool call_does_dispatch = false;
833
834 ciKlass* spec_receiver_type = NULL;
835 if (is_virtual_or_interface) {
836 ciInstanceKlass* klass = target->holder();
837 Node* receiver_node = kit.argument(0);
838 const TypeOopPtr* receiver_type = gvn.type(receiver_node)->isa_oopptr();
839 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
840 target = C->optimize_virtual_call(caller, jvms->bci(), klass, target, receiver_type,
841 is_virtual,
842 call_does_dispatch, vtable_index); // out-parameters
843 // We lack profiling at this call but type speculation may
844 // provide us with a type
// NOTE(review): isa_oopptr() above may return NULL, yet receiver_type is
// dereferenced here unconditionally -- verify this cannot be NULL on this
// path (optimize_virtual_call takes it without complaint just above).
845 spec_receiver_type = receiver_type->speculative_type();
846 }
847
848 CallGenerator* cg = C->call_generator(target, vtable_index, call_does_dispatch, jvms, true, PROB_ALWAYS, spec_receiver_type, true, true);
// NOTE(review): same assert-before-NULL-check pattern as the _invokeBasic arm.
849 assert(!cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
850 if (cg != NULL && cg->is_inline())
851 return cg;
852 }
853 }
854 break;
855
856 default:
// Any other intrinsic id reaching here is a compiler bug.
857 fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
858 break;
859 }
860 return NULL;
861 }
862
863
864 //------------------------PredictedIntrinsicGenerator------------------------------
865 // Internal class which handles all predicted Intrinsic calls.
866 class PredictedIntrinsicGenerator : public CallGenerator {
867 CallGenerator* _intrinsic;
868 CallGenerator* _cg;
|