// NOTE(review): flattened two-column diff paste of C2's callnode.hpp; numbers like "643" are embedded source line numbers, not code. This span is the LEFT (old) column: the tail of class CallNode (its definition starts before this view) plus the CallJavaNode section header.
643 // or result projection if there are several CheckCastPP 644 // or returns NULL if there is no one. 645 Node *result_cast(); 646 // Does this node return a pointer? 647 bool returns_pointer() const { 648 const TypeTuple *r = tf()->range_sig(); 649 return (!tf()->returns_value_type_as_fields() && 650 r->cnt() > TypeFunc::Parms && 651 r->field_at(TypeFunc::Parms)->isa_ptr()); 652 } 653 654 // Collect all the interesting edges from a call for use in 655 // replacing the call by something else. Used by macro expansion 656 // and the late inlining support. 657 CallProjections* extract_projections(bool separate_io_proj, bool do_asserts = true); 658 659 virtual uint match_edge(uint idx) const; 660 661 bool is_call_to_arraycopystub() const; 662 663 #ifndef PRODUCT 664 virtual void dump_req(outputStream *st = tty) const; 665 virtual void dump_spec(outputStream *st) const; 666 #endif 667 }; 668 669 670 //------------------------------CallJavaNode----------------------------------- 671 // Make a static or dynamic subroutine call node using Java calling 672 // convention. (The "Java" calling convention is the compiler's calling 673 // convention, as opposed to the interpreter's or that of native C.) 
// NOTE(review): LEFT (old) column of the diff paste: class CallJavaNode. Source lines 683-685 are elided by the extraction (jump 682 -> 686); presumably the "public:" label and the _method/_bci field declarations, since the constructor initializes _method(method), _bci(bci) -- TODO confirm against the full file.
674 class CallJavaNode : public CallNode { 675 friend class VMStructs; 676 protected: 677 virtual bool cmp( const Node &n ) const; 678 virtual uint size_of() const; // Size is bigger 680 bool _optimized_virtual; 681 bool _method_handle_invoke; 682 bool _override_symbolic_info; // Override symbolic call site info from bytecode 686 CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci) 687 : CallNode(tf, addr, TypePtr::BOTTOM), 688 _optimized_virtual(false), 689 _method_handle_invoke(false), 690 _override_symbolic_info(false), 691 _method(method), _bci(bci) 692 { 693 init_class_id(Class_CallJava); 694 } 695 696 virtual int Opcode() const; 697 ciMethod* method() const { return _method; } 698 void set_method(ciMethod *m) { _method = m; } 699 void set_optimized_virtual(bool f) { _optimized_virtual = f; } 700 bool is_optimized_virtual() const { return _optimized_virtual; } 701 void set_method_handle_invoke(bool f) { _method_handle_invoke = f; } 702 bool is_method_handle_invoke() const { return _method_handle_invoke; } 703 void set_override_symbolic_info(bool f) { _override_symbolic_info = f; } 704 bool override_symbolic_info() const { return _override_symbolic_info; } 705 706 DEBUG_ONLY( bool validate_symbolic_info() const; ) 707 708 #ifndef PRODUCT 709 virtual void dump_spec(outputStream *st) const; 710 virtual void dump_compact_spec(outputStream *st) const; 711 #endif 712 }; 713 714 //------------------------------CallStaticJavaNode----------------------------- 715 // Make a direct subroutine call using Java calling convention (for static 716 // calls and optimized virtual calls, plus calls to wrappers for run-time 717 // routines); generates static stub. 
// NOTE(review): LEFT (old) column of the diff paste: class CallStaticJavaNode. Source lines 741-754 are elided by the extraction (jump 740 -> 755); the members starting at 755 (_is_scalar_replaceable etc.) suggest the constructor's closing brace and a "public:"/field section sit in the gap -- TODO confirm against the full file.
718 class CallStaticJavaNode : public CallJavaNode { 719 virtual bool cmp( const Node &n ) const; 720 virtual uint size_of() const; // Size is bigger 721 public: 722 CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci) 723 : CallJavaNode(tf, addr, method, bci) { 724 init_class_id(Class_CallStaticJava); 725 if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) { 726 init_flags(Flag_is_macro); 727 C->add_macro_node(this); 728 } 729 const TypeTuple *r = tf->range_sig(); 730 if (ValueTypeReturnedAsFields && 731 method != NULL && 732 method->is_method_handle_intrinsic() && 733 r->cnt() > TypeFunc::Parms && 734 r->field_at(TypeFunc::Parms)->isa_oopptr() && 735 r->field_at(TypeFunc::Parms)->is_oopptr()->can_be_value_type()) { 736 // Make sure this call is processed by PhaseMacroExpand::expand_mh_intrinsic_return 737 init_flags(Flag_is_macro); 738 C->add_macro_node(this); 739 } 740 755 // Result of Escape Analysis 756 bool _is_scalar_replaceable; 757 bool _is_non_escaping; 758 759 // If this is an uncommon trap, return the request code, else zero. 760 int uncommon_trap_request() const; 761 static int extract_uncommon_trap_request(const Node* call); 762 763 bool is_boxing_method() const { 764 return is_macro() && (method() != NULL) && method()->is_boxing_method(); 765 } 766 // Later inlining modifies the JVMState, so we need to clone it 767 // when the call node is cloned (because it is a macro node). 768 virtual void clone_jvms(Compile* C) { 769 if ((jvms() != NULL) && is_boxing_method()) { 770 set_jvms(jvms()->clone_deep(C)); 771 jvms()->set_map_deep(this); 772 } 773 } 774 775 virtual int Opcode() const; 776 #ifndef PRODUCT 777 virtual void dump_spec(outputStream *st) const; 778 virtual void dump_compact_spec(outputStream *st) const; 779 #endif 780 }; 781 782 //------------------------------CallDynamicJavaNode---------------------------- 783 // Make a dispatched call using Java calling convention. 
// NOTE(review): this span ends the LEFT (old) column -- class CallDynamicJavaNode, truncated at line 794 -- then the "|" is the diff's column separator, after which the RIGHT (new) column restarts at line 643 with the tail of class CallNode. The new column adds the virtual copy_call_debug_info() hook (line 663, empty default body) that the old column lacks.
784 class CallDynamicJavaNode : public CallJavaNode { 785 virtual bool cmp( const Node &n ) const; 786 virtual uint size_of() const; // Size is bigger 787 public: 788 CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) { 789 init_class_id(Class_CallDynamicJava); 790 } 791 792 int _vtable_index; 793 virtual int Opcode() const; 794 #ifndef PRODUCT | 643 // or result projection if there are several CheckCastPP 644 // or returns NULL if there is no one. 645 Node *result_cast(); 646 // Does this node return a pointer? 647 bool returns_pointer() const { 648 const TypeTuple *r = tf()->range_sig(); 649 return (!tf()->returns_value_type_as_fields() && 650 r->cnt() > TypeFunc::Parms && 651 r->field_at(TypeFunc::Parms)->isa_ptr()); 652 } 653 654 // Collect all the interesting edges from a call for use in 655 // replacing the call by something else. Used by macro expansion 656 // and the late inlining support. 657 CallProjections* extract_projections(bool separate_io_proj, bool do_asserts = true); 658 659 virtual uint match_edge(uint idx) const; 660 661 bool is_call_to_arraycopystub() const; 662 663 virtual void copy_call_debug_info(PhaseIterGVN* phase, CallNode *oldcall) {} 664 665 #ifndef PRODUCT 666 virtual void dump_req(outputStream *st = tty) const; 667 virtual void dump_spec(outputStream *st) const; 668 #endif 669 }; 670 671 672 //------------------------------CallJavaNode----------------------------------- 673 // Make a static or dynamic subroutine call node using Java calling 674 // convention. (The "Java" calling convention is the compiler's calling 675 // convention, as opposed to the interpreter's or that of native C.) 
// NOTE(review): RIGHT (new) column of the diff paste: class CallJavaNode. Versus the old column it adds the non-virtual copy_call_debug_info() declaration (line 708), overriding the base hook added at CallNode line 663. Source lines 685-687 are elided by the extraction (jump 684 -> 688); presumably "public:" and the _method/_bci fields, which the constructor initializes -- TODO confirm against the full file.
676 class CallJavaNode : public CallNode { 677 friend class VMStructs; 678 protected: 679 virtual bool cmp( const Node &n ) const; 680 virtual uint size_of() const; // Size is bigger 682 bool _optimized_virtual; 683 bool _method_handle_invoke; 684 bool _override_symbolic_info; // Override symbolic call site info from bytecode 688 CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci) 689 : CallNode(tf, addr, TypePtr::BOTTOM), 690 _optimized_virtual(false), 691 _method_handle_invoke(false), 692 _override_symbolic_info(false), 693 _method(method), _bci(bci) 694 { 695 init_class_id(Class_CallJava); 696 } 697 698 virtual int Opcode() const; 699 ciMethod* method() const { return _method; } 700 void set_method(ciMethod *m) { _method = m; } 701 void set_optimized_virtual(bool f) { _optimized_virtual = f; } 702 bool is_optimized_virtual() const { return _optimized_virtual; } 703 void set_method_handle_invoke(bool f) { _method_handle_invoke = f; } 704 bool is_method_handle_invoke() const { return _method_handle_invoke; } 705 void set_override_symbolic_info(bool f) { _override_symbolic_info = f; } 706 bool override_symbolic_info() const { return _override_symbolic_info; } 707 708 void copy_call_debug_info(PhaseIterGVN* phase, CallNode *oldcall); 709 710 DEBUG_ONLY( bool validate_symbolic_info() const; ) 711 712 #ifndef PRODUCT 713 virtual void dump_spec(outputStream *st) const; 714 virtual void dump_compact_spec(outputStream *st) const; 715 #endif 716 }; 717 718 //------------------------------CallStaticJavaNode----------------------------- 719 // Make a direct subroutine call using Java calling convention (for static 720 // calls and optimized virtual calls, plus calls to wrappers for run-time 721 // routines); generates static stub. 
// NOTE(review): RIGHT (new) column of the diff paste: class CallStaticJavaNode, first half (continues on the next span). Versus the old column it adds the private remove_useless_allocation() helper (line 726). Source lines 748-761 are elided by the extraction (jump 747 -> 762); the members at 762+ suggest the constructor's closing brace and a field section sit in the gap -- TODO confirm against the full file.
722 class CallStaticJavaNode : public CallJavaNode { 723 virtual bool cmp( const Node &n ) const; 724 virtual uint size_of() const; // Size is bigger 725 726 bool remove_useless_allocation(PhaseGVN *phase, Node* ctl, Node* mem, Node* unc_arg); 727 728 public: 729 CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci) 730 : CallJavaNode(tf, addr, method, bci) { 731 init_class_id(Class_CallStaticJava); 732 if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) { 733 init_flags(Flag_is_macro); 734 C->add_macro_node(this); 735 } 736 const TypeTuple *r = tf->range_sig(); 737 if (ValueTypeReturnedAsFields && 738 method != NULL && 739 method->is_method_handle_intrinsic() && 740 r->cnt() > TypeFunc::Parms && 741 r->field_at(TypeFunc::Parms)->isa_oopptr() && 742 r->field_at(TypeFunc::Parms)->is_oopptr()->can_be_value_type()) { 743 // Make sure this call is processed by PhaseMacroExpand::expand_mh_intrinsic_return 744 init_flags(Flag_is_macro); 745 C->add_macro_node(this); 746 } 747 762 // Result of Escape Analysis 763 bool _is_scalar_replaceable; 764 bool _is_non_escaping; 765 766 // If this is an uncommon trap, return the request code, else zero. 767 int uncommon_trap_request() const; 768 static int extract_uncommon_trap_request(const Node* call); 769 770 bool is_boxing_method() const { 771 return is_macro() && (method() != NULL) && method()->is_boxing_method(); 772 } 773 // Later inlining modifies the JVMState, so we need to clone it 774 // when the call node is cloned (because it is a macro node). 
// NOTE(review): RIGHT (new) column of the diff paste: remainder of class CallStaticJavaNode -- versus the old column it adds an Ideal() override (line 782, presumably paired with remove_useless_allocation declared at line 726 of this column -- TODO confirm in the .cpp) -- followed by the start of class CallDynamicJavaNode, truncated mid-declaration at line 803; the trailing "|" is the diff's column separator.
775 virtual void clone_jvms(Compile* C) { 776 if ((jvms() != NULL) && is_boxing_method()) { 777 set_jvms(jvms()->clone_deep(C)); 778 jvms()->set_map_deep(this); 779 } 780 } 781 782 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); 783 784 virtual int Opcode() const; 785 #ifndef PRODUCT 786 virtual void dump_spec(outputStream *st) const; 787 virtual void dump_compact_spec(outputStream *st) const; 788 #endif 789 }; 790 791 //------------------------------CallDynamicJavaNode---------------------------- 792 // Make a dispatched call using Java calling convention. 793 class CallDynamicJavaNode : public CallJavaNode { 794 virtual bool cmp( const Node &n ) const; 795 virtual uint size_of() const; // Size is bigger 796 public: 797 CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) { 798 init_class_id(Class_CallDynamicJava); 799 } 800 801 int _vtable_index; 802 virtual int Opcode() const; 803 #ifndef PRODUCT |