548 Node* resproj;
549 Node* exobj;
550 };
551
552 class CallGenerator;
553
554 //------------------------------CallNode---------------------------------------
555 // Call nodes now subsume the function of debug nodes at callsites, so they
556 // contain the functionality of a full scope chain of debug nodes.
557 class CallNode : public SafePointNode {
558 friend class VMStructs;
559
560 protected:
561 bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr *t_oop, PhaseTransform *phase);
562
563 public:
564 const TypeFunc *_tf; // Function type
565 address _entry_point; // Address of method being called
566 float _cnt; // Estimate of number of times called
567 CallGenerator* _generator; // corresponding CallGenerator for some late inline calls
568
569 CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
570 : SafePointNode(tf->domain()->cnt(), NULL, adr_type),
571 _tf(tf),
572 _entry_point(addr),
573 _cnt(COUNT_UNKNOWN),
574 _generator(NULL)
575 {
576 init_class_id(Class_Call);
577 }
578
579 const TypeFunc* tf() const { return _tf; }
580 const address entry_point() const { return _entry_point; }
581 const float cnt() const { return _cnt; }
582 CallGenerator* generator() const { return _generator; }
583
584 void set_tf(const TypeFunc* tf) { _tf = tf; }
585 void set_entry_point(address p) { _entry_point = p; }
586 void set_cnt(float c) { _cnt = c; }
587 void set_generator(CallGenerator* cg) { _generator = cg; }
588
589 virtual const Type *bottom_type() const;
590 virtual const Type *Value( PhaseTransform *phase ) const;
591 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
592 virtual Node *Identity( PhaseTransform *phase ) { return this; }
593 virtual uint cmp( const Node &n ) const;
594 virtual uint size_of() const = 0;
613 // Does this node have a use of n other than in debug information?
614 bool has_non_debug_use(Node *n);
615 // Returns the unique CheckCastPP of a call
616 // or result projection is there are several CheckCastPP
617 // or returns NULL if there is no one.
618 Node *result_cast();
619 // Does this node returns pointer?
620 bool returns_pointer() const {
621 const TypeTuple *r = tf()->range();
622 return (r->cnt() > TypeFunc::Parms &&
623 r->field_at(TypeFunc::Parms)->isa_ptr());
624 }
625
626 // Collect all the interesting edges from a call for use in
627 // replacing the call by something else. Used by macro expansion
628 // and the late inlining support.
629 void extract_projections(CallProjections* projs, bool separate_io_proj);
630
631 virtual uint match_edge(uint idx) const;
632
633 #ifndef PRODUCT
634 virtual void dump_req(outputStream *st = tty) const;
635 virtual void dump_spec(outputStream *st) const;
636 #endif
637 };
638
639
640 //------------------------------CallJavaNode-----------------------------------
641 // Make a static or dynamic subroutine call node using Java calling
642 // convention. (The "Java" calling convention is the compiler's calling
643 // convention, as opposed to the interpreter's or that of native C.)
class CallJavaNode : public CallNode {
  friend class VMStructs;
protected:
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger

  bool _optimized_virtual;        // presumably: virtual call statically bound to one target -- confirm at set sites
  bool _method_handle_invoke;     // presumably: call originates from a MethodHandle invoke -- confirm at set sites
  ciMethod* _method;              // Method being direct called
  void set_method(ciMethod *m) { _method = m; }
  void set_optimized_virtual(bool f) { _optimized_virtual = f; }
  bool is_optimized_virtual() const { return _optimized_virtual; }
  void set_method_handle_invoke(bool f) { _method_handle_invoke = f; }
  bool is_method_handle_invoke() const { return _method_handle_invoke; }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
676
677 //------------------------------CallStaticJavaNode-----------------------------
678 // Make a direct subroutine call using Java calling convention (for static
679 // calls and optimized virtual calls, plus calls to wrappers for run-time
680 // routines); generates static stub.
681 class CallStaticJavaNode : public CallJavaNode {
682 virtual uint cmp( const Node &n ) const;
683 virtual uint size_of() const; // Size is bigger
684 public:
  // Call to a known Java method.  When box elimination is enabled and the
  // callee is a boxing method, the call is flagged as a macro node and
  // registered with the Compile object.
  CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
    : CallJavaNode(tf, addr, method, bci), _name(NULL) {
    init_class_id(Class_CallStaticJava);
    if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
      init_flags(Flag_is_macro);
      C->add_macro_node(this);
    }
    // Escape-analysis results start out pessimistic.
    _is_scalar_replaceable = false;
    _is_non_escaping = false;
  }
  // Call to a runtime stub identified only by name (no ciMethod).
  CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                     const TypePtr* adr_type)
    : CallJavaNode(tf, addr, NULL, bci), _name(name) {
    init_class_id(Class_CallStaticJava);
    // This node calls a runtime stub, which often has narrow memory effects.
    _adr_type = adr_type;
    // Escape-analysis results start out pessimistic.
    _is_scalar_replaceable = false;
    _is_non_escaping = false;
  }
704 const char *_name; // Runtime wrapper name
705
706 // Result of Escape Analysis
707 bool _is_scalar_replaceable;
708 bool _is_non_escaping;
709
710 // If this is an uncommon trap, return the request code, else zero.
711 int uncommon_trap_request() const;
712 static int extract_uncommon_trap_request(const Node* call);
713
  // True when this call was flagged as a macro node for a boxing method
  // (set by the Compile* constructor when eliminate_boxing applies).
  bool is_boxing_method() const {
    return is_macro() && (method() != NULL) && method()->is_boxing_method();
  }
  // Late inlining modifies the JVMState, so we need to deep-clone it
  // when the call node is cloned (because it is a macro node).
  virtual void clone_jvms(Compile* C) {
    if ((jvms() != NULL) && is_boxing_method()) {
      set_jvms(jvms()->clone_deep(C));
      jvms()->set_map_deep(this);
    }
  }
737 public:
  // Construct a dynamically dispatched Java call; vtable_index is recorded
  // for use at dispatch (presumably the callee's vtable slot -- confirm at use sites).
  CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
    init_class_id(Class_CallDynamicJava);
  }
741
742 int _vtable_index;
743 virtual int Opcode() const;
744 #ifndef PRODUCT
745 virtual void dump_spec(outputStream *st) const;
746 #endif
747 };
748
749 //------------------------------CallRuntimeNode--------------------------------
750 // Make a direct subroutine call node into compiled C++ code.
class CallRuntimeNode : public CallNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  // addr/name identify the C++ runtime entry; adr_type narrows the memory
  // effects attributed to the call.
  CallRuntimeNode(const TypeFunc* tf, address addr, const char* name,
                  const TypePtr* adr_type)
    : CallNode(tf, addr, adr_type),
      _name(name)
  {
    init_class_id(Class_CallRuntime);
  }

  const char *_name;            // Printable name of the runtime entry
  virtual int Opcode() const;
  // Fill in outgoing argument locations (parm_regs) for this C calling convention.
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
771
772 //------------------------------CallLeafNode-----------------------------------
773 // Make a direct subroutine call node into compiled C++ code, without
774 // safepoints
class CallLeafNode : public CallRuntimeNode {
public:
  CallLeafNode(const TypeFunc* tf, address addr, const char* name,
               const TypePtr* adr_type)
    : CallRuntimeNode(tf, addr, name, adr_type)
  {
    init_class_id(Class_CallLeaf);
  }
  virtual int Opcode() const;
  // Leaf calls carry no guaranteed safepoint (see class comment).
  virtual bool guaranteed_safepoint() { return false; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
  // Whether this leaf call targets an arraycopy stub (defined out of line).
  bool is_call_to_arraycopystub() const;
  virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase);
};
791
792 //------------------------------CallLeafNoFPNode-------------------------------
793 // CallLeafNode, not using floating point or using it in the same manner as
794 // the generated code
class CallLeafNoFPNode : public CallLeafNode {
public:
  CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name,
                   const TypePtr* adr_type)
    : CallLeafNode(tf, addr, name, adr_type)
  {
  }
  // Adds no state of its own; distinguished from CallLeafNode only by Opcode.
  virtual int Opcode() const;
};
804
805
806 //------------------------------Allocate---------------------------------------
807 // High-level memory allocation
808 //
809 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
|
548 Node* resproj;
549 Node* exobj;
550 };
551
552 class CallGenerator;
553
554 //------------------------------CallNode---------------------------------------
555 // Call nodes now subsume the function of debug nodes at callsites, so they
556 // contain the functionality of a full scope chain of debug nodes.
557 class CallNode : public SafePointNode {
558 friend class VMStructs;
559
560 protected:
561 bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr *t_oop, PhaseTransform *phase);
562
563 public:
564 const TypeFunc *_tf; // Function type
565 address _entry_point; // Address of method being called
566 float _cnt; // Estimate of number of times called
567 CallGenerator* _generator; // corresponding CallGenerator for some late inline calls
568 const char *_name; // Printable name, if _method is NULL
569
570 CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
571 : SafePointNode(tf->domain()->cnt(), NULL, adr_type),
572 _tf(tf),
573 _entry_point(addr),
574 _cnt(COUNT_UNKNOWN),
575 _generator(NULL),
576 _name(NULL)
577 {
578 init_class_id(Class_Call);
579 }
580
581 const TypeFunc* tf() const { return _tf; }
582 const address entry_point() const { return _entry_point; }
583 const float cnt() const { return _cnt; }
584 CallGenerator* generator() const { return _generator; }
585
586 void set_tf(const TypeFunc* tf) { _tf = tf; }
587 void set_entry_point(address p) { _entry_point = p; }
588 void set_cnt(float c) { _cnt = c; }
589 void set_generator(CallGenerator* cg) { _generator = cg; }
590
591 virtual const Type *bottom_type() const;
592 virtual const Type *Value( PhaseTransform *phase ) const;
593 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
594 virtual Node *Identity( PhaseTransform *phase ) { return this; }
595 virtual uint cmp( const Node &n ) const;
596 virtual uint size_of() const = 0;
615 // Does this node have a use of n other than in debug information?
616 bool has_non_debug_use(Node *n);
617 // Returns the unique CheckCastPP of a call
618 // or result projection is there are several CheckCastPP
619 // or returns NULL if there is no one.
620 Node *result_cast();
621 // Does this node returns pointer?
622 bool returns_pointer() const {
623 const TypeTuple *r = tf()->range();
624 return (r->cnt() > TypeFunc::Parms &&
625 r->field_at(TypeFunc::Parms)->isa_ptr());
626 }
627
628 // Collect all the interesting edges from a call for use in
629 // replacing the call by something else. Used by macro expansion
630 // and the late inlining support.
631 void extract_projections(CallProjections* projs, bool separate_io_proj);
632
633 virtual uint match_edge(uint idx) const;
634
635 bool is_call_to_arraycopystub() const;
636
637 #ifndef PRODUCT
638 virtual void dump_req(outputStream *st = tty) const;
639 virtual void dump_spec(outputStream *st) const;
640 #endif
641 };
642
643
644 //------------------------------CallJavaNode-----------------------------------
645 // Make a static or dynamic subroutine call node using Java calling
646 // convention. (The "Java" calling convention is the compiler's calling
647 // convention, as opposed to the interpreter's or that of native C.)
class CallJavaNode : public CallNode {
  friend class VMStructs;
protected:
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger

  bool _optimized_virtual;        // presumably: virtual call statically bound to one target -- confirm at set sites
  bool _method_handle_invoke;     // presumably: call originates from a MethodHandle invoke -- confirm at set sites
  ciMethod* _method;              // Method being direct called
  void set_method(ciMethod *m) { _method = m; }
  void set_optimized_virtual(bool f) { _optimized_virtual = f; }
  bool is_optimized_virtual() const { return _optimized_virtual; }
  void set_method_handle_invoke(bool f) { _method_handle_invoke = f; }
  bool is_method_handle_invoke() const { return _method_handle_invoke; }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
680
681 //------------------------------CallStaticJavaNode-----------------------------
682 // Make a direct subroutine call using Java calling convention (for static
683 // calls and optimized virtual calls, plus calls to wrappers for run-time
684 // routines); generates static stub.
685 class CallStaticJavaNode : public CallJavaNode {
686 virtual uint cmp( const Node &n ) const;
687 virtual uint size_of() const; // Size is bigger
688 public:
  // Call to a known Java method.  When box elimination is enabled and the
  // callee is a boxing method, the call is flagged as a macro node and
  // registered with the Compile object.
  CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
    : CallJavaNode(tf, addr, method, bci) {
    init_class_id(Class_CallStaticJava);
    if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
      init_flags(Flag_is_macro);
      C->add_macro_node(this);
    }
    // Escape-analysis results start out pessimistic.
    _is_scalar_replaceable = false;
    _is_non_escaping = false;
  }
  // Call to a runtime stub identified only by name (no ciMethod).
  CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                     const TypePtr* adr_type)
    : CallJavaNode(tf, addr, NULL, bci) {
    init_class_id(Class_CallStaticJava);
    // This node calls a runtime stub, which often has narrow memory effects.
    _adr_type = adr_type;
    // Escape-analysis results start out pessimistic.
    _is_scalar_replaceable = false;
    _is_non_escaping = false;
    _name = name;
  }
709
710 // Result of Escape Analysis
711 bool _is_scalar_replaceable;
712 bool _is_non_escaping;
713
714 // If this is an uncommon trap, return the request code, else zero.
715 int uncommon_trap_request() const;
716 static int extract_uncommon_trap_request(const Node* call);
717
  // True when this call was flagged as a macro node for a boxing method
  // (set by the Compile* constructor when eliminate_boxing applies).
  bool is_boxing_method() const {
    return is_macro() && (method() != NULL) && method()->is_boxing_method();
  }
  // Late inlining modifies the JVMState, so we need to deep-clone it
  // when the call node is cloned (because it is a macro node).
  virtual void clone_jvms(Compile* C) {
    if ((jvms() != NULL) && is_boxing_method()) {
      set_jvms(jvms()->clone_deep(C));
      jvms()->set_map_deep(this);
    }
  }
741 public:
  // Construct a dynamically dispatched Java call; vtable_index is recorded
  // for use at dispatch (presumably the callee's vtable slot -- confirm at use sites).
  CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
    init_class_id(Class_CallDynamicJava);
  }
745
746 int _vtable_index;
747 virtual int Opcode() const;
748 #ifndef PRODUCT
749 virtual void dump_spec(outputStream *st) const;
750 #endif
751 };
752
753 //------------------------------CallRuntimeNode--------------------------------
754 // Make a direct subroutine call node into compiled C++ code.
class CallRuntimeNode : public CallNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  // addr/name identify the C++ runtime entry; adr_type narrows the memory
  // effects attributed to the call.  _name lives in CallNode.
  CallRuntimeNode(const TypeFunc* tf, address addr, const char* name,
                  const TypePtr* adr_type)
    : CallNode(tf, addr, adr_type)
  {
    init_class_id(Class_CallRuntime);
    _name = name;
  }

  virtual int Opcode() const;
  // Fill in outgoing argument locations (parm_regs) for this C calling convention.
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
774
775 //------------------------------CallLeafNode-----------------------------------
776 // Make a direct subroutine call node into compiled C++ code, without
777 // safepoints
class CallLeafNode : public CallRuntimeNode {
public:
  CallLeafNode(const TypeFunc* tf, address addr, const char* name,
               const TypePtr* adr_type)
    : CallRuntimeNode(tf, addr, name, adr_type)
  {
    init_class_id(Class_CallLeaf);
  }
  virtual int Opcode() const;
  // Leaf calls carry no guaranteed safepoint (see class comment).
  virtual bool guaranteed_safepoint() { return false; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
792
793 //------------------------------CallLeafNoFPNode-------------------------------
794 // CallLeafNode, not using floating point or using it in the same manner as
795 // the generated code
class CallLeafNoFPNode : public CallLeafNode {
public:
  CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name,
                   const TypePtr* adr_type)
    : CallLeafNode(tf, addr, name, adr_type)
  {
  }
  // Adds no state of its own; distinguished from CallLeafNode only by Opcode.
  virtual int Opcode() const;
};
805
806
807 //------------------------------Allocate---------------------------------------
808 // High-level memory allocation
809 //
810 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
|