src/share/vm/opto/callnode.hpp

Print this page
rev 3361 : 7173584: Implement arraycopy as a macro node
Summary: delay the conversion of arraycopy to stub calls until macro expansion
Reviewed-by:


 953 
 954   bool is_nested_lock_region(); // Is this Lock nested?
 955 };
 956 
 957 //------------------------------Unlock---------------------------------------
 958 // High-level unlock operation, kept abstract until macro expansion
 959 class UnlockNode : public AbstractLockNode {
 960 public:
 961   virtual int Opcode() const;
 962   virtual uint size_of() const; // Size is bigger than the base CallNode
 963   UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
 964     init_class_id(Class_Unlock);
 965     init_flags(Flag_is_macro);  // macro node: lowered later by macro expansion
 966     C->add_macro_node(this);    // register on the Compile's macro node list
 967   }
 968   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 969   // unlock is never a safepoint
 970   virtual bool        guaranteed_safepoint()  { return false; }
 971 };
 972 

















































































 973 #endif // SHARE_VM_OPTO_CALLNODE_HPP


 953 
 954   bool is_nested_lock_region(); // Is this Lock nested?
 955 };
 956 
 957 //------------------------------Unlock---------------------------------------
 958 // High-level unlock operation, kept abstract until macro expansion
 959 class UnlockNode : public AbstractLockNode {
 960 public:
 961   virtual int Opcode() const;
 962   virtual uint size_of() const; // Size is bigger than the base CallNode
 963   UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
 964     init_class_id(Class_Unlock);
 965     init_flags(Flag_is_macro);  // macro node: lowered later by macro expansion
 966     C->add_macro_node(this);    // register on the Compile's macro node list
 967   }
 968   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 969   // unlock is never a safepoint
 970   virtual bool        guaranteed_safepoint()  { return false; }
 971 };
 972 
 973 class ArrayCopyNode : public CallNode { // High-level arraycopy/clone/copyOf operation, kept as a call-shaped node until macro expansion
 974 private:
 975 
 976   // What kind of arraycopy variant is this?
 977   enum {
 978     ArrayCopy,       // System.arraycopy()
 979     ArrayCopyNoTest, // System.arraycopy(), all arguments validated
 980     CloneBasic,      // A clone that can be copied by 64 bit chunks
 981     CloneOop,        // An oop array clone
 982     CopyOf           // Arrays.copyOf()
 983   } _kind;           // set through the set_* mutators below
 984 
 985   // Is the alloc obtained with
 986   // AllocateArrayNode::Ideal_array_allocation() tightly coupled
 987   // (arraycopy follows immediately the allocation)?
 988   // We cache the result of LibraryCallKit::tightly_coupled_allocation
 989   // here because it's much easier to find whether there's a tightly
 990   // coupled allocation at parse time than at macro expansion time. At
 991   // macro expansion time, for every use of the allocation node we
 992   // would need to figure out whether it happens after the arraycopy (and
 993   // can be ignored) or between the allocation and the arraycopy. At
 994   // parse time, it's straightforward because whatever happens after
 995   // the arraycopy is not parsed yet so doesn't exist when
 996   // LibraryCallKit::tightly_coupled_allocation() is called.
 997   bool _alloc_tightly_coupled;
 998 
 999   static const TypeFunc* arraycopy_type() { // build the call signature: (src, src_pos, dest, dest_pos, length), no results
1000     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
1001     fields[Src]     = TypeInstPtr::BOTTOM;
1002     fields[SrcPos]  = TypeInt::INT;
1003     fields[Dest]    = TypeInstPtr::BOTTOM;
1004     fields[DestPos] = TypeInt::INT;
1005     fields[Length]  = TypeInt::INT;
1006     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
1007 
1008     // create result type (range): empty, the call produces no value
1009     fields = TypeTuple::fields(0);
1010 
1011     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0, fields);
1012 
1013     return TypeFunc::make(domain, range);
1014   }
1015 
1016   ArrayCopyNode(Compile* C, bool alloc_tightly_coupled); // private: instances are created via make()
1017 
1018 public:
1019 
1020   enum { // parameter slot indices into the call's TypeFunc domain
1021     Src   = TypeFunc::Parms,
1022     SrcPos,
1023     Dest,
1024     DestPos,
1025     Length,
1026     ParmLimit
1027   };
1028 
1029   static ArrayCopyNode* make(GraphKit* kit, bool may_throw,
1030                              Node* src, Node* src_offset, Node* dest, Node* dest_offset, Node* length,
1031                              bool alloc_tightly_coupled); // factory (the constructor is private)
1032 
1033   void connect_outputs(GraphKit* kit); // NOTE(review): presumably wires this call's projections into kit's graph — confirm in the .cpp
1034 
1035   bool is_arraycopy()         const { return _kind == ArrayCopy; }
1036   bool is_arraycopy_notest()  const { return _kind == ArrayCopyNoTest; }
1037   bool is_clonebasic()        const { return _kind == CloneBasic; }
1038   bool is_cloneoop()          const { return _kind == CloneOop; }
1039   bool is_copyof()            const { return _kind == CopyOf; }
1040 
1041   void set_arraycopy()         { _kind = ArrayCopy; }
1042   void set_arraycopy_notest()  { _kind = ArrayCopyNoTest; }
1043   void set_clonebasic()        { _kind = CloneBasic; }
1044   void set_cloneoop()          { _kind = CloneOop; }
1045   void set_copyof()            { _kind = CopyOf; }
1046 
1047   virtual int Opcode() const;
1048   virtual uint size_of() const; // Size is bigger than the base CallNode
1049   virtual bool guaranteed_safepoint()  { return false; } // an arraycopy is never a safepoint
1050 
1051   bool is_alloc_tightly_coupled() const { return _alloc_tightly_coupled; }
1052 
1053 };
1054 #endif // SHARE_VM_OPTO_CALLNODE_HPP