src/share/vm/opto/callnode.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/opto

src/share/vm/opto/callnode.hpp

Print this page
rev 7268 : 6700100: optimize inline_native_clone() for small objects with exact klass
Summary: optimize small instance clones as loads/stores
Reviewed-by:


1053 public:
1054   virtual int Opcode() const;
1055   virtual uint size_of() const; // Size is bigger
1056   UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1057     init_class_id(Class_Unlock);
1058     init_flags(Flag_is_macro);   // flagged as a macro node
1059     C->add_macro_node(this);     // register with the compilation's macro-node list
1060   }
1061   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1062   // unlock is never a safepoint
1063   virtual bool        guaranteed_safepoint()  { return false; }
1064 };
1065 
1066 class GraphKit;
1067 
// CallNode subclass representing the several arraycopy/clone copy variants
// distinguished by _kind (System.arraycopy, clone, Arrays.copyOf/copyOfRange).
1068 class ArrayCopyNode : public CallNode {
1069 private:
1070 
1071   // What kind of arraycopy variant is this?
1072   enum {

1073     ArrayCopy,       // System.arraycopy()
1074     ArrayCopyNoTest, // System.arraycopy(), all arguments validated
1075     CloneBasic,      // A clone that can be copied by 64 bit chunks
1076     CloneOop,        // An oop array clone
1077     CopyOf,          // Arrays.copyOf()
1078     CopyOfRange      // Arrays.copyOfRange()
1079   } _kind;
1080 
1081 #ifndef PRODUCT
1082   static const char* _kind_names[CopyOfRange+1];
1083 #endif
1084   // Is the alloc obtained with
1085   // AllocateArrayNode::Ideal_array_allocation() tightly coupled
1086   // (arraycopy follows immediately the allocation)?
1087   // We cache the result of LibraryCallKit::tightly_coupled_allocation
1088   // here because it's much easier to find whether there's a tightly
1089   // coupled allocation at parse time than at macro expansion time. At
1090   // macro expansion time, for every use of the allocation node we
1091   // would need to figure out whether it happens after the arraycopy (and
1092   // can be ignored) or between the allocation and the arraycopy. At
1093   // parse time, it's straightforward because whatever happens after
1094   // the arraycopy is not parsed yet so doesn't exist when
1095   // LibraryCallKit::tightly_coupled_allocation() is called.
1096   bool _alloc_tightly_coupled;
1097 


1098   static const TypeFunc* arraycopy_type() { // call signature shared by all variants
1099     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
1100     fields[Src]       = TypeInstPtr::BOTTOM;
1101     fields[SrcPos]    = TypeInt::INT;
1102     fields[Dest]      = TypeInstPtr::BOTTOM;
1103     fields[DestPos]   = TypeInt::INT;
1104     fields[Length]    = TypeInt::INT;
1105     fields[SrcLen]    = TypeInt::INT;
1106     fields[DestLen]   = TypeInt::INT;
1107     fields[SrcKlass]  = TypeKlassPtr::BOTTOM;
1108     fields[DestKlass] = TypeKlassPtr::BOTTOM;
1109     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
1110 
1111     // create result type (range)
1112     fields = TypeTuple::fields(0);
1113 
1114     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0, fields);
1115 
1116     return TypeFunc::make(domain, range);
1117   }
1118 
1119   ArrayCopyNode(Compile* C, bool alloc_tightly_coupled);
1120 







1121 public:
1122 
1123   enum {
1124     Src   = TypeFunc::Parms,
1125     SrcPos,
1126     Dest,
1127     DestPos,
1128     Length,
1129     SrcLen,
1130     DestLen,
1131     SrcKlass,
1132     DestKlass,
1133     ParmLimit
1134   };
1135 
1136   static ArrayCopyNode* make(GraphKit* kit, bool may_throw,
1137                              Node* src, Node* src_offset,
1138                              Node* dest,  Node* dest_offset,
1139                              Node* length,
1140                              bool alloc_tightly_coupled,
1141                              Node* src_klass = NULL, Node* dest_klass = NULL,
1142                              Node* src_length = NULL, Node* dest_length = NULL);
1143 
1144   void connect_outputs(GraphKit* kit);
1145 
1146   bool is_arraycopy()         const { return _kind == ArrayCopy; }
1147   bool is_arraycopy_notest()  const { return _kind == ArrayCopyNoTest; }
1148   bool is_clonebasic()        const { return _kind == CloneBasic; }
1149   bool is_cloneoop()          const { return _kind == CloneOop; }
1150   bool is_copyof()            const { return _kind == CopyOf; }
1151   bool is_copyofrange()       const { return _kind == CopyOfRange; }
1152 
1153   void set_arraycopy()         { _kind = ArrayCopy; }
1154   void set_arraycopy_notest()  { _kind = ArrayCopyNoTest; }
1155   void set_clonebasic()        { _kind = CloneBasic; }
1156   void set_cloneoop()          { _kind = CloneOop; }
1157   void set_copyof()            { _kind = CopyOf; }
1158   void set_copyofrange()       { _kind = CopyOfRange; }
1159 
1160   virtual int Opcode() const;
1161   virtual uint size_of() const; // Size is bigger
1162   virtual bool guaranteed_safepoint()  { return false; }

1163 
1164   bool is_alloc_tightly_coupled() const { return _alloc_tightly_coupled; }
1165 
1166 #ifndef PRODUCT
1167   virtual void dump_spec(outputStream *st) const;
1168 #endif
1169 };
1170 #endif // SHARE_VM_OPTO_CALLNODE_HPP


1053 public:
1054   virtual int Opcode() const;
1055   virtual uint size_of() const; // Size is bigger
1056   UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1057     init_class_id(Class_Unlock);
1058     init_flags(Flag_is_macro);   // flagged as a macro node
1059     C->add_macro_node(this);     // register with the compilation's macro-node list
1060   }
1061   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1062   // unlock is never a safepoint
1063   virtual bool        guaranteed_safepoint()  { return false; }
1064 };
1065 
1066 class GraphKit;
1067 
// CallNode subclass representing the several arraycopy/clone copy variants
// distinguished by _kind (System.arraycopy, clone, Arrays.copyOf/copyOfRange).
// _kind starts as None and must be set exactly once via one of the set_* methods.
1068 class ArrayCopyNode : public CallNode {
1069 private:
1070 
1071   // What kind of arraycopy variant is this?
1072   enum {
1073     None,            // not set yet
1074     ArrayCopy,       // System.arraycopy()

1075     CloneBasic,      // A clone that can be copied by 64 bit chunks
1076     CloneOop,        // An oop array clone
1077     CopyOf,          // Arrays.copyOf()
1078     CopyOfRange      // Arrays.copyOfRange()
1079   } _kind;
1080 
1081 #ifndef PRODUCT
1082   static const char* _kind_names[CopyOfRange+1];
1083 #endif
1084   // Is the alloc obtained with
1085   // AllocateArrayNode::Ideal_array_allocation() tightly coupled
1086   // (arraycopy follows immediately the allocation)?
1087   // We cache the result of LibraryCallKit::tightly_coupled_allocation
1088   // here because it's much easier to find whether there's a tightly
1089   // coupled allocation at parse time than at macro expansion time. At
1090   // macro expansion time, for every use of the allocation node we
1091   // would need to figure out whether it happens after the arraycopy (and
1092   // can be ignored) or between the allocation and the arraycopy. At
1093   // parse time, it's straightforward because whatever happens after
1094   // the arraycopy is not parsed yet so doesn't exist when
1095   // LibraryCallKit::tightly_coupled_allocation() is called.
1096   bool _alloc_tightly_coupled;
1097 
1098   bool _arguments_validated;  // ArrayCopy only: were the arguments validated at the call site? (set by set_arraycopy())
1099 
1100   static const TypeFunc* arraycopy_type() { // call signature shared by all variants
1101     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
1102     fields[Src]       = TypeInstPtr::BOTTOM;
1103     fields[SrcPos]    = TypeInt::INT;
1104     fields[Dest]      = TypeInstPtr::BOTTOM;
1105     fields[DestPos]   = TypeInt::INT;
1106     fields[Length]    = TypeInt::INT;
1107     fields[SrcLen]    = TypeInt::INT;
1108     fields[DestLen]   = TypeInt::INT;
1109     fields[SrcKlass]  = TypeKlassPtr::BOTTOM;
1110     fields[DestKlass] = TypeKlassPtr::BOTTOM;
1111     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
1112 
1113     // create result type (range)
1114     fields = TypeTuple::fields(0);
1115 
1116     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0, fields);
1117 
1118     return TypeFunc::make(domain, range);
1119   }
1120 
1121   ArrayCopyNode(Compile* C, bool alloc_tightly_coupled);
1122 
1123   int get_count(PhaseGVN *phase) const;
1124   static const TypePtr* get_address_type(PhaseGVN *phase, Node* n);
1125 
1126   Node* try_clone_instance(PhaseGVN *phase, bool can_reshape, int count);
1127   bool finish_transform(PhaseGVN *phase, bool can_reshape,
1128                         Node* ctl, Node *mem);
1129 
1130 public:
1131 
1132   enum {
1133     Src   = TypeFunc::Parms,
1134     SrcPos,
1135     Dest,
1136     DestPos,
1137     Length,
1138     SrcLen,
1139     DestLen,
1140     SrcKlass,
1141     DestKlass,
1142     ParmLimit
1143   };
1144 
1145   static ArrayCopyNode* make(GraphKit* kit, bool may_throw,
1146                              Node* src, Node* src_offset,
1147                              Node* dest,  Node* dest_offset,
1148                              Node* length,
1149                              bool alloc_tightly_coupled,
1150                              Node* src_klass = NULL, Node* dest_klass = NULL,
1151                              Node* src_length = NULL, Node* dest_length = NULL);
1152 
1153   void connect_outputs(GraphKit* kit);
1154 
1155   bool is_arraycopy()             const  { assert(_kind != None, "should be set"); return _kind == ArrayCopy; }
1156   bool is_arraycopy_validated()   const  { assert(_kind != None, "should be set"); return _kind == ArrayCopy && _arguments_validated; }
1157   bool is_clonebasic()            const  { assert(_kind != None, "should be set"); return _kind == CloneBasic; }
1158   bool is_cloneoop()              const  { assert(_kind != None, "should be set"); return _kind == CloneOop; }
1159   bool is_copyof()                const  { assert(_kind != None, "should be set"); return _kind == CopyOf; }
1160   bool is_copyofrange()           const  { assert(_kind != None, "should be set"); return _kind == CopyOfRange; }
1161 
1162   void set_arraycopy(bool validated)   { assert(_kind == None, "shouldn't be set yet"); _kind = ArrayCopy; _arguments_validated = validated; }
1163   void set_clonebasic()                { assert(_kind == None, "shouldn't be set yet"); _kind = CloneBasic; _arguments_validated = false; }
1164   void set_cloneoop()                  { assert(_kind == None, "shouldn't be set yet"); _kind = CloneOop; _arguments_validated = false; }
1165   void set_copyof()                    { assert(_kind == None, "shouldn't be set yet"); _kind = CopyOf; _arguments_validated = false; }
1166   void set_copyofrange()               { assert(_kind == None, "shouldn't be set yet"); _kind = CopyOfRange; _arguments_validated = false; }

1167 
1168   virtual int Opcode() const;
1169   virtual uint size_of() const; // Size is bigger
1170   virtual bool guaranteed_safepoint()  { return false; }
1171   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1172 
1173   bool is_alloc_tightly_coupled() const { return _alloc_tightly_coupled; }
1174 
1175 #ifndef PRODUCT
1176   virtual void dump_spec(outputStream *st) const;
1177 #endif
1178 };
1179 #endif // SHARE_VM_OPTO_CALLNODE_HPP
src/share/vm/opto/callnode.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File