< prev index next >

src/hotspot/share/opto/memnode.hpp

Print this page
rev 54995 : 8224675: Late GC barrier insertion for ZGC
Reviewed-by:


 147 #endif
 148 };
 149 
 150 //------------------------------LoadNode---------------------------------------
 151 // Load value; requires Memory and Address
 152 class LoadNode : public MemNode {
 153 public:
 154   // Some loads (from unsafe) should be pinned: they don't depend only
 155   // on the dominating test.  The field _control_dependency below records
 156   // whether that node depends only on the dominating test.
 157   // Methods used to build LoadNodes pass an argument of type enum
 158   // ControlDependency instead of a boolean because those methods
 159   // typically have multiple boolean parameters with default values:
 160   // passing the wrong boolean to one of these parameters by mistake
 161   // goes easily unnoticed. Using an enum, the compiler can check that
 162   // the type of a value and the type of the parameter match.
 163   enum ControlDependency {
 164     Pinned,
 165     DependsOnlyOnTest
 166   };








 167 private:
 168   // LoadNode::hash() doesn't take the _control_dependency field
 169   // into account: If the graph already has a non-pinned LoadNode and
 170   // we add a pinned LoadNode with the same inputs, it's safe for GVN
 171   // to replace the pinned LoadNode with the non-pinned LoadNode,
 172   // otherwise it wouldn't be safe to have a non pinned LoadNode with
 173   // those inputs in the first place. If the graph already has a
 174   // pinned LoadNode and we add a non pinned LoadNode with the same
 175   // inputs, it's safe (but suboptimal) for GVN to replace the
 176   // non-pinned LoadNode by the pinned LoadNode.
 177   ControlDependency _control_dependency;
 178 
 179   // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
 180   // loads that can be reordered, and such requiring acquire semantics to
 181   // adhere to the Java specification.  The required behaviour is stored in
 182   // this field.
 183   const MemOrd _mo;
 184 


 185 protected:
 186   virtual bool cmp(const Node &n) const;
 187   virtual uint size_of() const; // Size is bigger
 188   // Should LoadNode::Ideal() attempt to remove control edges?
 189   virtual bool can_remove_control() const;
 190   const Type* const _type;      // What kind of value is loaded?
 191 
 192   virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
 193 public:
 194 
  // Constructs a load of type rt from address adr (alias slice at),
  // guarded by control c, with memory ordering mo and the given control
  // dependency (Pinned vs. DependsOnlyOnTest, see enum above).
  LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
    : MemNode(c,mem,adr,at), _control_dependency(control_dependency), _mo(mo), _type(rt) {
    init_class_id(Class_Load);
  }
  // Memory-order queries: per the assert below, a load is exactly one of
  // unordered or acquire, so these two predicates are complements.
  inline bool is_unordered() const { return !is_acquire(); }
  inline bool is_acquire() const {
    assert(_mo == unordered || _mo == acquire, "unexpected");
    return _mo == acquire;
  }
  // True for the zero-extending load opcodes (unsigned byte/short).
  inline bool is_unsigned() const {
    int lop = Opcode();
    return (lop == Op_LoadUB) || (lop == Op_LoadUS);
  }

  // Polymorphic factory method: builds the LoadNode subclass matching
  // basic type bt (definition in memnode.cpp).
  static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
                    const TypePtr* at, const Type *rt, BasicType bt,
                    MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
                    bool unaligned = false, bool mismatched = false, bool unsafe = false);

  virtual uint hash()   const;  // Check the type
 216 


 245     assert(t != NULL, "sanity");
 246     debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
 247     *(const Type**)&_type = t;   // cast away const-ness
 248     // If this node is in the hash table, make sure it doesn't need a rehash.
 249     assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
 250   }
 251   const Type* type() const { assert(_type != NULL, "sanity"); return _type; };
 252 
 253   // Do not match memory edge
 254   virtual uint match_edge(uint idx) const;
 255 
 256   // Map a load opcode to its corresponding store opcode.
 257   virtual int store_Opcode() const = 0;
 258 
 259   // Check if the load's memory input is a Phi node with the same control.
 260   bool is_instance_field_load_with_local_phi(Node* ctrl);
 261 
 262   Node* convert_to_unsigned_load(PhaseGVN& gvn);
 263   Node* convert_to_signed_load(PhaseGVN& gvn);
 264 








 265 #ifndef PRODUCT
 266   virtual void dump_spec(outputStream *st) const;
 267 #endif
 268 #ifdef ASSERT
 269   // Helper function to allow a raw load without control edge for some cases
 270   static bool is_immutable_value(Node* adr);
 271 #endif
 272 protected:
 273   const Type* load_array_final_field(const TypeKlassPtr *tkls,
 274                                      ciKlass* klass) const;
 275 
 276   Node* can_see_arraycopy_value(Node* st, PhaseGVN* phase) const;
 277 
 278   // depends_only_on_test is almost always true, and needs to be almost always
 279   // true to enable key hoisting & commoning optimizations.  However, for the
 280   // special case of RawPtr loads from TLS top & end, and other loads performed by
 281   // GC barriers, the control edge carries the dependence preventing hoisting past
 282   // a Safepoint instead of the memory edge.  (An unfortunate consequence of having
 283   // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
 284   // which produce results (new raw memory state) inside of loops preventing all


 793   virtual bool      is_CFG() const  { return false; }
 794   virtual const Type *bottom_type() const {return Type::MEMORY;}
 795   virtual const TypePtr *adr_type() const {
 796     Node* ctrl = in(0);
 797     if (ctrl == NULL)  return NULL; // node is dead
 798     return ctrl->in(MemNode::Memory)->adr_type();
 799   }
 800   virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
 801   virtual const Type* Value(PhaseGVN* phase) const;
 802 #ifndef PRODUCT
 803   virtual void dump_spec(outputStream *st) const {};
 804 #endif
 805 };
 806 
//------------------------------LoadStoreNode---------------------------
// Note: is_Mem() method returns 'true' for this class.
// Base class for combined atomic load-and-store operations
// (compare-and-swap, get-and-set, get-and-add); concrete subclasses
// supply the opcode.
class LoadStoreNode : public Node {
private:
  const Type* const _type;      // What kind of value is loaded?
  const TypePtr* _adr_type;     // What kind of memory is being addressed?

  virtual uint size_of() const; // Size is bigger
public:
  LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
  // Atomic accesses may not be hoisted above their dominating test.
  virtual bool depends_only_on_test() const { return false; }
  // Only the address and value-in edges participate in matching.
  virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }

  virtual const Type *bottom_type() const { return _type; }
  virtual uint ideal_reg() const;
  virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address

  // Whether the produced (loaded) result has any uses; see memnode.cpp.
  bool result_not_used() const;
  // The trailing MemBar paired with this access, if any; see memnode.cpp.
  MemBarNode* trailing_membar() const;
};
 826 
// A LoadStoreNode whose store is conditional on an expected value,
// carried as one extra input edge (ExpectedIn).
class LoadStoreConditionalNode : public LoadStoreNode {
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
};
 834 
//------------------------------StorePConditionalNode---------------------------
// Conditionally store pointer to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StorePConditionalNode : public LoadStoreConditionalNode {
public:
  // ll is the prior load-locked value the store is conditional on.
  StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};




 147 #endif
 148 };
 149 
 150 //------------------------------LoadNode---------------------------------------
 151 // Load value; requires Memory and Address
 152 class LoadNode : public MemNode {
 153 public:
 154   // Some loads (from unsafe) should be pinned: they don't depend only
 155   // on the dominating test.  The field _control_dependency below records
 156   // whether that node depends only on the dominating test.
 157   // Methods used to build LoadNodes pass an argument of type enum
 158   // ControlDependency instead of a boolean because those methods
 159   // typically have multiple boolean parameters with default values:
 160   // passing the wrong boolean to one of these parameters by mistake
 161   // goes easily unnoticed. Using an enum, the compiler can check that
 162   // the type of a value and the type of the parameter match.
 163   enum ControlDependency {
 164     Pinned,
 165     DependsOnlyOnTest
 166   };
 167 
 168   enum LoadBarrier {
 169     UnProcessed     = 0,
 170     RequireBarrier  = 1,
 171     WeakBarrier     = 3,  // Inclusive with RequireBarrier
 172     ExpandedBarrier = 4
 173   };
 174 
 175 private:
 176   // LoadNode::hash() doesn't take the _control_dependency field
 177   // into account: If the graph already has a non-pinned LoadNode and
 178   // we add a pinned LoadNode with the same inputs, it's safe for GVN
 179   // to replace the pinned LoadNode with the non-pinned LoadNode,
 180   // otherwise it wouldn't be safe to have a non pinned LoadNode with
 181   // those inputs in the first place. If the graph already has a
 182   // pinned LoadNode and we add a non pinned LoadNode with the same
 183   // inputs, it's safe (but suboptimal) for GVN to replace the
 184   // non-pinned LoadNode by the pinned LoadNode.
 185   ControlDependency _control_dependency;
 186 
 187   // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
 188   // loads that can be reordered, and such requiring acquire semantics to
 189   // adhere to the Java specification.  The required behaviour is stored in
 190   // this field.
 191   const MemOrd _mo;
 192 
 193   uint _barrier; // Bit field with barrier information
 194 
 195 protected:
 196   virtual bool cmp(const Node &n) const;
 197   virtual uint size_of() const; // Size is bigger
 198   // Should LoadNode::Ideal() attempt to remove control edges?
 199   virtual bool can_remove_control() const;
 200   const Type* const _type;      // What kind of value is loaded?
 201 
 202   virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
 203 public:
 204 
  // Constructs a load of type rt from address adr (alias slice at),
  // guarded by control c, with memory ordering mo and the given control
  // dependency.  GC barrier bookkeeping starts in the UnProcessed state
  // and is updated later by barrier insertion (see _barrier above).
  LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
    : MemNode(c,mem,adr,at), _control_dependency(control_dependency), _mo(mo), _barrier(LoadNode::UnProcessed), _type(rt) {
    init_class_id(Class_Load);
  }
  // Memory-order queries: per the assert below, a load is exactly one of
  // unordered or acquire, so these two predicates are complements.
  inline bool is_unordered() const { return !is_acquire(); }
  inline bool is_acquire() const {
    assert(_mo == unordered || _mo == acquire, "unexpected");
    return _mo == acquire;
  }
  // True for the zero-extending load opcodes (unsigned byte/short).
  inline bool is_unsigned() const {
    int lop = Opcode();
    return (lop == Op_LoadUB) || (lop == Op_LoadUS);
  }

  // Polymorphic factory method: builds the LoadNode subclass matching
  // basic type bt (definition in memnode.cpp).
  static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
                    const TypePtr* at, const Type *rt, BasicType bt,
                    MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
                    bool unaligned = false, bool mismatched = false, bool unsafe = false);

  virtual uint hash()   const;  // Check the type
 226 


 255     assert(t != NULL, "sanity");
 256     debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
 257     *(const Type**)&_type = t;   // cast away const-ness
 258     // If this node is in the hash table, make sure it doesn't need a rehash.
 259     assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
 260   }
 261   const Type* type() const { assert(_type != NULL, "sanity"); return _type; };
 262 
 263   // Do not match memory edge
 264   virtual uint match_edge(uint idx) const;
 265 
 266   // Map a load opcode to its corresponding store opcode.
 267   virtual int store_Opcode() const = 0;
 268 
 269   // Check if the load's memory input is a Phi node with the same control.
 270   bool is_instance_field_load_with_local_phi(Node* ctrl);
 271 
 272   Node* convert_to_unsigned_load(PhaseGVN& gvn);
 273   Node* convert_to_signed_load(PhaseGVN& gvn);
 274 
  // Copy the GC barrier bit field from another load node (used when a
  // load is cloned/replaced so barrier state is not lost).
  void copy_barrier_info(const Node* n) { _barrier = n->as_Load()->_barrier; }

  bool is_barrier_required()      { return _barrier & RequireBarrier; }  // load has any type of barrier
  // NOTE(review): WeakBarrier (3) includes the RequireBarrier bit, so this
  // also returns true for a strong barrier (_barrier == RequireBarrier).
  // If the intent is "weak only", the test should be
  // (_barrier & WeakBarrier) == WeakBarrier -- please confirm.
  bool is_barrier_weak()          { return _barrier & WeakBarrier; }     // only some barriers are weak
  bool is_barrier_expanded()      { return _barrier & ExpandedBarrier; }
  // Mark this load as needing a barrier; weak sets both bits of WeakBarrier.
  void set_barrier(bool weak)     { weak ? _barrier |= WeakBarrier : _barrier |= RequireBarrier; }
  void set_barrier_expanded()     { _barrier |= ExpandedBarrier; }
 282 
 283 #ifndef PRODUCT
 284   virtual void dump_spec(outputStream *st) const;
 285 #endif
 286 #ifdef ASSERT
 287   // Helper function to allow a raw load without control edge for some cases
 288   static bool is_immutable_value(Node* adr);
 289 #endif
 290 protected:
 291   const Type* load_array_final_field(const TypeKlassPtr *tkls,
 292                                      ciKlass* klass) const;
 293 
 294   Node* can_see_arraycopy_value(Node* st, PhaseGVN* phase) const;
 295 
 296   // depends_only_on_test is almost always true, and needs to be almost always
 297   // true to enable key hoisting & commoning optimizations.  However, for the
 298   // special case of RawPtr loads from TLS top & end, and other loads performed by
 299   // GC barriers, the control edge carries the dependence preventing hoisting past
 300   // a Safepoint instead of the memory edge.  (An unfortunate consequence of having
 301   // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
 302   // which produce results (new raw memory state) inside of loops preventing all


 811   virtual bool      is_CFG() const  { return false; }
 812   virtual const Type *bottom_type() const {return Type::MEMORY;}
 813   virtual const TypePtr *adr_type() const {
 814     Node* ctrl = in(0);
 815     if (ctrl == NULL)  return NULL; // node is dead
 816     return ctrl->in(MemNode::Memory)->adr_type();
 817   }
 818   virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
 819   virtual const Type* Value(PhaseGVN* phase) const;
 820 #ifndef PRODUCT
 821   virtual void dump_spec(outputStream *st) const {};
 822 #endif
 823 };
 824 
 825 //------------------------------LoadStoreNode---------------------------
 826 // Note: is_Mem() method returns 'true' for this class.
 827 class LoadStoreNode : public Node {
 828 private:
 829   const Type* const _type;      // What kind of value is loaded?
 830   const TypePtr* _adr_type;     // What kind of memory is being addressed?
 831   bool _has_barrier;
 832   virtual uint size_of() const; // Size is bigger
 833 public:
 834   LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
 835   virtual bool depends_only_on_test() const { return false; }
 836   virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }
 837 
 838   virtual const Type *bottom_type() const { return _type; }
 839   virtual uint ideal_reg() const;
 840   virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address
 841 
 842   bool result_not_used() const;
 843   MemBarNode* trailing_membar() const;
 844   void set_has_barrier() { _has_barrier = true; };
 845   bool has_barrier() const { return _has_barrier; };
 846 };
 847 
// A LoadStoreNode whose store is conditional on an expected value,
// carried as one extra input edge (ExpectedIn).
class LoadStoreConditionalNode : public LoadStoreNode {
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
};
 855 
//------------------------------StorePConditionalNode---------------------------
// Conditionally store pointer to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StorePConditionalNode : public LoadStoreConditionalNode {
public:
  // ll is the prior load-locked value the store is conditional on.
  StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};


< prev index next >