< prev index next >

src/share/vm/opto/memnode.hpp

Print this page




 569   // zero out the control input.
 570   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 571 
 572   // Compute a new Type for this node.  Basically we just do the pre-check,
 573   // then call the virtual add() to set the type.
 574   virtual const Type *Value( PhaseTransform *phase ) const;
 575 
 576   // Check for identity function on memory (Load then Store at same address)
 577   virtual Node *Identity( PhaseTransform *phase );
 578 
 579   // Do not match memory edge
 580   virtual uint match_edge(uint idx) const;
 581 
 582   virtual const Type *bottom_type() const;  // returns Type::MEMORY
 583 
 584   // Map a store opcode to its corresponding own opcode, trivially.
 585   virtual int store_Opcode() const { return Opcode(); }
 586 
 587   // have all possible loads of the value stored been optimized away?
 588   bool value_never_loaded(PhaseTransform *phase) const;


 589 };
 590 
 591 //------------------------------StoreBNode-------------------------------------
 592 // Store byte to memory
 593 class StoreBNode : public StoreNode {
 594 public:
       // All arguments are forwarded unchanged to the StoreNode base constructor.
 595   StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
 596     : StoreNode(c, mem, adr, at, val, mo) {}
 597   virtual int Opcode() const;
 598   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
       // The stored value occupies a single byte in memory.
 599   virtual BasicType memory_type() const { return T_BYTE; }
 600 };
 601 
 602 //------------------------------StoreCNode-------------------------------------
 603 // Store char/short to memory
 604 class StoreCNode : public StoreNode {
 605 public:
 606   StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
 607     : StoreNode(c, mem, adr, at, val, mo) {}
 608   virtual int Opcode() const;


 774 #endif
 775 };
 776 
 777 //------------------------------LoadStoreNode---------------------------
 778 // Note: is_Mem() method returns 'true' for this class.
 779 class LoadStoreNode : public Node {
 780 private:
 781   const Type* const _type;      // What kind of value is loaded?
 782   const TypePtr* _adr_type;     // What kind of memory is being addressed?
 783   virtual uint size_of() const; // Size is bigger
 784 public:
 785   LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
       // Always false -- presumably because the store half has memory side
       // effects and must not be hoisted past a guarding test (TODO confirm).
 786   virtual bool depends_only_on_test() const { return false; }
       // Only the Address and ValueIn edges participate in matching.
 787   virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }
 788 
 789   virtual const Type *bottom_type() const { return _type; }
 790   virtual uint ideal_reg() const;
 791   virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address
 792 
       // Whether the value produced by this node is ever consumed; defined out of line.
 793   bool result_not_used() const;
 794 };
 795 
 796 class LoadStoreConditionalNode : public LoadStoreNode {
 797 public:
 798   enum {
       // Index of the extra input edge carrying 'ex' -- presumably the expected
       // (compare) value for the conditional update; confirm in memnode.cpp.
 799     ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
 800   };
 801   LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
 802 };
 803 
 804 //------------------------------StorePConditionalNode---------------------------
 805 // Conditionally store pointer to memory, if no change since prior
 806 // load-locked.  Sets flags for success or failure of the store.
 807 class StorePConditionalNode : public LoadStoreConditionalNode {
 808 public:
       // 'll' is the prior load-locked value this store is conditional on
       // (see the class comment above).
 809   StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
 810   virtual int Opcode() const;
 811   // Produces flags
 812   virtual uint ideal_reg() const { return Op_RegFlags; }
 813 };


1027   virtual const Type *Value(PhaseTransform *phase) const;
1028 };
1029 
1030 //------------------------------MemBar-----------------------------------------
1031 // There are different flavors of Memory Barriers to match the Java Memory
1032 // Model.  Monitor-enter and volatile-load act as Acquires: no following ref
1033 // can be moved to before them.  We insert a MemBar-Acquire after a FastLock or
1034 // volatile-load.  Monitor-exit and volatile-store act as Release: no
1035 // preceding ref can be moved to after them.  We insert a MemBar-Release
1036 // before a FastUnlock or volatile-store.  All volatiles need to be
1037 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
1038 // separate it from any following volatile-load.
1039 class MemBarNode: public MultiNode {
1040   virtual uint hash() const ;                  // { return NO_HASH; }
1041   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
1042 
1043   virtual uint size_of() const { return sizeof(*this); }
1044   // Memory type this node is serializing.  Usually either rawptr or bottom.
1045   const TypePtr* _adr_type;
1046 














1047 public:
1048   enum {
1049     Precedent = TypeFunc::Parms  // optional edge to force precedence
1050   };
1051   MemBarNode(Compile* C, int alias_idx, Node* precedent);
1052   virtual int Opcode() const = 0;
1053   virtual const class TypePtr *adr_type() const { return _adr_type; }
1054   virtual const Type *Value( PhaseTransform *phase ) const;
1055   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
       // Membars match no input edges (always returns 0).
1056   virtual uint match_edge(uint idx) const { return 0; }
1057   virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
1058   virtual Node *match( const ProjNode *proj, const Matcher *m );
1059   // Factory method.  Builds a wide or narrow membar.
1060   // Optional 'precedent' becomes an extra edge if not null.
1061   static MemBarNode* make(Compile* C, int opcode,
1062                           int alias_idx = Compile::AliasIdxBot,
1063                           Node* precedent = NULL);


















1064 };
1065 
1066 // "Acquire" - no following ref can move before (but earlier refs can
1067 // follow, like an early Load stalled in cache).  Requires multi-cpu
1068 // visibility.  Inserted after a volatile load.
1069 class MemBarAcquireNode: public MemBarNode {
1070 public:
       // Pass-through constructor; the node's behavior is selected by Opcode().
1071   MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
1072     : MemBarNode(C, alias_idx, precedent) {}
1073   virtual int Opcode() const;
1074 };
1075 
1076 // "Acquire" - no following ref can move before (but earlier refs can
1077 // follow, like an early Load stalled in cache).  Requires multi-cpu
1078 // visibility.  Inserted independently of any load, as required
1079 // for intrinsic sun.misc.Unsafe.loadFence().
1080 class LoadFenceNode: public MemBarNode {
1081 public:
1082   LoadFenceNode(Compile* C, int alias_idx, Node* precedent)
1083     : MemBarNode(C, alias_idx, precedent) {}




 569   // zero out the control input.
 570   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 571 
 572   // Compute a new Type for this node.  Basically we just do the pre-check,
 573   // then call the virtual add() to set the type.
 574   virtual const Type *Value( PhaseTransform *phase ) const;
 575 
 576   // Check for identity function on memory (Load then Store at same address)
 577   virtual Node *Identity( PhaseTransform *phase );
 578 
 579   // Do not match memory edge
 580   virtual uint match_edge(uint idx) const;
 581 
 582   virtual const Type *bottom_type() const;  // returns Type::MEMORY
 583 
 584   // Map a store opcode to its corresponding own opcode, trivially.
 585   virtual int store_Opcode() const { return Opcode(); }
 586 
 587   // have all possible loads of the value stored been optimized away?
 588   bool value_never_loaded(PhaseTransform *phase) const;
 589 
 590   MemBarNode* trailing_membar() const;
 591 };
 592 
 593 //------------------------------StoreBNode-------------------------------------
 594 // Store byte to memory
 595 class StoreBNode : public StoreNode {
 596 public:
       // All arguments are forwarded unchanged to the StoreNode base constructor.
 597   StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
 598     : StoreNode(c, mem, adr, at, val, mo) {}
 599   virtual int Opcode() const;
 600   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
       // The stored value occupies a single byte in memory.
 601   virtual BasicType memory_type() const { return T_BYTE; }
 602 };
 603 
 604 //------------------------------StoreCNode-------------------------------------
 605 // Store char/short to memory
 606 class StoreCNode : public StoreNode {
 607 public:
 608   StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
 609     : StoreNode(c, mem, adr, at, val, mo) {}
 610   virtual int Opcode() const;


 776 #endif
 777 };
 778 
 779 //------------------------------LoadStoreNode---------------------------
 780 // Note: is_Mem() method returns 'true' for this class.
 781 class LoadStoreNode : public Node {
 782 private:
 783   const Type* const _type;      // What kind of value is loaded?
 784   const TypePtr* _adr_type;     // What kind of memory is being addressed?
 785   virtual uint size_of() const; // Size is bigger
 786 public:
 787   LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
       // Always false -- presumably because the store half has memory side
       // effects and must not be hoisted past a guarding test (TODO confirm).
 788   virtual bool depends_only_on_test() const { return false; }
       // Only the Address and ValueIn edges participate in matching.
 789   virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }
 790 
 791   virtual const Type *bottom_type() const { return _type; }
 792   virtual uint ideal_reg() const;
 793   virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address
 794 
       // Whether the value produced by this node is ever consumed; defined out of line.
 795   bool result_not_used() const;
       // The trailing MemBarNode paired with this access, if any (see MemBarNode).
 796   MemBarNode* trailing_membar() const;
 797 };
 798 
 799 class LoadStoreConditionalNode : public LoadStoreNode {
 800 public:
 801   enum {
       // Index of the extra input edge carrying 'ex' -- presumably the expected
       // (compare) value for the conditional update; confirm in memnode.cpp.
 802     ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
 803   };
 804   LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
 805 };
 806 
 807 //------------------------------StorePConditionalNode---------------------------
 808 // Conditionally store pointer to memory, if no change since prior
 809 // load-locked.  Sets flags for success or failure of the store.
 810 class StorePConditionalNode : public LoadStoreConditionalNode {
 811 public:
       // 'll' is the prior load-locked value this store is conditional on
       // (see the class comment above).
 812   StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
 813   virtual int Opcode() const;
 814   // Produces flags
 815   virtual uint ideal_reg() const { return Op_RegFlags; }
 816 };


1030   virtual const Type *Value(PhaseTransform *phase) const;
1031 };
1032 
1033 //------------------------------MemBar-----------------------------------------
1034 // There are different flavors of Memory Barriers to match the Java Memory
1035 // Model.  Monitor-enter and volatile-load act as Acquires: no following ref
1036 // can be moved to before them.  We insert a MemBar-Acquire after a FastLock or
1037 // volatile-load.  Monitor-exit and volatile-store act as Release: no
1038 // preceding ref can be moved to after them.  We insert a MemBar-Release
1039 // before a FastUnlock or volatile-store.  All volatiles need to be
1040 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
1041 // separate it from any following volatile-load.
1042 class MemBarNode: public MultiNode {
1043   virtual uint hash() const ;                  // { return NO_HASH; }
1044   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
1045 
1046   virtual uint size_of() const { return sizeof(*this); }
1047   // Memory type this node is serializing.  Usually either rawptr or bottom.
1048   const TypePtr* _adr_type;
1049 
1050   // How is this membar related to a nearby memory access?
1051   enum {
1052     Standalone,
1053     TrailingLoad,
1054     TrailingStore,
1055     LeadingStore,
1056     TrailingLoadStore,
1057     LeadingLoadStore
1058   } _kind;
1059 
1060 #ifdef ASSERT
       // Debug-only: presumably identifies this membar's leading/trailing pair
       // for pairing sanity checks -- confirm in memnode.cpp.
1061   uint _pair_idx;
1062 #endif
1063 
1064 public:
1065   enum {
1066     Precedent = TypeFunc::Parms  // optional edge to force precedence
1067   };
1068   MemBarNode(Compile* C, int alias_idx, Node* precedent);
1069   virtual int Opcode() const = 0;
1070   virtual const class TypePtr *adr_type() const { return _adr_type; }
1071   virtual const Type *Value( PhaseTransform *phase ) const;
1072   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
       // Membars match no input edges (always returns 0).
1073   virtual uint match_edge(uint idx) const { return 0; }
1074   virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
1075   virtual Node *match( const ProjNode *proj, const Matcher *m );
1076   // Factory method.  Builds a wide or narrow membar.
1077   // Optional 'precedent' becomes an extra edge if not null.
1078   static MemBarNode* make(Compile* C, int opcode,
1079                           int alias_idx = Compile::AliasIdxBot,
1080                           Node* precedent = NULL);
1081 
       // Navigate from one membar of a leading/trailing pair to the other
       // (pairs are established via set_store_pair / set_load_store_pair below).
1082   MemBarNode* trailing_membar() const;
1083   MemBarNode* leading_membar() const;
1084 
       // Setter and queries for the _kind classification above.
1085   void set_trailing_load() { _kind = TrailingLoad; }
1086   bool trailing_load() const { return _kind == TrailingLoad; }
1087   bool trailing_store() const { return _kind == TrailingStore; }
1088   bool leading_store() const { return _kind == LeadingStore; }
1089   bool trailing_load_store() const { return _kind == TrailingLoadStore; }
1090   bool leading_load_store() const { return _kind == LeadingLoadStore; }
       // Aggregate queries: any Trailing* kind / any Leading* kind / Standalone.
1091   bool trailing() const { return _kind == TrailingLoad || _kind == TrailingStore || _kind == TrailingLoadStore; }
1092   bool leading() const { return _kind == LeadingStore || _kind == LeadingLoadStore; }
1093   bool standalone() const { return _kind == Standalone; }
1094 
       // Mark 'leading' and 'trailing' as a matched pair around a store or a
       // load-store access.
1095   static void set_store_pair(MemBarNode* leading, MemBarNode* trailing);
1096   static void set_load_store_pair(MemBarNode* leading, MemBarNode* trailing);
1097 
       // Remove this membar from the graph; takes the IGVN phase to update users.
1098   void remove(PhaseIterGVN *igvn);
1099 };
1100 
1101 // "Acquire" - no following ref can move before (but earlier refs can
1102 // follow, like an early Load stalled in cache).  Requires multi-cpu
1103 // visibility.  Inserted after a volatile load.
1104 class MemBarAcquireNode: public MemBarNode {
1105 public:
       // Pass-through constructor; the node's behavior is selected by Opcode().
1106   MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
1107     : MemBarNode(C, alias_idx, precedent) {}
1108   virtual int Opcode() const;
1109 };
1110 
1111 // "Acquire" - no following ref can move before (but earlier refs can
1112 // follow, like an early Load stalled in cache).  Requires multi-cpu
1113 // visibility.  Inserted independently of any load, as required
1114 // for intrinsic sun.misc.Unsafe.loadFence().
1115 class LoadFenceNode: public MemBarNode {
1116 public:
1117   LoadFenceNode(Compile* C, int alias_idx, Node* precedent)
1118     : MemBarNode(C, alias_idx, precedent) {}


< prev index next >