src/share/vm/opto/memnode.hpp


*** 584,593 **** --- 584,595 ----
  // Map a store opcode to its corresponding own opcode, trivially.
  virtual int store_Opcode() const { return Opcode(); }

  // have all possible loads of the value stored been optimized away?
  bool value_never_loaded(PhaseTransform *phase) const;
+
+ MemBarNode* trailing_membar() const;
};

//------------------------------StoreBNode-------------------------------------
// Store byte to memory
class StoreBNode : public StoreNode {
*** 789,798 **** --- 791,801 ----
  virtual const Type *bottom_type() const { return _type; }
  virtual uint ideal_reg() const;
  virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address

  bool result_not_used() const;
+ MemBarNode* trailing_membar() const;
};

class LoadStoreConditionalNode : public LoadStoreNode {
public:
  enum {
*** 1042,1051 **** --- 1045,1068 ----
  virtual uint size_of() const { return sizeof(*this); }
  // Memory type this node is serializing.  Usually either rawptr or bottom.
  const TypePtr* _adr_type;
+
+ // How is this membar related to a nearby memory access?
+ enum {
+   Standalone,
+   TrailingLoad,
+   TrailingStore,
+   LeadingStore,
+   TrailingLoadStore,
+   LeadingLoadStore
+ } _kind;
+
+ #ifdef ASSERT
+ uint _pair_idx;
+ #endif
+
public:
  enum {
    Precedent = TypeFunc::Parms  // optional edge to force precedence
  };
  MemBarNode(Compile* C, int alias_idx, Node* precedent);
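The _kind values classify a barrier by the volatile access pattern it sits in. As a rough sketch (not part of this webrev; the exact barrier opcodes vary by platform and compiler flags), C2's ideal graph around volatile accesses looks like:

  volatile load:    LoadX ... MemBarAcquire     (the acquire is TrailingLoad)

  volatile store:   MemBarRelease               (LeadingStore)
                    StoreX
                    MemBarVolatile              (TrailingStore)

  atomic update:    MemBarRelease               (LeadingLoadStore)
                    CompareAndSwapX / GetAndSetX
                    MemBarAcquire               (TrailingLoadStore)

A barrier that is not tied to such an access (an explicit fence, for example) stays Standalone.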
*** 1059,1068 **** --- 1076,1103 ----
  // Factory method.  Builds a wide or narrow membar.
  // Optional 'precedent' becomes an extra edge if not null.
  static MemBarNode* make(Compile* C, int opcode,
                          int alias_idx = Compile::AliasIdxBot,
                          Node* precedent = NULL);
+
+ MemBarNode* trailing_membar() const;
+ MemBarNode* leading_membar() const;
+
+ void set_trailing_load() { _kind = TrailingLoad; }
+ bool trailing_load() const { return _kind == TrailingLoad; }
+ bool trailing_store() const { return _kind == TrailingStore; }
+ bool leading_store() const { return _kind == LeadingStore; }
+ bool trailing_load_store() const { return _kind == TrailingLoadStore; }
+ bool leading_load_store() const { return _kind == LeadingLoadStore; }
+ bool trailing() const { return _kind == TrailingLoad || _kind == TrailingStore || _kind == TrailingLoadStore; }
+ bool leading() const { return _kind == LeadingStore || _kind == LeadingLoadStore; }
+ bool standalone() const { return _kind == Standalone; }
+
+ static void set_store_pair(MemBarNode* leading, MemBarNode* trailing);
+ static void set_load_store_pair(MemBarNode* leading, MemBarNode* trailing);
+
+ void remove(PhaseIterGVN *igvn);
};

// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted after a volatile load.
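To make the intent of the new pairing API concrete, here is a hypothetical usage sketch, not code from this change: C and alias_idx are assumed to be in scope, and the opcodes shown are the ones C2 typically emits around a volatile store. set_store_pair() records the relationship on both barriers, after which either one can navigate to its partner.

  // Hedged sketch only: pair the barriers around a volatile store
  // (MemBarRelease before the store, MemBarVolatile after it).
  MemBarNode* leading  = MemBarNode::make(C, Op_MemBarRelease,  alias_idx);
  MemBarNode* trailing = MemBarNode::make(C, Op_MemBarVolatile, alias_idx);
  MemBarNode::set_store_pair(leading, trailing);

  assert(leading->leading_store(),   "kind recorded on the leading barrier");
  assert(trailing->trailing_store(), "kind recorded on the trailing barrier");
  assert(leading->trailing_membar() == trailing, "partner lookup");
  assert(trailing->leading_membar() == leading,  "partner lookup");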