< prev index next >

src/hotspot/share/opto/memnode.hpp

Print this page

        

*** 41,50 **** --- 41,52 ---- class MemNode : public Node { private: bool _unaligned_access; // Unaligned access from unsafe bool _mismatched_access; // Mismatched access from unsafe: byte read in integer array for instance bool _unsafe_access; // Access of unsafe origin. + uint8_t _barrier; // Bit field with barrier information + protected: #ifdef ASSERT const TypePtr* _adr_type; // What kind of memory is being addressed? #endif virtual uint size_of() const;
*** 60,81 **** release, // Store has to release or be preceded by MemBarRelease. seqcst, // LoadStore has to have both acquire and release semantics. unset // The memory ordering is not set (used for testing) } MemOrd; protected: ! MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at ) ! : Node(c0,c1,c2 ), _unaligned_access(false), _mismatched_access(false), _unsafe_access(false) { init_class_id(Class_Mem); debug_only(_adr_type=at; adr_type();) } ! MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3 ) ! : Node(c0,c1,c2,c3), _unaligned_access(false), _mismatched_access(false), _unsafe_access(false) { init_class_id(Class_Mem); debug_only(_adr_type=at; adr_type();) } ! MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3, Node *c4) ! : Node(c0,c1,c2,c3,c4), _unaligned_access(false), _mismatched_access(false), _unsafe_access(false) { init_class_id(Class_Mem); debug_only(_adr_type=at; adr_type();) } virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const { return NULL; } --- 62,95 ---- release, // Store has to release or be preceded by MemBarRelease. seqcst, // LoadStore has to have both acquire and release semantics. unset // The memory ordering is not set (used for testing) } MemOrd; protected: ! MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at ) : ! Node(c0,c1,c2), ! _unaligned_access(false), ! _mismatched_access(false), ! _unsafe_access(false), ! _barrier(0) { init_class_id(Class_Mem); debug_only(_adr_type=at; adr_type();) } ! MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3 ) : ! Node(c0,c1,c2,c3), ! _unaligned_access(false), ! _mismatched_access(false), ! _unsafe_access(false), ! _barrier(0) { init_class_id(Class_Mem); debug_only(_adr_type=at; adr_type();) } ! MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3, Node *c4) : ! Node(c0,c1,c2,c3,c4), ! _unaligned_access(false), ! _mismatched_access(false), ! _unsafe_access(false), ! _barrier(0) { init_class_id(Class_Mem); debug_only(_adr_type=at; adr_type();) } virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const { return NULL; }
*** 123,132 **** --- 137,149 ---- #else return type2aelembytes(memory_type()); #endif } + uint8_t barrier_data() { return _barrier; } + void set_barrier_data(uint8_t barrier_data) { _barrier = barrier_data; } + // Search through memory states which precede this node (load or store). // Look for an exact match for the address, with no intervening // aliased stores. Node* find_previous_store(PhaseTransform* phase);
*** 179,190 **** // loads that can be reordered, and such requiring acquire semantics to // adhere to the Java specification. The required behaviour is stored in // this field. const MemOrd _mo; - uint _barrier; // Bit field with barrier information - AllocateNode* is_new_object_mark_load(PhaseGVN *phase) const; protected: virtual bool cmp(const Node &n) const; virtual uint size_of() const; // Size is bigger --- 196,205 ----
*** 194,204 **** virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const; public: LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency) ! : MemNode(c,mem,adr,at), _control_dependency(control_dependency), _mo(mo), _barrier(0), _type(rt) { init_class_id(Class_Load); } inline bool is_unordered() const { return !is_acquire(); } inline bool is_acquire() const { assert(_mo == unordered || _mo == acquire, "unexpected"); --- 209,219 ---- virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const; public: LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency) ! : MemNode(c,mem,adr,at), _control_dependency(control_dependency), _mo(mo), _type(rt) { init_class_id(Class_Load); } inline bool is_unordered() const { return !is_acquire(); } inline bool is_acquire() const { assert(_mo == unordered || _mo == acquire, "unexpected");
*** 263,276 **** bool is_instance_field_load_with_local_phi(Node* ctrl); Node* convert_to_unsigned_load(PhaseGVN& gvn); Node* convert_to_signed_load(PhaseGVN& gvn); - void copy_barrier_info(const Node* src) { _barrier = src->as_Load()->_barrier; } - uint barrier_data() { return _barrier; } - void set_barrier_data(uint barrier_data) { _barrier |= barrier_data; } - void pin() { _control_dependency = Pinned; } bool has_unknown_control_dependency() const { return _control_dependency == UnknownControl; } #ifndef PRODUCT virtual void dump_spec(outputStream *st) const; --- 278,287 ----
*** 818,828 **** // Note: is_Mem() method returns 'true' for this class. class LoadStoreNode : public Node { private: const Type* const _type; // What kind of value is loaded? const TypePtr* _adr_type; // What kind of memory is being addressed? ! bool _has_barrier; virtual uint size_of() const; // Size is bigger public: LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required ); virtual bool depends_only_on_test() const { return false; } virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; } --- 829,839 ---- // Note: is_Mem() method returns 'true' for this class. class LoadStoreNode : public Node { private: const Type* const _type; // What kind of value is loaded? const TypePtr* _adr_type; // What kind of memory is being addressed? ! uint8_t _barrier; // Bit field with barrier information virtual uint size_of() const; // Size is bigger public: LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required ); virtual bool depends_only_on_test() const { return false; } virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }
*** 831,842 **** virtual uint ideal_reg() const; virtual const class TypePtr *adr_type() const { return _adr_type; } // returns bottom_type of address bool result_not_used() const; MemBarNode* trailing_membar() const; ! void set_has_barrier() { _has_barrier = true; }; ! bool has_barrier() const { return _has_barrier; }; }; class LoadStoreConditionalNode : public LoadStoreNode { public: enum { --- 842,854 ---- virtual uint ideal_reg() const; virtual const class TypePtr *adr_type() const { return _adr_type; } // returns bottom_type of address bool result_not_used() const; MemBarNode* trailing_membar() const; ! ! uint8_t barrier_data() { return _barrier; } ! void set_barrier_data(uint8_t barrier_data) { _barrier = barrier_data; } }; class LoadStoreConditionalNode : public LoadStoreNode { public: enum {
*** 884,893 **** --- 896,906 ---- public: CompareAndSwapNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : LoadStoreConditionalNode(c, mem, adr, val, ex), _mem_ord(mem_ord) {} MemNode::MemOrd order() const { return _mem_ord; } + virtual uint size_of() const { return sizeof(*this); } }; class CompareAndExchangeNode : public LoadStoreNode { private: const MemNode::MemOrd _mem_ord;
*** 901,910 **** --- 914,924 ---- } MemNode::MemOrd order() const { return _mem_ord; } + virtual uint size_of() const { return sizeof(*this); } }; //------------------------------CompareAndSwapBNode--------------------------- class CompareAndSwapBNode : public CompareAndSwapNode { public:
< prev index next >