const DecoratorSet C2_CONTROL_DEPENDENT_LOAD = DECORATOR_LAST << 4;
// This denotes a load that must be pinned.
const DecoratorSet C2_PINNED_LOAD = DECORATOR_LAST << 5;
// This denotes that the access is produced from the sun.misc.Unsafe intrinsics.
const DecoratorSet C2_UNSAFE_ACCESS = DECORATOR_LAST << 6;
// This denotes that the access mutates state.
const DecoratorSet C2_WRITE_ACCESS = DECORATOR_LAST << 7;
// This denotes that the access reads state.
const DecoratorSet C2_READ_ACCESS = DECORATOR_LAST << 8;
// This denotes that the access is tightly coupled with a nearby allocation.
// NOTE(review): "TIGHLY" is a typo for "TIGHTLY"; kept as-is because renaming
// this constant would break every existing user of it.
const DecoratorSet C2_TIGHLY_COUPLED_ALLOC = DECORATOR_LAST << 9;
// Loads and stores from an arraycopy being optimized.
const DecoratorSet C2_ARRAY_COPY = DECORATOR_LAST << 10;
57
58 class Compile;
59 class ConnectionGraph;
60 class GraphKit;
61 class IdealKit;
62 class Node;
63 class PhaseGVN;
64 class PhaseMacroExpand;
65 class Type;
66 class TypePtr;
67 class Unique_Node_List;
68
// This class wraps a node and a type.
class C2AccessValue: public StackObj {
protected:
  Node* _node;        // the wrapped node
  const Type* _type;  // the wrapped node's type

public:
  // Captures the node/type pair; the pointers are stored as-is.
  C2AccessValue(Node* node, const Type* type) :
    _node(node),
    _type(type) {}

  Node* node() const { return _node; }
  const Type* type() const { return _type; }

  // Replace the wrapped node; the type is left unchanged.
  void set_node(Node* node) { _node = node; }
292
  // Gives the barrier set a chance to canonicalize ("flatten") a GC-specific
  // alias type; returns false when this barrier set does not handle adr_type.
  virtual bool flatten_gc_alias_type(const TypePtr*& adr_type) const { return false; }
#ifdef ASSERT
  // Debug-only counterpart: verify a GC-specific alias type/offset pair.
  // false = not a GC alias type handled by this barrier set.
  virtual bool verify_gc_alias_type(const TypePtr* adr_type, int offset) const { return false; }
#endif

  // Hook into final graph reshaping for node 'n' with the given opcode;
  // returns true if the barrier set handled the node itself.
  virtual bool final_graph_reshaping(Compile* compile, Node* n, uint opcode) const { return false; }

  // Escape-analysis (ConnectionGraph) hooks. Each returns true when the
  // barrier set took care of the given node, false to fall back to the
  // generic handling. All defaults decline.
  virtual bool escape_add_to_con_graph(ConnectionGraph* conn_graph, PhaseGVN* gvn, Unique_Node_List* delayed_worklist, Node* n, uint opcode) const { return false; }
  virtual bool escape_add_final_edges(ConnectionGraph* conn_graph, PhaseGVN* gvn, Node* n, uint opcode) const { return false; }
  virtual bool escape_has_out_with_unsafe_object(Node* n) const { return false; }
  virtual bool escape_is_barrier_node(Node* n) const { return false; }
304
305 virtual bool matcher_find_shared_visit(Matcher* matcher, Matcher::MStack& mstack, Node* n, uint opcode, bool& mem_op, int& mem_addr_idx) const { return false; };
306 virtual bool matcher_find_shared_post_visit(Matcher* matcher, Node* n, uint opcode) const { return false; };
307 virtual bool matcher_is_store_load_barrier(Node* x, uint xop) const { return false; }
308
  // IGVN/CCP notification hooks — presumably let the barrier set enqueue
  // extra nodes on the worklist when 'use' is revisited (name-based; confirm
  // against the PhaseIterGVN/PhaseCCP call sites). Defaults do nothing.
  virtual void igvn_add_users_to_worklist(PhaseIterGVN* igvn, Node* use) const {}
  virtual void ccp_analyze(PhaseCCP* ccp, Unique_Node_List& worklist, Node* use) const {}
311
312 };
313
314 #endif // SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
|
const DecoratorSet C2_CONTROL_DEPENDENT_LOAD = DECORATOR_LAST << 4;
// This denotes a load that must be pinned.
const DecoratorSet C2_PINNED_LOAD = DECORATOR_LAST << 5;
// This denotes that the access is produced from the sun.misc.Unsafe intrinsics.
const DecoratorSet C2_UNSAFE_ACCESS = DECORATOR_LAST << 6;
// This denotes that the access mutates state.
const DecoratorSet C2_WRITE_ACCESS = DECORATOR_LAST << 7;
// This denotes that the access reads state.
const DecoratorSet C2_READ_ACCESS = DECORATOR_LAST << 8;
// This denotes that the access is tightly coupled with a nearby allocation.
// NOTE(review): "TIGHLY" is a typo for "TIGHTLY"; kept as-is because renaming
// this constant would break every existing user of it.
const DecoratorSet C2_TIGHLY_COUPLED_ALLOC = DECORATOR_LAST << 9;
// Loads and stores from an arraycopy being optimized.
const DecoratorSet C2_ARRAY_COPY = DECORATOR_LAST << 10;
57
58 class Compile;
59 class ConnectionGraph;
60 class GraphKit;
61 class IdealKit;
62 class Node;
63 class PhaseGVN;
64 class PhaseIdealLoop;
65 class PhaseMacroExpand;
66 class Type;
67 class TypePtr;
68 class Unique_Node_List;
69
// This class wraps a node and a type.
class C2AccessValue: public StackObj {
protected:
  Node* _node;        // the wrapped node
  const Type* _type;  // the wrapped node's type

public:
  // Captures the node/type pair; the pointers are stored as-is.
  C2AccessValue(Node* node, const Type* type) :
    _node(node),
    _type(type) {}

  Node* node() const { return _node; }
  const Type* type() const { return _type; }

  // Replace the wrapped node; the type is left unchanged.
  void set_node(Node* node) { _node = node; }
293
  // Gives the barrier set a chance to canonicalize ("flatten") a GC-specific
  // alias type; returns false when this barrier set does not handle adr_type.
  virtual bool flatten_gc_alias_type(const TypePtr*& adr_type) const { return false; }
#ifdef ASSERT
  // Debug-only counterpart: verify a GC-specific alias type/offset pair.
  // false = not a GC alias type handled by this barrier set.
  virtual bool verify_gc_alias_type(const TypePtr* adr_type, int offset) const { return false; }
#endif

  // Hook into final graph reshaping for node 'n' with the given opcode;
  // returns true if the barrier set handled the node itself.
  virtual bool final_graph_reshaping(Compile* compile, Node* n, uint opcode) const { return false; }

  // Escape-analysis (ConnectionGraph) hooks. Each returns true when the
  // barrier set took care of the given node, false to fall back to the
  // generic handling. All defaults decline.
  virtual bool escape_add_to_con_graph(ConnectionGraph* conn_graph, PhaseGVN* gvn, Unique_Node_List* delayed_worklist, Node* n, uint opcode) const { return false; }
  virtual bool escape_add_final_edges(ConnectionGraph* conn_graph, PhaseGVN* gvn, Node* n, uint opcode) const { return false; }
  virtual bool escape_has_out_with_unsafe_object(Node* n) const { return false; }
  virtual bool escape_is_barrier_node(Node* n) const { return false; }
305
306 virtual bool matcher_find_shared_visit(Matcher* matcher, Matcher::MStack& mstack, Node* n, uint opcode, bool& mem_op, int& mem_addr_idx) const { return false; };
307 virtual bool matcher_find_shared_post_visit(Matcher* matcher, Node* n, uint opcode) const { return false; };
308 virtual bool matcher_is_store_load_barrier(Node* x, uint xop) const { return false; }
309
  // IGVN/CCP notification hooks — presumably let the barrier set enqueue
  // extra nodes on the worklist when 'use' is revisited (name-based; confirm
  // against the PhaseIterGVN/PhaseCCP call sites). Defaults do nothing.
  virtual void igvn_add_users_to_worklist(PhaseIterGVN* igvn, Node* use) const {}
  virtual void ccp_analyze(PhaseCCP* ccp, Unique_Node_List& worklist, Node* use) const {}

  // Loop-optimization hook invoked before split-if; a NULL return means
  // "no GC-specific replacement for n". Default: no replacement.
  virtual Node* split_if_pre(PhaseIdealLoop* phase, Node* n) const { return NULL; }
314 };
315
316 #endif // SHARE_GC_SHARED_C2_BARRIERSETC2_HPP
|