24
25 #include "precompiled.hpp"
26 #include "memory/allocation.inline.hpp"
27 #include "memory/resourceArea.hpp"
28 #include "opto/ad.hpp"
29 #include "opto/addnode.hpp"
30 #include "opto/callnode.hpp"
31 #include "opto/idealGraphPrinter.hpp"
32 #include "opto/matcher.hpp"
33 #include "opto/memnode.hpp"
34 #include "opto/movenode.hpp"
35 #include "opto/opcodes.hpp"
36 #include "opto/regmask.hpp"
37 #include "opto/rootnode.hpp"
38 #include "opto/runtime.hpp"
39 #include "opto/type.hpp"
40 #include "opto/vectornode.hpp"
41 #include "runtime/os.hpp"
42 #include "runtime/sharedRuntime.hpp"
43 #include "utilities/align.hpp"
44
// Out-of-class storage for static members (the declarations live in the
// corresponding headers; most of these masks are populated at runtime
// during matcher initialization).
45 OptoReg::Name OptoReg::c_frame_pointer;
46
// Per ideal-register-class allowed-register masks.
47 const RegMask *Matcher::idealreg2regmask[_last_machine_leaf];
// One mask per machine register; STACK_ONLY and the frame-pointer mask
// are special-purpose singletons.
48 RegMask Matcher::mreg2regmask[_last_Mach_Reg];
49 RegMask Matcher::STACK_ONLY_mask;
50 RegMask Matcher::c_frame_ptr_mask;
// Opcode bounds copied from AD-file generated constants; presumably they
// delimit the machine opcodes eligible for rematerialization -- TODO confirm
// against the generated ad files.
51 const uint Matcher::_begin_rematerialize = _BEGIN_REMATERIALIZE;
52 const uint Matcher::_end_rematerialize = _END_REMATERIALIZE;
53
54 //---------------------------Matcher-------------------------------------------
55 Matcher::Matcher()
56 : PhaseTransform( Phase::Ins_Select ),
57 #ifdef ASSERT
58 _old2new_map(C->comp_arena()),
59 _new2old_map(C->comp_arena()),
60 #endif
61 _shared_nodes(C->comp_arena()),
62 _reduceOp(reduceOp), _leftOp(leftOp), _rightOp(rightOp),
63 _swallowed(swallowed),
2045 Node_State nstate = mstack.state();
2046 uint nop = n->Opcode();
2047 if (nstate == Pre_Visit) {
2048 if (address_visited.test(n->_idx)) { // Visited in address already?
2049 // Flag as visited and shared now.
2050 set_visited(n);
2051 }
2052 if (is_visited(n)) { // Visited already?
2053 // Node is shared and has no reason to clone. Flag it as shared.
2054 // This causes it to match into a register for the sharing.
2055 set_shared(n); // Flag as shared and
2056 mstack.pop(); // remove node from stack
2057 continue;
2058 }
2059 nstate = Visit; // Not already visited; so visit now
2060 }
2061 if (nstate == Visit) {
2062 mstack.set_state(Post_Visit);
2063 set_visited(n); // Flag as visited now
2064 bool mem_op = false;
2065
2066 switch( nop ) { // Handle some opcodes special
2067 case Op_Phi: // Treat Phis as shared roots
2068 case Op_Parm:
2069 case Op_Proj: // All handled specially during matching
2070 case Op_SafePointScalarObject:
2071 set_shared(n);
2072 set_dontcare(n);
2073 break;
2074 case Op_If:
2075 case Op_CountedLoopEnd:
2076 mstack.set_state(Alt_Post_Visit); // Alternative way
2077 // Convert (If (Bool (CmpX A B))) into (If (Bool) (CmpX A B)). Helps
2078 // with matching cmp/branch in 1 instruction. The Matcher needs the
2079 // Bool and CmpX side-by-side, because it can only get at constants
2080 // that are at the leaves of Match trees, and the Bool's condition acts
2081 // as a constant here.
2082 mstack.push(n->in(1), Visit); // Clone the Bool
2083 mstack.push(n->in(0), Pre_Visit); // Visit control input
2084 continue; // while (mstack.is_nonempty())
2133 if (tp->_ptr == TypePtr::AnyNull) {
2134 tn->set_type(TypePtr::NULL_PTR);
2135 }
2136 break;
2137 }
      2138       case Op_ConN: { // Convert narrow pointers above the centerline to NULL
2139 TypeNode *tn = n->as_Type(); // Constants derive from type nodes
2140 const TypePtr* tp = tn->type()->make_ptr();
2141 if (tp && tp->_ptr == TypePtr::AnyNull) {
2142 tn->set_type(TypeNarrowOop::NULL_PTR);
2143 }
2144 break;
2145 }
2146 case Op_Binary: // These are introduced in the Post_Visit state.
2147 ShouldNotReachHere();
2148 break;
2149 case Op_ClearArray:
2150 case Op_SafePoint:
2151 mem_op = true;
2152 break;
2153 default:
2154 if( n->is_Store() ) {
2155 // Do match stores, despite no ideal reg
2156 mem_op = true;
2157 break;
2158 }
2159 if( n->is_Mem() ) { // Loads and LoadStores
2160 mem_op = true;
2161 // Loads must be root of match tree due to prior load conflict
2162 if( C->subsume_loads() == false )
2163 set_shared(n);
2164 }
2165 // Fall into default case
2166 if( !n->ideal_reg() )
2167 set_dontcare(n); // Unmatchable Nodes
2168 } // end_switch
2169
2170 for(int i = n->req() - 1; i >= 0; --i) { // For my children
2171 Node *m = n->in(i); // Get ith input
2172 if (m == NULL) continue; // Ignore NULLs
2182 mstack.push(m, Visit);
2183 continue; // for(int i = ...)
2184 }
2185
2186 if( mop == Op_AddP && m->in(AddPNode::Base)->is_DecodeNarrowPtr()) {
2187 // Bases used in addresses must be shared but since
2188 // they are shared through a DecodeN they may appear
2189 // to have a single use so force sharing here.
2190 set_shared(m->in(AddPNode::Base)->in(1));
2191 }
2192
2193 // if 'n' and 'm' are part of a graph for BMI instruction, clone this node.
2194 #ifdef X86
2195 if (UseBMI1Instructions && is_bmi_pattern(n, m)) {
2196 mstack.push(m, Visit);
2197 continue;
2198 }
2199 #endif
2200
2201 // Clone addressing expressions as they are "free" in memory access instructions
2202 if (mem_op && i == MemNode::Address && mop == Op_AddP &&
2203 // When there are other uses besides address expressions
2204 // put it on stack and mark as shared.
2205 !is_visited(m)) {
2206 // Some inputs for address expression are not put on stack
2207 // to avoid marking them as shared and forcing them into register
2208 // if they are used only in address expressions.
2209 // But they should be marked as shared if there are other uses
2210 // besides address expressions.
2211
2212 if (clone_address_expressions(m->as_AddP(), mstack, address_visited)) {
2213 continue;
2214 }
2215 } // if( mem_op &&
2216 mstack.push(m, Pre_Visit);
2217 } // for(int i = ...)
2218 }
2219 else if (nstate == Alt_Post_Visit) {
2220 mstack.pop(); // Remove node from stack
2221 // We cannot remove the Cmp input from the Bool here, as the Bool may be
2222 // shared and all users of the Bool need to move the Cmp in parallel.
|
24
25 #include "precompiled.hpp"
26 #include "memory/allocation.inline.hpp"
27 #include "memory/resourceArea.hpp"
28 #include "opto/ad.hpp"
29 #include "opto/addnode.hpp"
30 #include "opto/callnode.hpp"
31 #include "opto/idealGraphPrinter.hpp"
32 #include "opto/matcher.hpp"
33 #include "opto/memnode.hpp"
34 #include "opto/movenode.hpp"
35 #include "opto/opcodes.hpp"
36 #include "opto/regmask.hpp"
37 #include "opto/rootnode.hpp"
38 #include "opto/runtime.hpp"
39 #include "opto/type.hpp"
40 #include "opto/vectornode.hpp"
41 #include "runtime/os.hpp"
42 #include "runtime/sharedRuntime.hpp"
43 #include "utilities/align.hpp"
44 #if INCLUDE_ZGC
45 #include "gc/z/zBarrierSetRuntime.hpp"
46 #endif // INCLUDE_ZGC
47
// Out-of-class storage for static members (the declarations live in the
// corresponding headers; most of these masks are populated at runtime
// during matcher initialization).
48 OptoReg::Name OptoReg::c_frame_pointer;
49
// Per ideal-register-class allowed-register masks.
50 const RegMask *Matcher::idealreg2regmask[_last_machine_leaf];
// One mask per machine register; STACK_ONLY and the frame-pointer mask
// are special-purpose singletons.
51 RegMask Matcher::mreg2regmask[_last_Mach_Reg];
52 RegMask Matcher::STACK_ONLY_mask;
53 RegMask Matcher::c_frame_ptr_mask;
// Opcode bounds copied from AD-file generated constants; presumably they
// delimit the machine opcodes eligible for rematerialization -- TODO confirm
// against the generated ad files.
54 const uint Matcher::_begin_rematerialize = _BEGIN_REMATERIALIZE;
55 const uint Matcher::_end_rematerialize = _END_REMATERIALIZE;
56
57 //---------------------------Matcher-------------------------------------------
58 Matcher::Matcher()
59 : PhaseTransform( Phase::Ins_Select ),
60 #ifdef ASSERT
61 _old2new_map(C->comp_arena()),
62 _new2old_map(C->comp_arena()),
63 #endif
64 _shared_nodes(C->comp_arena()),
65 _reduceOp(reduceOp), _leftOp(leftOp), _rightOp(rightOp),
66 _swallowed(swallowed),
2048 Node_State nstate = mstack.state();
2049 uint nop = n->Opcode();
2050 if (nstate == Pre_Visit) {
2051 if (address_visited.test(n->_idx)) { // Visited in address already?
2052 // Flag as visited and shared now.
2053 set_visited(n);
2054 }
2055 if (is_visited(n)) { // Visited already?
2056 // Node is shared and has no reason to clone. Flag it as shared.
2057 // This causes it to match into a register for the sharing.
2058 set_shared(n); // Flag as shared and
2059 mstack.pop(); // remove node from stack
2060 continue;
2061 }
2062 nstate = Visit; // Not already visited; so visit now
2063 }
2064 if (nstate == Visit) {
2065 mstack.set_state(Post_Visit);
2066 set_visited(n); // Flag as visited now
2067 bool mem_op = false;
2068 int mem_addr_idx = MemNode::Address;
2069
2070 switch( nop ) { // Handle some opcodes special
2071 case Op_Phi: // Treat Phis as shared roots
2072 case Op_Parm:
2073 case Op_Proj: // All handled specially during matching
2074 case Op_SafePointScalarObject:
2075 set_shared(n);
2076 set_dontcare(n);
2077 break;
2078 case Op_If:
2079 case Op_CountedLoopEnd:
2080 mstack.set_state(Alt_Post_Visit); // Alternative way
2081 // Convert (If (Bool (CmpX A B))) into (If (Bool) (CmpX A B)). Helps
2082 // with matching cmp/branch in 1 instruction. The Matcher needs the
2083 // Bool and CmpX side-by-side, because it can only get at constants
2084 // that are at the leaves of Match trees, and the Bool's condition acts
2085 // as a constant here.
2086 mstack.push(n->in(1), Visit); // Clone the Bool
2087 mstack.push(n->in(0), Pre_Visit); // Visit control input
2088 continue; // while (mstack.is_nonempty())
2137 if (tp->_ptr == TypePtr::AnyNull) {
2138 tn->set_type(TypePtr::NULL_PTR);
2139 }
2140 break;
2141 }
      2142       case Op_ConN: { // Convert narrow pointers above the centerline to NULL
2143 TypeNode *tn = n->as_Type(); // Constants derive from type nodes
2144 const TypePtr* tp = tn->type()->make_ptr();
2145 if (tp && tp->_ptr == TypePtr::AnyNull) {
2146 tn->set_type(TypeNarrowOop::NULL_PTR);
2147 }
2148 break;
2149 }
2150 case Op_Binary: // These are introduced in the Post_Visit state.
2151 ShouldNotReachHere();
2152 break;
2153 case Op_ClearArray:
2154 case Op_SafePoint:
2155 mem_op = true;
2156 break;
2157 #if INCLUDE_ZGC
2158 case Op_CallLeaf:
2159 if (UseZGC) {
2160 if (n->as_Call()->entry_point() == ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr() ||
2161 n->as_Call()->entry_point() == ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded_addr()) {
2162 mem_op = true;
2163 mem_addr_idx = TypeFunc::Parms+1;
2164 }
2165 break;
2166 }
2167 #endif
2168 default:
2169 if( n->is_Store() ) {
2170 // Do match stores, despite no ideal reg
2171 mem_op = true;
2172 break;
2173 }
2174 if( n->is_Mem() ) { // Loads and LoadStores
2175 mem_op = true;
2176 // Loads must be root of match tree due to prior load conflict
2177 if( C->subsume_loads() == false )
2178 set_shared(n);
2179 }
2180 // Fall into default case
2181 if( !n->ideal_reg() )
2182 set_dontcare(n); // Unmatchable Nodes
2183 } // end_switch
2184
2185 for(int i = n->req() - 1; i >= 0; --i) { // For my children
2186 Node *m = n->in(i); // Get ith input
2187 if (m == NULL) continue; // Ignore NULLs
2197 mstack.push(m, Visit);
2198 continue; // for(int i = ...)
2199 }
2200
2201 if( mop == Op_AddP && m->in(AddPNode::Base)->is_DecodeNarrowPtr()) {
2202 // Bases used in addresses must be shared but since
2203 // they are shared through a DecodeN they may appear
2204 // to have a single use so force sharing here.
2205 set_shared(m->in(AddPNode::Base)->in(1));
2206 }
2207
2208 // if 'n' and 'm' are part of a graph for BMI instruction, clone this node.
2209 #ifdef X86
2210 if (UseBMI1Instructions && is_bmi_pattern(n, m)) {
2211 mstack.push(m, Visit);
2212 continue;
2213 }
2214 #endif
2215
2216 // Clone addressing expressions as they are "free" in memory access instructions
2217 if (mem_op && i == mem_addr_idx && mop == Op_AddP &&
2218 // When there are other uses besides address expressions
2219 // put it on stack and mark as shared.
2220 !is_visited(m)) {
2221 // Some inputs for address expression are not put on stack
2222 // to avoid marking them as shared and forcing them into register
2223 // if they are used only in address expressions.
2224 // But they should be marked as shared if there are other uses
2225 // besides address expressions.
2226
2227 if (clone_address_expressions(m->as_AddP(), mstack, address_visited)) {
2228 continue;
2229 }
2230 } // if( mem_op &&
2231 mstack.push(m, Pre_Visit);
2232 } // for(int i = ...)
2233 }
2234 else if (nstate == Alt_Post_Visit) {
2235 mstack.pop(); // Remove node from stack
2236 // We cannot remove the Cmp input from the Bool here, as the Bool may be
2237 // shared and all users of the Bool need to move the Cmp in parallel.
|