< prev index next >

src/share/vm/opto/matcher.cpp

Print this page




2211     else if (nstate == Alt_Post_Visit) {
2212       mstack.pop(); // Remove node from stack
2213       // We cannot remove the Cmp input from the Bool here, as the Bool may be
2214       // shared and all users of the Bool need to move the Cmp in parallel.
2215       // This leaves both the Bool and the If pointing at the Cmp.  To
2216       // prevent the Matcher from trying to Match the Cmp along both paths
2217       // BoolNode::match_edge always returns a zero.
2218 
2219       // We reorder the Op_If in a pre-order manner, so we can visit without
2220       // accidentally sharing the Cmp (the Bool and the If make 2 users).
2221       n->add_req( n->in(1)->in(1) ); // Add the Cmp next to the Bool
2222     }
2223     else if (nstate == Post_Visit) {
2224       mstack.pop(); // Remove node from stack
2225 
2226       // Now hack a few special opcodes
2227       switch( n->Opcode() ) {       // Handle some opcodes special
2228       case Op_StorePConditional:
2229       case Op_StoreIConditional:
2230       case Op_StoreLConditional:


2231       case Op_CompareAndExchangeI:
2232       case Op_CompareAndExchangeL:
2233       case Op_CompareAndExchangeP:
2234       case Op_CompareAndExchangeN:


2235       case Op_WeakCompareAndSwapI:
2236       case Op_WeakCompareAndSwapL:
2237       case Op_WeakCompareAndSwapP:
2238       case Op_WeakCompareAndSwapN:


2239       case Op_CompareAndSwapI:
2240       case Op_CompareAndSwapL:
2241       case Op_CompareAndSwapP:
2242       case Op_CompareAndSwapN: {   // Convert trinary to binary-tree
2243         Node *newval = n->in(MemNode::ValueIn );
2244         Node *oldval  = n->in(LoadStoreConditionalNode::ExpectedIn);
2245         Node *pair = new BinaryNode( oldval, newval );
2246         n->set_req(MemNode::ValueIn,pair);
2247         n->del_req(LoadStoreConditionalNode::ExpectedIn);
2248         break;
2249       }
2250       case Op_CMoveD:              // Convert trinary to binary-tree
2251       case Op_CMoveF:
2252       case Op_CMoveI:
2253       case Op_CMoveL:
2254       case Op_CMoveN:
2255       case Op_CMoveP:
2256       case Op_CMoveVD:  {
2257         // Restructure into a binary tree for Matching.  It's possible that
2258         // we could move this code up next to the graph reshaping for IfNodes


2436     if ((p->as_Proj()->_con == TypeFunc::Control) &&
2437         !C->node_arena()->contains(p)) { // Unmatched old-space only
2438       ctrl = p;
2439       break;
2440     }
2441   }
2442   assert((ctrl != NULL), "missing control projection");
2443 
2444   for (DUIterator_Fast jmax, j = ctrl->fast_outs(jmax); j < jmax; j++) {
2445     Node *x = ctrl->fast_out(j);
2446     int xop = x->Opcode();
2447 
2448     // We don't need current barrier if we see another or a lock
2449     // before seeing volatile load.
2450     //
2451     // Op_FastUnlock previously appeared in the Op_* list below.
2452     // With the advent of 1-0 lock operations we're no longer guaranteed
2453     // that a monitor exit operation contains a serializing instruction.
2454 
2455     if (xop == Op_MemBarVolatile ||


2456         xop == Op_CompareAndExchangeI ||
2457         xop == Op_CompareAndExchangeL ||
2458         xop == Op_CompareAndExchangeP ||
2459         xop == Op_CompareAndExchangeN ||


2460         xop == Op_WeakCompareAndSwapL ||
2461         xop == Op_WeakCompareAndSwapP ||
2462         xop == Op_WeakCompareAndSwapN ||
2463         xop == Op_WeakCompareAndSwapI ||


2464         xop == Op_CompareAndSwapL ||
2465         xop == Op_CompareAndSwapP ||
2466         xop == Op_CompareAndSwapN ||
2467         xop == Op_CompareAndSwapI) {
2468       return true;
2469     }
2470 
2471     // Op_FastLock previously appeared in the Op_* list above.
2472     // With biased locking we're no longer guaranteed that a monitor
2473     // enter operation contains a serializing instruction.
2474     if ((xop == Op_FastLock) && !UseBiasedLocking) {
2475       return true;
2476     }
2477 
2478     if (x->is_MemBar()) {
2479       // We must retain this membar if there is an upcoming volatile
2480       // load, which will be followed by acquire membar.
2481       if (xop == Op_MemBarAcquire || xop == Op_LoadFence) {
2482         return false;
2483       } else {




2211     else if (nstate == Alt_Post_Visit) {
2212       mstack.pop(); // Remove node from stack
2213       // We cannot remove the Cmp input from the Bool here, as the Bool may be
2214       // shared and all users of the Bool need to move the Cmp in parallel.
2215       // This leaves both the Bool and the If pointing at the Cmp.  To
2216       // prevent the Matcher from trying to Match the Cmp along both paths
2217       // BoolNode::match_edge always returns a zero.
2218 
2219       // We reorder the Op_If in a pre-order manner, so we can visit without
2220       // accidentally sharing the Cmp (the Bool and the If make 2 users).
2221       n->add_req( n->in(1)->in(1) ); // Add the Cmp next to the Bool
2222     }
2223     else if (nstate == Post_Visit) {
2224       mstack.pop(); // Remove node from stack
2225 
2226       // Now hack a few special opcodes
2227       switch( n->Opcode() ) {       // Handle some opcodes special
2228       case Op_StorePConditional:
2229       case Op_StoreIConditional:
2230       case Op_StoreLConditional:
2231       case Op_CompareAndExchangeB:
2232       case Op_CompareAndExchangeS:
2233       case Op_CompareAndExchangeI:
2234       case Op_CompareAndExchangeL:
2235       case Op_CompareAndExchangeP:
2236       case Op_CompareAndExchangeN:
2237       case Op_WeakCompareAndSwapB:
2238       case Op_WeakCompareAndSwapS:
2239       case Op_WeakCompareAndSwapI:
2240       case Op_WeakCompareAndSwapL:
2241       case Op_WeakCompareAndSwapP:
2242       case Op_WeakCompareAndSwapN:
2243       case Op_CompareAndSwapB:
2244       case Op_CompareAndSwapS:
2245       case Op_CompareAndSwapI:
2246       case Op_CompareAndSwapL:
2247       case Op_CompareAndSwapP:
2248       case Op_CompareAndSwapN: {   // Convert trinary to binary-tree
2249         Node *newval = n->in(MemNode::ValueIn );
2250         Node *oldval  = n->in(LoadStoreConditionalNode::ExpectedIn);
2251         Node *pair = new BinaryNode( oldval, newval );
2252         n->set_req(MemNode::ValueIn,pair);
2253         n->del_req(LoadStoreConditionalNode::ExpectedIn);
2254         break;
2255       }
2256       case Op_CMoveD:              // Convert trinary to binary-tree
2257       case Op_CMoveF:
2258       case Op_CMoveI:
2259       case Op_CMoveL:
2260       case Op_CMoveN:
2261       case Op_CMoveP:
2262       case Op_CMoveVD:  {
2263         // Restructure into a binary tree for Matching.  It's possible that
2264         // we could move this code up next to the graph reshaping for IfNodes


2442     if ((p->as_Proj()->_con == TypeFunc::Control) &&
2443         !C->node_arena()->contains(p)) { // Unmatched old-space only
2444       ctrl = p;
2445       break;
2446     }
2447   }
2448   assert((ctrl != NULL), "missing control projection");
2449 
2450   for (DUIterator_Fast jmax, j = ctrl->fast_outs(jmax); j < jmax; j++) {
2451     Node *x = ctrl->fast_out(j);
2452     int xop = x->Opcode();
2453 
2454     // We don't need current barrier if we see another or a lock
2455     // before seeing volatile load.
2456     //
2457     // Op_FastUnlock previously appeared in the Op_* list below.
2458     // With the advent of 1-0 lock operations we're no longer guaranteed
2459     // that a monitor exit operation contains a serializing instruction.
2460 
2461     if (xop == Op_MemBarVolatile ||
2462         xop == Op_CompareAndExchangeB ||
2463         xop == Op_CompareAndExchangeS ||
2464         xop == Op_CompareAndExchangeI ||
2465         xop == Op_CompareAndExchangeL ||
2466         xop == Op_CompareAndExchangeP ||
2467         xop == Op_CompareAndExchangeN ||
2468         xop == Op_WeakCompareAndSwapB ||
2469         xop == Op_WeakCompareAndSwapS ||
2470         xop == Op_WeakCompareAndSwapL ||
2471         xop == Op_WeakCompareAndSwapP ||
2472         xop == Op_WeakCompareAndSwapN ||
2473         xop == Op_WeakCompareAndSwapI ||
2474         xop == Op_CompareAndSwapB ||
2475         xop == Op_CompareAndSwapS ||
2476         xop == Op_CompareAndSwapL ||
2477         xop == Op_CompareAndSwapP ||
2478         xop == Op_CompareAndSwapN ||
2479         xop == Op_CompareAndSwapI) {
2480       return true;
2481     }
2482 
2483     // Op_FastLock previously appeared in the Op_* list above.
2484     // With biased locking we're no longer guaranteed that a monitor
2485     // enter operation contains a serializing instruction.
2486     if ((xop == Op_FastLock) && !UseBiasedLocking) {
2487       return true;
2488     }
2489 
2490     if (x->is_MemBar()) {
2491       // We must retain this membar if there is an upcoming volatile
2492       // load, which will be followed by acquire membar.
2493       if (xop == Op_MemBarAcquire || xop == Op_LoadFence) {
2494         return false;
2495       } else {


< prev index next >