src/share/vm/opto/split_if.cpp
Sdiff for 6894778

Old:

 202     }
 203     register_new_node( x, blk1->in(j) );
 204     phi->init_req( j, x );
 205   }
 206   // Announce phi to optimizer
 207   register_new_node(phi, blk1);
 208 
 209   // Remove cloned-up value from optimizer; use phi instead
 210   _igvn.hash_delete(n);
 211   _igvn.subsume_node( n, phi );
 212 
 213   // (There used to be a self-recursive call to split_up() here,
 214   // but it is not needed.  All necessary forward walking is done
 215   // by do_split_if() below.)
 216 
 217   return true;
 218 }
 219 
 220 //------------------------------register_new_node------------------------------
 221 void PhaseIdealLoop::register_new_node( Node *n, Node *blk ) {

 222   _igvn.register_new_node_with_optimizer(n);
 223   set_ctrl(n, blk);
 224   IdealLoopTree *loop = get_loop(blk);
 225   if( !loop->_child )
 226     loop->_body.push(n);
 227 }
 228 
 229 //------------------------------small_cache------------------------------------
 230 struct small_cache : public Dict {
 231 
 232   small_cache() : Dict( cmpkey, hashptr ) {}
 233   Node *probe( Node *use_blk ) { return (Node*)((*this)[use_blk]); }
 234   void lru_insert( Node *use_blk, Node *new_def ) { Insert(use_blk,new_def); }
 235 };
 236 
 237 //------------------------------spinup-----------------------------------------
 238 // "Spin up" the dominator tree, starting at the use site and stopping when we
 239 // find the post-dominating point.
 240 
 241 // We must be at the merge point which post-dominates 'new_false' and

New:

 202     }
 203     register_new_node( x, blk1->in(j) );
 204     phi->init_req( j, x );
 205   }
 206   // Announce phi to optimizer
 207   register_new_node(phi, blk1);
 208 
 209   // Remove cloned-up value from optimizer; use phi instead
 210   _igvn.hash_delete(n);
 211   _igvn.subsume_node( n, phi );
 212 
 213   // (There used to be a self-recursive call to split_up() here,
 214   // but it is not needed.  All necessary forward walking is done
 215   // by do_split_if() below.)
 216 
 217   return true;
 218 }
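
As a reading aid only, here is a condensed, hypothetical restatement of the replace-with-phi pattern that the tail of split_up() above implements; it is not an excerpt of the real function and omits the per-predecessor rewiring of each clone's inputs.

  // Hypothetical sketch (not the real split_up() body): one clone of 'n' per
  // predecessor of the merge block 'blk1' feeds a fresh Phi, and the Phi then
  // takes over every use of 'n'.
  PhiNode *phi = PhiNode::make_blank(blk1, n);   // empty phi at the merge point
  for (uint j = 1; j < blk1->req(); j++) {
    Node *x = n->clone();                        // per-predecessor copy of n
    register_new_node(x, blk1->in(j));           // lives on the j-th incoming path
    phi->init_req(j, x);
  }
  register_new_node(phi, blk1);                  // announce phi to the optimizer
  _igvn.hash_delete(n);
  _igvn.subsume_node(n, phi);                    // all uses of n now see the phi
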
 219 
 220 //------------------------------register_new_node------------------------------
 221 void PhaseIdealLoop::register_new_node( Node *n, Node *blk ) {
 222   assert(!n->is_CFG(), "must be data node");
 223   _igvn.register_new_node_with_optimizer(n);
 224   set_ctrl(n, blk);
 225   IdealLoopTree *loop = get_loop(blk);
 226   if( !loop->_child )
 227     loop->_body.push(n);
 228 }
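
For illustration, a minimal sketch of the call pattern the newly added assert at line 222 guards; 'use' and 'blk' are hypothetical names, not taken from this webrev.

  // Hypothetical caller: clone a data node into block 'blk' and hand the copy
  // to the loop optimizer.  The assert fires if a CFG node (Region, If, ...)
  // is passed by mistake; only data nodes belong here.
  Node *x = use->clone();        // 'use' is an existing data node being split up
  register_new_node(x, blk);     // registers with IGVN, records ctrl, and adds the
                                 // node to the body of the innermost enclosing loop
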
 229 
 230 //------------------------------small_cache------------------------------------
 231 struct small_cache : public Dict {
 232 
 233   small_cache() : Dict( cmpkey, hashptr ) {}
 234   Node *probe( Node *use_blk ) { return (Node*)((*this)[use_blk]); }
 235   void lru_insert( Node *use_blk, Node *new_def ) { Insert(use_blk,new_def); }
 236 };
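
Purely as a sketch, this is how a cache like small_cache is typically consulted: probe for a def already chosen for the use block, and lru_insert one on a miss. The helper below and its parameter names are hypothetical, not from this file.

  // Hypothetical helper showing the probe / lru_insert pairing: memoize the
  // def node chosen for a given use block so later queries are cheap.
  static Node *cached_def(small_cache *cache, Node *use_blk, Node *freshly_built_def) {
    Node *def = cache->probe(use_blk);      // hit: reuse the earlier answer
    if (def == NULL) {
      def = freshly_built_def;              // miss: take the caller-built def
      cache->lru_insert(use_blk, def);      // and remember it for next time
    }
    return def;
  }
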
 237 
 238 //------------------------------spinup-----------------------------------------
 239 // "Spin up" the dominator tree, starting at the use site and stopping when we
 240 // find the post-dominating point.
 241 
 242 // We must be at the merge point which post-dominates 'new_false' and

