
src/share/vm/opto/loopnode.cpp

2300   if(VerifyLoopOptimizations) verify();
2301   if(TraceLoopOpts && C->has_loops()) {
2302     _ltree_root->dump();
2303   }
2304 #endif
2305 
2306   if (skip_loop_opts) {
2307     // Cleanup any modified bits
2308     _igvn.optimize();
2309 
2310     if (C->log() != NULL) {
2311       log_loop_tree(_ltree_root, _ltree_root, C->log());
2312     }
2313     return;
2314   }
2315 
2316   if (ReassociateInvariants) {
2317     // Reassociate invariants and prep for split_thru_phi
2318     for (LoopTreeIterator iter(_ltree_root); !iter.done(); iter.next()) {
2319       IdealLoopTree* lpt = iter.current();
2320       if (!lpt->is_counted() || !lpt->is_inner()) continue;
2321 
2322       lpt->reassociate_invariants(this);
2323 
2324       // Because RCE opportunities can be masked by split_thru_phi,
2325       // look for RCE candidates and inhibit split_thru_phi
2326       // on just their loop-phi's for this pass of loop opts
2327       if (SplitIfBlocks && do_split_ifs) {
2328         if (lpt->policy_range_check(this)) {
2329           lpt->_rce_candidate = 1; // = true
2330         }
2331       }
2332     }
2333   }
2334 
2335   // Check for aggressive application of split-if and other transforms
2336   // that require basic-block info (like cloning through Phi's)
2337   if( SplitIfBlocks && do_split_ifs ) {
2338     visited.Clear();
2339     split_if_with_blocks( visited, nstack );
2340     NOT_PRODUCT( if( VerifyLoopOptimizations ) verify(); );

2300   if(VerifyLoopOptimizations) verify();
2301   if(TraceLoopOpts && C->has_loops()) {
2302     _ltree_root->dump();
2303   }
2304 #endif
2305 
2306   if (skip_loop_opts) {
2307     // Cleanup any modified bits
2308     _igvn.optimize();
2309 
2310     if (C->log() != NULL) {
2311       log_loop_tree(_ltree_root, _ltree_root, C->log());
2312     }
2313     return;
2314   }
2315 
2316   if (ReassociateInvariants) {
2317     // Reassociate invariants and prep for split_thru_phi
2318     for (LoopTreeIterator iter(_ltree_root); !iter.done(); iter.next()) {
2319       IdealLoopTree* lpt = iter.current();
2320       bool is_counted = lpt->is_counted();
2321       if (!is_counted || !lpt->is_inner()) continue;
2322 
2323       // check for vectorized loops, any reassociation of invariants was already done
2324       if (is_counted && lpt->_head->as_CountedLoop()->do_unroll_only()) continue;
2325 
2326       lpt->reassociate_invariants(this);
2327 
2328       // Because RCE opportunities can be masked by split_thru_phi,
2329       // look for RCE candidates and inhibit split_thru_phi
2330       // on just their loop-phi's for this pass of loop opts
2331       if (SplitIfBlocks && do_split_ifs) {
2332         if (lpt->policy_range_check(this)) {
2333           lpt->_rce_candidate = 1; // = true
2334         }
2335       }
2336     }
2337   }
2338 
2339   // Check for aggressive application of split-if and other transforms
2340   // that require basic-block info (like cloning through Phi's)
2341   if( SplitIfBlocks && do_split_ifs ) {
2342     visited.Clear();
2343     split_if_with_blocks( visited, nstack );
2344     NOT_PRODUCT( if( VerifyLoopOptimizations ) verify(); );
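
The patched block above (new lines 2320-2324) adds an early bail-out: counted loops already marked unroll-only, i.e. loops handed over to vectorization, are skipped before reassociate_invariants runs, since their invariants were already reassociated. A minimal standalone sketch of that control flow, using hypothetical mock types (MockLoopTree, reassociate_invariants_pass) rather than the real IdealLoopTree / CountedLoopNode classes:

#include <vector>

// Hypothetical stand-in for IdealLoopTree; only the fields needed to mirror
// the control flow of the patched loop are modeled.
struct MockLoopTree {
  bool counted;      // stands in for lpt->is_counted()
  bool inner;        // stands in for lpt->is_inner()
  bool unroll_only;  // stands in for lpt->_head->as_CountedLoop()->do_unroll_only()
  bool reassociated; // set when the (mock) reassociation runs
};

// Mirrors the patched walk: non-counted or non-inner loops are skipped as
// before, and the new guard also skips counted loops marked unroll-only
// (vectorized), whose invariants were already reassociated.
static void reassociate_invariants_pass(std::vector<MockLoopTree>& loops) {
  for (MockLoopTree& lpt : loops) {
    bool is_counted = lpt.counted;
    if (!is_counted || !lpt.inner) continue;

    // New check from the patch (simplified): skip unroll-only counted loops.
    if (is_counted && lpt.unroll_only) continue;

    lpt.reassociated = true;  // stands in for lpt->reassociate_invariants(this)
  }
}

int main() {
  std::vector<MockLoopTree> loops = {
    {true,  true,  false, false},  // counted inner loop: reassociated
    {true,  true,  true,  false},  // unroll-only loop: skipped by the new guard
    {false, true,  false, false},  // not counted: skipped as before
  };
  reassociate_invariants_pass(loops);
  return 0;
}

The sketch only illustrates the order of the guards: the is_counted/is_inner test keeps its old behavior, and the new unroll-only test sits between it and the reassociation call itself.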

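The comment at new lines 2328-2330 describes a two-step interplay: loops where range-check elimination looks profitable are flagged via _rce_candidate so that split_thru_phi can later avoid cloning through exactly those loop phis, which would otherwise mask the RCE pattern. A hedged sketch of that flag-and-consult pattern, again with hypothetical stand-in types (MockLoop, MockPhi, mark_rce_candidates, may_split_thru_phi) rather than the real HotSpot API:

#include <vector>

// Hypothetical stand-ins; not the real PhaseIdealLoop / PhiNode interfaces.
struct MockLoop {
  bool counted_inner;       // loop is a counted inner loop
  bool passes_range_check;  // stands in for lpt->policy_range_check(this)
  unsigned rce_candidate;   // mirrors lpt->_rce_candidate
};

struct MockPhi {
  MockLoop* loop;           // loop whose trip phi this is, or nullptr
};

// Pass 1: while reassociating invariants, flag loops where range-check
// elimination looks profitable so later passes can see the flag.
static void mark_rce_candidates(std::vector<MockLoop>& loops) {
  for (MockLoop& lpt : loops) {
    if (lpt.counted_inner && lpt.passes_range_check) {
      lpt.rce_candidate = 1;  // = true, mirroring the HotSpot idiom
    }
  }
}

// Pass 2: a split-thru-phi style transform consults the flag and leaves the
// loop phis of RCE candidates alone for this round of loop opts, so cloning
// through the phi does not hide the range-check pattern.
static bool may_split_thru_phi(const MockPhi& phi) {
  return phi.loop == nullptr || phi.loop->rce_candidate == 0;
}

int main() {
  std::vector<MockLoop> loops = { {true, true, 0}, {true, false, 0} };
  mark_rce_candidates(loops);
  MockPhi phi{&loops[0]};
  return may_split_thru_phi(phi) ? 1 : 0;  // 0: split inhibited for an RCE candidate
}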
