1 /* 2 * Copyright (c) 2006, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "memory/allocation.inline.hpp"
#include "opto/mulnode.hpp"
#include "opto/addnode.hpp"
#include "opto/connode.hpp"
#include "opto/convertnode.hpp"
#include "opto/loopnode.hpp"
#include "opto/opaquenode.hpp"
#include "opto/rootnode.hpp"

//================= Loop Unswitching =====================
//
// orig:                       transformed:
//                             if (invariant-test) then
//   predicate                   predicate
//   loop                        loop
//     stmt1                       stmt1
//     if (invariant-test) then    stmt2
//       stmt2                     stmt4
//     else                      endloop
//       stmt3                 else
//     endif                     predicate [clone]
//     stmt4                     loop [clone]
//   endloop                       stmt1 [clone]
//                                 stmt3
//                                 stmt4 [clone]
//                               endloop
//                             endif
//
// Note: the "else" clause may be empty

//------------------------------policy_unswitching-----------------------------
// Return TRUE or FALSE if the loop should be unswitched
// (ie. clone loop with an invariant test that does not exit the loop)
bool IdealLoopTree::policy_unswitching( PhaseIdealLoop *phase ) const {
  if (!LoopUnswitching) {
    return false;
  }
  if (!_head->is_Loop()) {
    return false;
  }

  // If nodes are depleted, some transform has miscalculated its needs.
  assert(!phase->exceeding_node_budget(), "sanity");

  // check for vectorized loops, any unswitching was already applied
  if (_head->is_CountedLoop() && _head->as_CountedLoop()->is_unroll_only()) {
    return false;
  }

  LoopNode* head = _head->as_Loop();
  // Each unswitch doubles the number of loop bodies; cap how often one
  // loop may be unswitched.
  if (head->unswitch_count() + 1 > head->unswitch_max()) {
    return false;
  }

  // Already marked as the slow (mixed flattened/legacy access) copy of a
  // previous unswitch; do not unswitch it again.
  if (head->is_flattened_arrays()) {
    return false;
  }

  // Unswitch only if there is either a single invariant non-exit test or
  // at least one invariant flattened-array check (collected into the list).
  Node_List flattened_checks;
  if (phase->find_unswitching_candidate(this, flattened_checks) == NULL && flattened_checks.size() == 0) {
    return false;
  }

  // Too speculative if running low on nodes.
  return phase->may_require_nodes(est_loop_clone_sz(2));
}

//------------------------------find_unswitching_candidate-----------------------------
// Find candidate "if" for unswitching.
// Returns the invariant non-exit IfNode to unswitch on, or NULL. When the
// candidate is a flattened-array check (or no plain candidate exists), all
// invariant non-exit flattened-array checks of the loop are pushed onto
// 'flattened_checks' instead and NULL is returned.
IfNode* PhaseIdealLoop::find_unswitching_candidate(const IdealLoopTree *loop, Node_List& flattened_checks) const {

  // Find first invariant test that doesn't exit the loop.
  // Walk the idom chain up from the backedge to the loop head.
  LoopNode *head = loop->_head->as_Loop();
  IfNode* unswitch_iff = NULL;
  Node* n = head->in(LoopNode::LoopBackControl);
  while (n != head) {
    Node* n_dom = idom(n);
    if (n->is_Region()) {
      if (n_dom->is_If()) {
        IfNode* iff = n_dom->as_If();
        if (iff->in(1)->is_Bool()) {
          BoolNode* bol = iff->in(1)->as_Bool();
          if (bol->in(1)->is_Cmp()) {
            // If condition is invariant and not a loop exit,
            // then found reason to unswitch.
            if (loop->is_invariant(bol) && !loop->is_loop_exit(iff)) {
              unswitch_iff = iff;
            }
          }
        }
      }
    }
    n = n_dom;
  }

  // 'array' is an out-parameter of is_flattened_array_check; its value is
  // not used here.
  Node* array;
  if (unswitch_iff == NULL || unswitch_iff->is_flattened_array_check(&_igvn, array)) {
    // collect all flattened array checks
    for (uint i = 0; i < loop->_body.size(); i++) {
      Node* n = loop->_body.at(i);
      if (n->is_If() && n->as_If()->is_flattened_array_check(&_igvn, array) &&
          loop->is_invariant(n->in(1)) &&
          !loop->is_loop_exit(n)) {
        flattened_checks.push(n);
      }
    }
    // Signal "use the flattened_checks list" rather than a single iff.
    unswitch_iff = NULL;
  }

  return unswitch_iff;
}
//------------------------------do_unswitching-----------------------------
// Clone loop with an invariant test (that does not exit) and
// insert a clone of the test that selects which version to
// execute.
void PhaseIdealLoop::do_unswitching(IdealLoopTree *loop, Node_List &old_new) {

  // Find first invariant test that doesn't exit the loop
  LoopNode *head = loop->_head->as_Loop();

  Node_List flattened_checks;
  IfNode* unswitch_iff = find_unswitching_candidate((const IdealLoopTree *)loop, flattened_checks);
  assert(unswitch_iff != NULL || flattened_checks.size() > 0, "should be at least one");
  if (unswitch_iff == NULL) {
    // No single candidate: unswitch on the combined flattened-array checks;
    // use the first one as the representative test.
    unswitch_iff = flattened_checks.at(0)->as_If();
  }

#ifndef PRODUCT
  if (TraceLoopOpts) {
    tty->print("Unswitch %d ", head->unswitch_count()+1);
    loop->dump_head();
  }
#endif

  // Need to revert back to normal loop
  if (head->is_CountedLoop() && !head->as_CountedLoop()->is_normal_loop()) {
    head->as_CountedLoop()->set_normal_loop();
  }

  // Clone the loop body; the clone becomes the fast loop, guarded by a new
  // if (currently always-true) whose true projection is returned.
  ProjNode* proj_true = create_slow_version_of_loop(loop, old_new, unswitch_iff->Opcode(), CloneIncludesStripMined);

#ifdef ASSERT
  // Verify that proj_true reaches the fast loop head, possibly through
  // cloned loop predicates.
  Node* uniqc = proj_true->unique_ctrl_out();
  Node* entry = head->skip_strip_mined()->in(LoopNode::EntryControl);
  Node* predicate = find_predicate(entry);
  if (predicate != NULL) {
    entry = skip_loop_predicates(entry);
  }
  if (predicate != NULL && UseLoopPredicate) {
    // We may have two predicates, find first.
    Node* n = find_predicate(entry);
    if (n != NULL) {
      predicate = n;
      entry = skip_loop_predicates(entry);
    }
  }
  if (predicate != NULL && UseProfiledLoopPredicate) {
    entry = find_predicate(entry);
    if (entry != NULL) predicate = entry;
  }
  if (predicate != NULL) predicate = predicate->in(0);
  assert(proj_true->is_IfTrue() &&
         (predicate == NULL && uniqc == head && !head->is_strip_mined() ||
          predicate == NULL && uniqc == head->in(LoopNode::EntryControl) && head->is_strip_mined() ||
          predicate != NULL && uniqc == predicate), "by construction");
#endif
  // Increment unswitch count
  LoopNode* head_clone = old_new[head->_idx]->as_Loop();
  int nct = head->unswitch_count() + 1;
  head->set_unswitch_count(nct);
  head_clone->set_unswitch_count(nct);
  // The clone is the slow loop: it may still see flattened-array accesses.
  head_clone->mark_flattened_arrays();

  // Add test to new "if" outside of loop
  IfNode* invar_iff = proj_true->in(0)->as_If();
  Node* invar_iff_c = invar_iff->in(0);
  invar_iff->_prob = unswitch_iff->_prob;
  if (flattened_checks.size() > 0) {
    // Flattened array checks are used in
    // Parse::array_store()/Parse::array_load() to switch between a
    // legacy object array access and a flattened value array
    // access. We want the performance impact on legacy accesses to be
    // as small as possible so we make 2 copies of the loops: a fast
    // one where all accesses are known to be legacy, a slow one where
    // some accesses are to flattened arrays. Flattened array checks
    // can be removed from the first one but not from the second one
    // as it can have a mix of flattened/legacy accesses.
    //
    // Build a single OR-combined test out of all collected checks:
    // clone one check's Bool/Cmp, then OR together the masked layout
    // helper bits of every checked array.
    BoolNode* bol = unswitch_iff->in(1)->clone()->as_Bool();
    register_new_node(bol, invar_iff->in(0));
    Node* cmp = bol->in(1)->clone();
    register_new_node(cmp, invar_iff->in(0));
    bol->set_req(1, cmp);
    Node* in1 = NULL;
    for (uint i = 0; i < flattened_checks.size(); i++) {
      // in(1)->in(1)->in(1) digs through Bool->Cmp to the tested value
      // (presumably the array's layout-helper load — see
      // is_flattened_array_check for the exact shape).
      Node* v = flattened_checks.at(i)->in(1)->in(1)->in(1);
      v = new AndINode(v, _igvn.intcon(Klass::_lh_array_tag_vt_value));
      register_new_node(v, invar_iff->in(0));
      if (in1 == NULL) {
        in1 = v;
      } else {
        in1 = new OrINode(in1, v);
        register_new_node(in1, invar_iff->in(0));
      }
    }
    cmp->set_req(1, in1);
    invar_iff->set_req(1, bol);
  } else {
    BoolNode* bol = unswitch_iff->in(1)->as_Bool();
    invar_iff->set_req(1, bol);
  }

  ProjNode* proj_false = invar_iff->proj_out(0)->as_Proj();

  // Hoist invariant casts out of each loop to the appropriate
  // control projection.

  Node_List worklist;

  for (DUIterator_Fast imax, i = unswitch_iff->fast_outs(imax); i < imax; i++) {
    ProjNode* proj= unswitch_iff->fast_out(i)->as_Proj();
    // Copy to a worklist for easier manipulation
    for (DUIterator_Fast jmax, j = proj->fast_outs(jmax); j < jmax; j++) {
      Node* use = proj->fast_out(j);
      if (use->Opcode() == Op_CheckCastPP && loop->is_invariant(use->in(1))) {
        worklist.push(use);
      }
    }
    // Redirect each invariant CheckCastPP to the matching projection of
    // the hoisted test so it dominates both loop copies.
    ProjNode* invar_proj = invar_iff->proj_out(proj->_con)->as_Proj();
    while (worklist.size() > 0) {
      Node* use = worklist.pop();
      Node* nuse = use->clone();
      nuse->set_req(0, invar_proj);
      _igvn.replace_input_of(use, 1, nuse);
      register_new_node(nuse, invar_proj);
      // Same for the clone
      Node* use_clone = old_new[use->_idx];
      _igvn.replace_input_of(use_clone, 1, nuse);
    }
  }

  IfNode* unswitch_iff_clone = old_new[unswitch_iff->_idx]->as_If();
  if (flattened_checks.size() > 0) {
    // Fast loop: all flattened-array checks are known to fail; remove them.
    // The slow (cloned) loop keeps its checks since it may see a mix of
    // flattened/legacy accesses.
    for (uint i = 0; i < flattened_checks.size(); i++) {
      IfNode* iff = flattened_checks.at(i)->as_If();
      _igvn.rehash_node_delayed(iff);
      short_circuit_if(iff, proj_true);
    }
  } else {
    // Hardwire the control paths in the loops into if(true) and if(false)
    _igvn.rehash_node_delayed(unswitch_iff);
    short_circuit_if(unswitch_iff, proj_true);

    _igvn.rehash_node_delayed(unswitch_iff_clone);
    short_circuit_if(unswitch_iff_clone, proj_false);
  }

  // Reoptimize loops
  loop->record_for_igvn();
  for(int i = loop->_body.size() - 1; i >= 0 ; i--) {
    Node *n = loop->_body[i];
    Node *n_clone = old_new[n->_idx];
    _igvn._worklist.push(n_clone);
  }

#ifndef PRODUCT
  if (TraceLoopUnswitching) {
    tty->print_cr("Loop unswitching orig: %d @ %d new: %d @ %d",
                  head->_idx, unswitch_iff->_idx,
                  old_new[head->_idx]->_idx, unswitch_iff_clone->_idx);
  }
#endif

  C->set_major_progress();
}

//-------------------------create_slow_version_of_loop------------------------
// Create a slow version of the loop by cloning the loop
// and inserting an if to select fast-slow versions.
// Return control projection of the entry to the fast version.
ProjNode* PhaseIdealLoop::create_slow_version_of_loop(IdealLoopTree *loop,
                                                      Node_List &old_new,
                                                      int opcode,
                                                      CloneLoopMode mode) {
  LoopNode* head = loop->_head->as_Loop();
  bool counted_loop = head->is_CountedLoop();
  Node* entry = head->skip_strip_mined()->in(LoopNode::EntryControl);
  _igvn.rehash_node_delayed(entry);
  IdealLoopTree* outer_loop = loop->_parent;

  head->verify_strip_mined(1);

  // Build a placeholder always-true condition (Opaque1 hides the constant
  // from igvn until do_unswitching installs the real test).
  Node *cont = _igvn.intcon(1);
  set_ctrl(cont, C->root());
  Node* opq = new Opaque1Node(C, cont);
  register_node(opq, outer_loop, entry, dom_depth(entry));
  Node *bol = new Conv2BNode(opq);
  register_node(bol, outer_loop, entry, dom_depth(entry));
  // Keep the node type of the original test so range-check-specific
  // optimizations still apply to the hoisted copy.
  IfNode* iff = (opcode == Op_RangeCheck) ? new RangeCheckNode(entry, bol, PROB_MAX, COUNT_UNKNOWN) :
    new IfNode(entry, bol, PROB_MAX, COUNT_UNKNOWN);
  register_node(iff, outer_loop, entry, dom_depth(entry));
  ProjNode* iffast = new IfTrueNode(iff);
  register_node(iffast, outer_loop, iff, dom_depth(iff));
  ProjNode* ifslow = new IfFalseNode(iff);
  register_node(ifslow, outer_loop, iff, dom_depth(iff));

  // Clone the loop body. The clone becomes the fast loop. The
  // original pre-header will (illegally) have 3 control users
  // (old & new loops & new if).
  clone_loop(loop, old_new, dom_depth(head->skip_strip_mined()), mode, iff);
  assert(old_new[head->_idx]->is_Loop(), "" );

  // Fast (true) control
  Node* iffast_pred = clone_loop_predicates(entry, iffast, !counted_loop);

  // Slow (false) control
  Node* ifslow_pred = clone_loop_predicates(entry, ifslow, !counted_loop);

  // Re-wire both loop entries below their respective projections (after
  // any cloned predicates) and fix up idom info.
  Node* l = head->skip_strip_mined();
  _igvn.replace_input_of(l, LoopNode::EntryControl, iffast_pred);
  set_idom(l, iffast_pred, dom_depth(l));
  LoopNode* slow_l = old_new[head->_idx]->as_Loop()->skip_strip_mined();
  _igvn.replace_input_of(slow_l, LoopNode::EntryControl, ifslow_pred);
  set_idom(slow_l, ifslow_pred, dom_depth(l));

  recompute_dom_depth();

  return iffast;
}

//----------------------create_reserve_version_of_loop------------------------
// Clone the loop as a "reserve" copy guarded by a constant-condition if.
// The returned clone can be reverted to (by flipping the if to false) if a
// later transform is abandoned; see CountedLoopReserveKit.
LoopNode* PhaseIdealLoop::create_reserve_version_of_loop(IdealLoopTree *loop, CountedLoopReserveKit* lk) {
  Node_List old_new;
  LoopNode* head = loop->_head->as_Loop();
  bool counted_loop = head->is_CountedLoop();
  Node* entry = head->skip_strip_mined()->in(LoopNode::EntryControl);
  _igvn.rehash_node_delayed(entry);
  IdealLoopTree* outer_loop = head->is_strip_mined() ? loop->_parent->_parent : loop->_parent;

  // Guard if: initially constant-true, so the original (fast) loop runs and
  // the reserve clone is dead. The kit may later flip it to constant-false.
  ConINode* const_1 = _igvn.intcon(1);
  set_ctrl(const_1, C->root());
  IfNode* iff = new IfNode(entry, const_1, PROB_MAX, COUNT_UNKNOWN);
  register_node(iff, outer_loop, entry, dom_depth(entry));
  ProjNode* iffast = new IfTrueNode(iff);
  register_node(iffast, outer_loop, iff, dom_depth(iff));
  ProjNode* ifslow = new IfFalseNode(iff);
  register_node(ifslow, outer_loop, iff, dom_depth(iff));

  // Clone the loop body. The clone becomes the fast loop. The
  // original pre-header will (illegally) have 3 control users
  // (old & new loops & new if).
  clone_loop(loop, old_new, dom_depth(head), CloneIncludesStripMined, iff);
  assert(old_new[head->_idx]->is_Loop(), "" );

  LoopNode* slow_head = old_new[head->_idx]->as_Loop();

#ifndef PRODUCT
  if (TraceLoopOpts) {
    tty->print_cr("PhaseIdealLoop::create_reserve_version_of_loop:");
    tty->print("\t iff = %d, ", iff->_idx); iff->dump();
    tty->print("\t iffast = %d, ", iffast->_idx); iffast->dump();
    tty->print("\t ifslow = %d, ", ifslow->_idx); ifslow->dump();
    tty->print("\t before replace_input_of: head = %d, ", head->_idx); head->dump();
    tty->print("\t before replace_input_of: slow_head = %d, ", slow_head->_idx); slow_head->dump();
  }
#endif

  // Fast (true) control
  _igvn.replace_input_of(head->skip_strip_mined(), LoopNode::EntryControl, iffast);
  // Slow (false) control
  _igvn.replace_input_of(slow_head->skip_strip_mined(), LoopNode::EntryControl, ifslow);

  recompute_dom_depth();

  // Remember the guard so the kit's destructor can flip it.
  lk->set_iff(iff);

#ifndef PRODUCT
  if (TraceLoopOpts ) {
    tty->print("\t after replace_input_of: head = %d, ", head->_idx); head->dump();
    tty->print("\t after replace_input_of: slow_head = %d, ", slow_head->_idx); slow_head->dump();
  }
#endif

  return slow_head->as_Loop();
}

//--------------------------CountedLoopReserveKit------------------------------
// RAII helper: creates a reserve copy of the loop on construction; unless
// use_new() is called before destruction, the destructor reverts control
// flow to the reserved copy. 'active == false' makes the kit a no-op.
CountedLoopReserveKit::CountedLoopReserveKit(PhaseIdealLoop* phase, IdealLoopTree *loop, bool active = true) :
  _phase(phase),
  _lpt(loop),
  _lp(NULL),
  _iff(NULL),
  _lp_reserved(NULL),
  _has_reserved(false),
  _use_new(false),
  _active(active)
  {
    create_reserve();
  };

CountedLoopReserveKit::~CountedLoopReserveKit() {
  if (!_active) {
    return;
  }

  // Transform was abandoned: discard the modified loop by reverting the
  // guard to the reserved clone.
  if (_has_reserved && !_use_new) {
    // intcon(0)->iff-node reverts CF to the reserved copy
    ConINode* const_0 = _phase->_igvn.intcon(0);
    _phase->set_ctrl(const_0, _phase->C->root());
    _iff->set_req(1, const_0);

#ifndef PRODUCT
    if (TraceLoopOpts) {
      tty->print_cr("CountedLoopReserveKit::~CountedLoopReserveKit()");
      tty->print("\t discard loop %d and revert to the reserved loop clone %d: ", _lp->_idx, _lp_reserved->_idx);
      _lp_reserved->dump();
    }
#endif
  }
}

//------------------------------create_reserve---------------------------------
// Create the reserve copy of the loop if it is a suitable counted loop.
// Returns (and records in _has_reserved) whether a valid reserve was made.
bool CountedLoopReserveKit::create_reserve() {
  if (!_active) {
    return false;
  }

  if(!_lpt->_head->is_CountedLoop()) {
    if (TraceLoopOpts) {
      tty->print_cr("CountedLoopReserveKit::create_reserve: %d not counted loop", _lpt->_head->_idx);
    }
    return false;
  }
  CountedLoopNode *cl = _lpt->_head->as_CountedLoop();
  if (!cl->is_valid_counted_loop()) {
    if (TraceLoopOpts) {
      tty->print_cr("CountedLoopReserveKit::create_reserve: %d not valid counted loop", cl->_idx);
    }
    return false; // skip malformed counted loop
  }
  if (!cl->is_main_loop()) {
    bool loop_not_canonical = true;
    // Post loops that are candidates for vectorization (slp_max_unroll set)
    // are also accepted.
    if (cl->is_post_loop() && (cl->slp_max_unroll() > 0)) {
      loop_not_canonical = false;
    }
    // only reject some loop forms
    if (loop_not_canonical) {
      if (TraceLoopOpts) {
        tty->print_cr("CountedLoopReserveKit::create_reserve: %d not canonical loop", cl->_idx);
      }
      return false; // skip normal, pre, and post (conditionally) loops
    }
  }

  _lp = _lpt->_head->as_Loop();
  _lp_reserved = _phase->create_reserve_version_of_loop(_lpt, this);

  if (!_lp_reserved->is_CountedLoop()) {
    return false;
  }

  // Sanity-check the shape built by create_reserve_version_of_loop:
  // reserved loop entered through the IfFalse projection of our guard,
  // whose condition is still the constant.
  Node* ifslow_pred = _lp_reserved->skip_strip_mined()->in(LoopNode::EntryControl);

  if (!ifslow_pred->is_IfFalse()) {
    return false;
  }

  Node* iff = ifslow_pred->in(0);
  if (!iff->is_If() || iff != _iff) {
    return false;
  }

  if (iff->in(1)->Opcode() != Op_ConI) {
    return false;
  }

  return _has_reserved = true;
}