src/share/vm/opto/block.cpp

 192 // executed infrequently.  Check to see if the block ends in a Halt or
 193 // a low probability call.
 194 bool Block::has_uncommon_code() const {
 195   Node* en = end();
 196 
 197   if (en->is_MachGoto())
 198     en = en->in(0);
 199   if (en->is_Catch())
 200     en = en->in(0);
 201   if (en->is_MachProj() && en->in(0)->is_MachCall()) {
 202     MachCallNode* call = en->in(0)->as_MachCall();
 203     if (call->cnt() != COUNT_UNKNOWN && call->cnt() <= PROB_UNLIKELY_MAG(4)) {
 204       // This is true for slow-path stubs like new_{instance,array},
 205       // slow_arraycopy, complete_monitor_locking, uncommon_trap.
 206       // The magic number corresponds to the probability of an uncommon_trap,
 207       // even though it is a count not a probability.
 208       return true;
 209     }
 210   }
 211 
 212   int op = en->is_Mach() ? en->as_Mach()->ideal_Opcode() : en->Opcode();
 213   return op == Op_Halt;
 214 }
 215 
 216 // True if the block has a low enough frequency, or is guarded by a test
 217 // which mostly does not go here.
 218 bool PhaseCFG::is_uncommon(const Block* block) {
 219   // Initial blocks must never be moved, so are never uncommon.
 220   if (block->head()->is_Root() || block->head()->is_Start())  return false;
 221 
 222   // Check for way-low freq
 223   if(block->_freq < BLOCK_FREQUENCY(0.00001f) ) return true;
 224 
 225   // Look for code shape indicating uncommon_trap or slow path
 226   if (block->has_uncommon_code()) return true;
 227 
 228   const float epsilon = 0.05f;
 229   const float guard_factor = PROB_UNLIKELY_MAG(4) / (1.f - epsilon);
 230   uint uncommon_preds = 0;
 231   uint freq_preds = 0;
 232   uint uncommon_for_freq_preds = 0;
 233 
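For reference, a minimal standalone sketch of the visible part of this heuristic (not HotSpot code): MockBlock, kTinyFreq and kProbUnlikelyMag4 are illustrative stand-ins for Block, BLOCK_FREQUENCY(0.00001f) and PROB_UNLIKELY_MAG(4), and the predecessor scan that the real function continues with is elided here, just as it is in the hunk.

#include <cstdio>

struct MockBlock {
  float freq;               // estimated execution frequency
  bool  has_uncommon_code;  // block ends in a Halt or a cold slow-path call
};

static bool looks_uncommon(const MockBlock& b) {
  const float kTinyFreq         = 0.00001f;  // "way-low" frequency cutoff
  const float kProbUnlikelyMag4 = 1e-4f;     // magnitude of an uncommon-trap probability
  const float epsilon           = 0.05f;
  const float guard_factor      = kProbUnlikelyMag4 / (1.f - epsilon);

  if (b.freq < kTinyFreq)  return true;      // way-low frequency
  if (b.has_uncommon_code) return true;      // uncommon_trap / slow-path shape

  (void)guard_factor;  // consumed by the (elided) predecessor scan
  return false;
}

int main() {
  MockBlock hot  = { 10.0f,     false };
  MockBlock cold = { 0.000001f, false };
  std::printf("hot=%d cold=%d\n", looks_uncommon(hot), looks_uncommon(cold));
  return 0;
}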


 530   block->_freq = freq;
 531   // add new basic block to basic block list
 532   add_block_at(block_no + 1, block);
 533 }
 534 
 535 // Does this block end in a multiway branch that cannot have the default case
 536 // flipped for another case?
 537 static bool no_flip_branch(Block *b) {
 538   int branch_idx = b->number_of_nodes() - b->_num_succs-1;
 539   if (branch_idx < 1) {
 540     return false;
 541   }
 542   Node *branch = b->get_node(branch_idx);
 543   if (branch->is_Catch()) {
 544     return true;
 545   }
 546   if (branch->is_Mach()) {
 547     if (branch->is_MachNullCheck()) {
 548       return true;
 549     }
 550     int iop = branch->as_Mach()->ideal_Opcode();
 551     if (iop == Op_FastLock || iop == Op_FastUnlock) {
 552       return true;
 553     }
 554     // Don't flip if branch has an implicit check.
 555     if (branch->as_Mach()->is_TrapBasedCheckNode()) {
 556       return true;
 557     }
 558   }
 559   return false;
 560 }
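
As an aside, the index arithmetic at the top of no_flip_branch() relies on the block layout in which the branch node is scheduled immediately before its _num_succs projections, which are the last nodes of the block. A small illustrative model of that invariant (ToyBlock and its fields are hypothetical, not HotSpot types):

#include <cassert>
#include <string>
#include <vector>

struct ToyBlock {
  std::vector<std::string> nodes;  // scheduled nodes; successor projections last
  int num_succs;                   // number of successor projections
};

static std::string branch_of(const ToyBlock& b) {
  // Mirrors: branch_idx = number_of_nodes() - _num_succs - 1
  int branch_idx = (int)b.nodes.size() - b.num_succs - 1;
  assert(branch_idx >= 1 && "block too small to end in a guarded branch");
  return b.nodes[branch_idx];
}

int main() {
  ToyBlock b = { { "Region", "Load", "Cmp", "If", "IfTrue", "IfFalse" }, 2 };
  assert(branch_of(b) == "If");
  return 0;
}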
 561 
 562 // Check for a NeverBranch at the block end.  This needs to become a GOTO to
 563 // the true target.  NeverBranch nodes are treated as conditional branches that
 564 // always go the same direction for most of the optimizer, and are used to give
 565 // a fake exit path to infinite loops.  At this late stage they need to turn
 566 // into Gotos so that when you enter the infinite loop you indeed hang.
 567 void PhaseCFG::convert_NeverBranch_to_Goto(Block *b) {
 568   // Find true target
 569   int end_idx = b->end_idx();
 570   int idx = b->get_node(end_idx+1)->as_Proj()->_con;
 571   Block *succ = b->_succs[idx];


 663 
 664 // Make empty basic blocks into "connector" blocks, and move uncommon blocks
 665 // to the end.
 666 void PhaseCFG::remove_empty_blocks() {
 667   // Move uncommon blocks to the end
 668   uint last = number_of_blocks();
 669   assert(get_block(0) == get_root_block(), "");
 670 
 671   for (uint i = 1; i < last; i++) {
 672     Block* block = get_block(i);
 673     if (block->is_connector()) {
 674       break;
 675     }
 676 
 677     // Check for a NeverBranch at the block end.  This needs to become a GOTO
 678     // to the true target.  NeverBranch nodes are treated as conditional
 679     // branches that always go the same direction for most of the optimizer,
 680     // and are used to give a fake exit path to infinite loops.  At this late
 681     // stage they need to turn into Gotos so that when you enter the infinite
 682     // loop you indeed hang.
 683     if (block->get_node(block->end_idx())->Opcode() == Op_NeverBranch) {
 684       convert_NeverBranch_to_Goto(block);
 685     }
 686 
 687     // Look for uncommon blocks and move to end.
 688     if (!C->do_freq_based_layout()) {
 689       if (is_uncommon(block)) {
 690         move_to_end(block, i);
 691         last--;                   // No longer check for being uncommon!
 692         if (no_flip_branch(block)) { // Fall-thru case must follow?
 693           // Find the fall-thru block
 694           block = get_block(i);
 695           move_to_end(block, i);
 696           last--;
 697         }
 698         // backup block counter post-increment
 699         i--;
 700       }
 701     }
 702   }
 703 
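The traversal above uses a pattern that is easy to get wrong: after move_to_end(), the loop bound shrinks (last--) so the moved block is not revisited, and the index backs up (i--) so the block that slid into slot i gets examined on the next iteration. A self-contained sketch of the same pattern on a plain vector (nothing here is HotSpot code; element 0 is pinned, like the root block):

#include <cstdio>
#include <vector>

static void move_matches_to_end(std::vector<int>& v, bool (*match)(int)) {
  size_t last = v.size();
  for (size_t i = 1; i < last; i++) {   // slot 0 stays put, like the root block
    if (match(v[i])) {
      int moved = v[i];
      v.erase(v.begin() + i);           // analogous to move_to_end(block, i)
      v.push_back(moved);
      last--;                           // moved element is never re-checked
      i--;                              // re-examine whatever slid into slot i
    }
  }
}

int main() {
  std::vector<int> v = { 1, -2, 3, -4, -5, 6 };
  move_matches_to_end(v, [](int x) { return x < 0; });  // negatives ~ "uncommon"
  for (int x : v) std::printf("%d ", x);                // prints: 1 3 6 -2 -4 -5
  std::printf("\n");
  return 0;
}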


 707     Block* block = get_block(i);
 708     if (block->is_Empty() != Block::not_empty) {
 709       move_to_end(block, i);
 710       last--;
 711       i--;
 712     }
 713   } // End of for all blocks
 714 }
 715 
 716 Block *PhaseCFG::fixup_trap_based_check(Node *branch, Block *block, int block_pos, Block *bnext) {
 717   // Trap-based checks must fall through to the successor with
 718   // PROB_ALWAYS.
 719   // The check should be an If with 2 successors.
 720   assert(branch->is_MachIf(),   "must be If");
 721   assert(block->_num_succs == 2, "must have 2 successors");
 722 
 723   // Get the If node and the projection for the first successor.
 724   MachIfNode *iff   = block->get_node(block->number_of_nodes()-3)->as_MachIf();
 725   ProjNode   *proj0 = block->get_node(block->number_of_nodes()-2)->as_Proj();
 726   ProjNode   *proj1 = block->get_node(block->number_of_nodes()-1)->as_Proj();
 727   ProjNode   *projt = (proj0->Opcode() == Op_IfTrue)  ? proj0 : proj1;
 728   ProjNode   *projf = (proj0->Opcode() == Op_IfFalse) ? proj0 : proj1;
 729 
 730   // Assert that proj0 and succs[0] match up. Similarly for proj1 and succs[1].
 731   assert(proj0->raw_out(0) == block->_succs[0]->head(), "Mismatch successor 0");
 732   assert(proj1->raw_out(0) == block->_succs[1]->head(), "Mismatch successor 1");
 733 
 734   ProjNode *proj_always;
 735   ProjNode *proj_never;
 736   // We must negate the branch if the implicit check doesn't follow
 737   // the branch's TRUE path. Then, the new TRUE branch target will
 738   // be the old FALSE branch target.
 739   if (iff->_prob <= 2*PROB_NEVER) {   // There are small rounding errors.
 740     proj_never  = projt;
 741     proj_always = projf;
 742   } else {
 743     // We must negate the branch if the trap doesn't follow the
 744     // branch's TRUE path. Then, the new TRUE branch target will
 745     // be the old FALSE branch target.
 746     proj_never  = projf;
 747     proj_always = projt;
 748     iff->negate();


 853       // Assert that proj0 and succs[0] match up. Similarly for proj1 and succs[1].
 854       assert(proj0->raw_out(0) == block->_succs[0]->head(), "Mismatch successor 0");
 855       assert(proj1->raw_out(0) == block->_succs[1]->head(), "Mismatch successor 1");
 856 
 857       Block* bs1 = block->non_connector_successor(1);
 858 
 859       // Check whether neither successor block follows the current
 860       // block, which ends in a conditional. If so, move one of the
 861       // successors after the current one, provided that the
 862       // successor was previously unscheduled, but moveable
 863       // (i.e., all paths to it involve a branch).
 864       if (!C->do_freq_based_layout() && bnext != bs0 && bnext != bs1) {
 865         // Choose the more common successor based on the probability
 866         // of the conditional branch.
 867         Block* bx = bs0;
 868         Block* by = bs1;
 869 
 870         // _prob is the probability of taking the true path. Make
 871         // p the probability of taking successor #1.
 872         float p = iff->as_MachIf()->_prob;
 873         if (proj0->Opcode() == Op_IfTrue) {
 874           p = 1.0 - p;
 875         }
 876 
 877         // Prefer successor #1 if p > 0.5
 878         if (p > PROB_FAIR) {
 879           bx = bs1;
 880           by = bs0;
 881         }
 882 
 883         // Attempt the more common successor first
 884         if (move_to_next(bx, i)) {
 885           bnext = bx;
 886         } else if (move_to_next(by, i)) {
 887           bnext = by;
 888         }
 889       }
 890 
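A small sketch of the probability bookkeeping above (not HotSpot code): _prob is the probability of the TRUE path, so the probability of reaching successor #1 depends on whether successor #0 is fed by the IfTrue or the IfFalse projection. kProbFair stands in for PROB_FAIR.

#include <cstdio>

// Returns which successor (0 or 1) to try to place next, preferring the
// more common one.
static int preferred_successor(float prob_true, bool succ0_is_true_path) {
  const float kProbFair = 0.5f;
  float p = succ0_is_true_path ? 1.0f - prob_true : prob_true;  // P(successor #1)
  return (p > kProbFair) ? 1 : 0;
}

int main() {
  std::printf("%d\n", preferred_successor(0.9f, /*succ0_is_true_path=*/true));   // 0
  std::printf("%d\n", preferred_successor(0.9f, /*succ0_is_true_path=*/false));  // 1
  return 0;
}
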
 891       // Check for conditional branching the wrong way.  Negate
 892       // conditional, if needed, so it falls into the following block
 893       // and branches to the not-following block.


 899         // Fall-thru case in succs[0], so flip targets in succs map
 900         Block* tbs0 = block->_succs[0];
 901         Block* tbs1 = block->_succs[1];
 902         block->_succs.map(0, tbs1);
 903         block->_succs.map(1, tbs0);
 904         // Flip projection for each target
 905         ProjNode* tmp = proj0;
 906         proj0 = proj1;
 907         proj1 = tmp;
 908 
 909       } else if(bnext != bs1) {
 910         // Need a double-branch
 911         // The existing conditional branch need not change.
 912         // Add an unconditional branch to the false target.
 913         // Alas, it must appear in its own block and adding a
 914         // block this late in the game is complicated.  Sigh.
 915         insert_goto_at(i, 1);
 916       }
 917 
 918       // Make sure we TRUE branch to the target
 919       if (proj0->Opcode() == Op_IfFalse) {
 920         iff->as_MachIf()->negate();
 921       }
 922 
 923       block->pop_node();          // Remove IfFalse & IfTrue projections
 924       block->pop_node();
 925 
 926     } else {
 927       // Multi-exit block, e.g. a switch statement
 928       // But we don't need to do anything here
 929     }
 930   } // End of for all blocks
 931 }
 932 
 933 
 934 // postalloc_expand: Expand nodes after register allocation.
 935 //
 936 // postalloc_expand has to be called after register allocation, just
 937 // before output (i.e. scheduling). It only gets called if
 938 // Matcher::require_postalloc_expand is true.
 939 //


1192 
1193 void PhaseCFG::dump_headers() {
1194   for (uint i = 0; i < number_of_blocks(); i++) {
1195     Block* block = get_block(i);
1196     if (block != NULL) {
1197       block->dump_head(this);
1198     }
1199   }
1200 }
1201 
1202 void PhaseCFG::verify() const {
1203 #ifdef ASSERT
1204   // Verify sane CFG
1205   for (uint i = 0; i < number_of_blocks(); i++) {
1206     Block* block = get_block(i);
1207     uint cnt = block->number_of_nodes();
1208     uint j;
1209     for (j = 0; j < cnt; j++)  {
1210       Node *n = block->get_node(j);
1211       assert(get_block_for_node(n) == block, "");
1212       if (j >= 1 && n->is_Mach() && n->as_Mach()->ideal_Opcode() == Op_CreateEx) {
1213         assert(j == 1 || block->get_node(j-1)->is_Phi(), "CreateEx must be first instruction in block");
1214       }
1215       for (uint k = 0; k < n->req(); k++) {
1216         Node *def = n->in(k);
1217         if (def && def != n) {
1218           assert(get_block_for_node(def) || def->is_Con(), "must have block; constants for debug info ok");
1219           // Verify that instructions in the block are in correct order.
1220           // Uses must follow their definition if they are in the same block.
1221           // Mostly done to check that MachSpillCopy nodes are placed correctly
1222           // when CreateEx node is moved in build_ifg_physical().
1223           if (get_block_for_node(def) == block && !(block->head()->is_Loop() && n->is_Phi()) &&
1224               // See (+++) comment in reg_split.cpp
1225               !(n->jvms() != NULL && n->jvms()->is_monitor_use(k))) {
1226             bool is_loop = false;
1227             if (n->is_Phi()) {
1228               for (uint l = 1; l < def->req(); l++) {
1229                 if (n == def->in(l)) {
1230                   is_loop = true;
1231                   break; // Some kind of loop
1232                 }
1233               }
1234             }
1235             assert(is_loop || block->find_node(def) < j, "uses must follow definitions");
1236           }
1237         }
1238       }
1239     }
1240 
1241     j = block->end_idx();
1242     Node* bp = (Node*)block->get_node(block->number_of_nodes() - 1)->is_block_proj();
1243     assert(bp, "last instruction must be a block proj");
1244     assert(bp == block->get_node(j), "wrong number of successors for this block");
1245     if (bp->is_Catch()) {
1246       while (block->get_node(--j)->is_MachProj()) {
1247         ;
1248       }
1249       assert(block->get_node(j)->is_MachCall(), "CatchProj must follow call");
1250     } else if (bp->is_Mach() && bp->as_Mach()->ideal_Opcode() == Op_If) {
1251       assert(block->_num_succs == 2, "Conditional branch must have two targets");
1252     }
1253   }
1254 #endif
1255 }
1256 #endif
1257 
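The use-before-definition assertion in verify() reduces to a simple ordering property: within one block, every input of a scheduled node must have been scheduled earlier (Phi back edges excepted). A self-contained sketch of that check on a toy schedule (ToyNode and the index-based inputs are hypothetical, not HotSpot types):

#include <cassert>
#include <vector>

struct ToyNode {
  std::vector<int> inputs;   // indices (within the same block) of defining nodes
};

static bool uses_follow_defs(const std::vector<ToyNode>& schedule) {
  for (size_t j = 0; j < schedule.size(); j++) {
    for (int def : schedule[j].inputs) {
      if (def >= 0 && (size_t)def >= j) {
        return false;        // a definition is scheduled at or after its use
      }
    }
  }
  return true;
}

int main() {
  std::vector<ToyNode> ok  = { { {} }, { {0} } };  // def at 0, use at 1
  std::vector<ToyNode> bad = { { {1} }, { {} } };  // use at 0 precedes its def
  assert(uses_follow_defs(ok));
  assert(!uses_follow_defs(bad));
  return 0;
}
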
1258 UnionFind::UnionFind( uint max ) : _cnt(max), _max(max), _indices(NEW_RESOURCE_ARRAY(uint,max)) {
1259   Copy::zero_to_bytes( _indices, sizeof(uint)*max );
1260 }
1261 
1262 void UnionFind::extend( uint from_idx, uint to_idx ) {
1263   _nesting.check();
1264   if( from_idx >= _max ) {
1265     uint size = 16;
1266     while( size <= from_idx ) size <<=1;
1267     _indices = REALLOC_RESOURCE_ARRAY( uint, _indices, _max, size );
1268     _max = size;
1269   }
1270   while( _cnt <= from_idx ) _indices[_cnt++] = 0;
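
For reference, a sketch of the growth policy visible in extend() (not HotSpot code): when the requested index is out of range, the array grows to the next power of two above it, starting from 16, and the newly reachable slots are zeroed. std::vector stands in for the resource-arena array, and recording the from_idx -> to_idx mapping afterwards is an assumption taken from the function's signature, since that line is elided in the hunk.

#include <cassert>
#include <vector>

struct ToyUnionFind {
  std::vector<unsigned> indices;

  void extend(unsigned from_idx, unsigned to_idx) {
    if (from_idx >= indices.size()) {
      size_t size = 16;
      while (size <= from_idx) size <<= 1;  // next power of two above from_idx
      indices.resize(size, 0);              // new slots start out zeroed
    }
    indices[from_idx] = to_idx;  // assumed: the real extend() records the mapping
  }
};

int main() {
  ToyUnionFind uf;
  uf.extend(100, 7);                 // forces growth: 16 -> 32 -> 64 -> 128
  assert(uf.indices.size() == 128);
  assert(uf.indices[100] == 7);
  assert(uf.indices[99] == 0);
  return 0;
}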




 192 // executed infrequently.  Check to see if the block ends in a Halt or
 193 // a low probability call.
 194 bool Block::has_uncommon_code() const {
 195   Node* en = end();
 196 
 197   if (en->is_MachGoto())
 198     en = en->in(0);
 199   if (en->is_Catch())
 200     en = en->in(0);
 201   if (en->is_MachProj() && en->in(0)->is_MachCall()) {
 202     MachCallNode* call = en->in(0)->as_MachCall();
 203     if (call->cnt() != COUNT_UNKNOWN && call->cnt() <= PROB_UNLIKELY_MAG(4)) {
 204       // This is true for slow-path stubs like new_{instance,array},
 205       // slow_arraycopy, complete_monitor_locking, uncommon_trap.
 206       // The magic number corresponds to the probability of an uncommon_trap,
 207       // even though it is a count not a probability.
 208       return true;
 209     }
 210   }
 211 
 212   Opcodes op = en->is_Mach() ? en->as_Mach()->ideal_Opcode() : en->Opcode();
 213   return op == Opcodes::Op_Halt;
 214 }
 215 
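The change running through this version of the file is mechanical: ideal opcodes move from plain ints compared against unscoped Op_* constants to a scoped enum, so every comparison must qualify the value as Opcodes::Op_*. A minimal sketch of the pattern (the Opcodes members listed here are only the ones visible in these hunks; the real enum is assumed to be generated elsewhere in the VM sources):

#include <cassert>

// Illustrative subset only; not the generated HotSpot enum.
enum class Opcodes { Op_Halt, Op_If, Op_IfTrue, Op_IfFalse, Op_NeverBranch };

static bool ends_in_halt(Opcodes op) {
  // Before the change:  int op = ...;  return op == Op_Halt;
  // After the change: the scoped enum forces the qualified name and rejects
  // implicit conversions to and from int.
  return op == Opcodes::Op_Halt;
}

int main() {
  assert(ends_in_halt(Opcodes::Op_Halt));
  assert(!ends_in_halt(Opcodes::Op_If));
  return 0;
}
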
 216 // True if the block has a low enough frequency, or is guarded by a test
 217 // which mostly does not go here.
 218 bool PhaseCFG::is_uncommon(const Block* block) {
 219   // Initial blocks must never be moved, so are never uncommon.
 220   if (block->head()->is_Root() || block->head()->is_Start())  return false;
 221 
 222   // Check for way-low freq
 223   if(block->_freq < BLOCK_FREQUENCY(0.00001f) ) return true;
 224 
 225   // Look for code shape indicating uncommon_trap or slow path
 226   if (block->has_uncommon_code()) return true;
 227 
 228   const float epsilon = 0.05f;
 229   const float guard_factor = PROB_UNLIKELY_MAG(4) / (1.f - epsilon);
 230   uint uncommon_preds = 0;
 231   uint freq_preds = 0;
 232   uint uncommon_for_freq_preds = 0;
 233 


 530   block->_freq = freq;
 531   // add new basic block to basic block list
 532   add_block_at(block_no + 1, block);
 533 }
 534 
 535 // Does this block end in a multiway branch that cannot have the default case
 536 // flipped for another case?
 537 static bool no_flip_branch(Block *b) {
 538   int branch_idx = b->number_of_nodes() - b->_num_succs-1;
 539   if (branch_idx < 1) {
 540     return false;
 541   }
 542   Node *branch = b->get_node(branch_idx);
 543   if (branch->is_Catch()) {
 544     return true;
 545   }
 546   if (branch->is_Mach()) {
 547     if (branch->is_MachNullCheck()) {
 548       return true;
 549     }
 550     Opcodes iop = branch->as_Mach()->ideal_Opcode();
 551     if (iop == Opcodes::Op_FastLock || iop == Opcodes::Op_FastUnlock) {
 552       return true;
 553     }
 554     // Don't flip if branch has an implicit check.
 555     if (branch->as_Mach()->is_TrapBasedCheckNode()) {
 556       return true;
 557     }
 558   }
 559   return false;
 560 }
 561 
 562 // Check for a NeverBranch at the block end.  This needs to become a GOTO to
 563 // the true target.  NeverBranch nodes are treated as conditional branches that
 564 // always go the same direction for most of the optimizer, and are used to give
 565 // a fake exit path to infinite loops.  At this late stage they need to turn
 566 // into Gotos so that when you enter the infinite loop you indeed hang.
 567 void PhaseCFG::convert_NeverBranch_to_Goto(Block *b) {
 568   // Find true target
 569   int end_idx = b->end_idx();
 570   int idx = b->get_node(end_idx+1)->as_Proj()->_con;
 571   Block *succ = b->_succs[idx];


 663 
 664 // Make empty basic blocks into "connector" blocks, and move uncommon blocks
 665 // to the end.
 666 void PhaseCFG::remove_empty_blocks() {
 667   // Move uncommon blocks to the end
 668   uint last = number_of_blocks();
 669   assert(get_block(0) == get_root_block(), "");
 670 
 671   for (uint i = 1; i < last; i++) {
 672     Block* block = get_block(i);
 673     if (block->is_connector()) {
 674       break;
 675     }
 676 
 677     // Check for a NeverBranch at the block end.  This needs to become a GOTO
 678     // to the true target.  NeverBranch nodes are treated as conditional
 679     // branches that always go the same direction for most of the optimizer,
 680     // and are used to give a fake exit path to infinite loops.  At this late
 681     // stage they need to turn into Gotos so that when you enter the infinite
 682     // loop you indeed hang.
 683     if (block->get_node(block->end_idx())->Opcode() == Opcodes::Op_NeverBranch) {
 684       convert_NeverBranch_to_Goto(block);
 685     }
 686 
 687     // Look for uncommon blocks and move to end.
 688     if (!C->do_freq_based_layout()) {
 689       if (is_uncommon(block)) {
 690         move_to_end(block, i);
 691         last--;                   // No longer check for being uncommon!
 692         if (no_flip_branch(block)) { // Fall-thru case must follow?
 693           // Find the fall-thru block
 694           block = get_block(i);
 695           move_to_end(block, i);
 696           last--;
 697         }
 698         // backup block counter post-increment
 699         i--;
 700       }
 701     }
 702   }
 703 


 707     Block* block = get_block(i);
 708     if (block->is_Empty() != Block::not_empty) {
 709       move_to_end(block, i);
 710       last--;
 711       i--;
 712     }
 713   } // End of for all blocks
 714 }
 715 
 716 Block *PhaseCFG::fixup_trap_based_check(Node *branch, Block *block, int block_pos, Block *bnext) {
 717   // Trap-based checks must fall through to the successor with
 718   // PROB_ALWAYS.
 719   // The check should be an If with 2 successors.
 720   assert(branch->is_MachIf(),   "must be If");
 721   assert(block->_num_succs == 2, "must have 2 successors");
 722 
 723   // Get the If node and the projection for the first successor.
 724   MachIfNode *iff   = block->get_node(block->number_of_nodes()-3)->as_MachIf();
 725   ProjNode   *proj0 = block->get_node(block->number_of_nodes()-2)->as_Proj();
 726   ProjNode   *proj1 = block->get_node(block->number_of_nodes()-1)->as_Proj();
 727   ProjNode   *projt = (proj0->Opcode() == Opcodes::Op_IfTrue)  ? proj0 : proj1;
 728   ProjNode   *projf = (proj0->Opcode() == Opcodes::Op_IfFalse) ? proj0 : proj1;
 729 
 730   // Assert that proj0 and succs[0] match up. Similarly for proj1 and succs[1].
 731   assert(proj0->raw_out(0) == block->_succs[0]->head(), "Mismatch successor 0");
 732   assert(proj1->raw_out(0) == block->_succs[1]->head(), "Mismatch successor 1");
 733 
 734   ProjNode *proj_always;
 735   ProjNode *proj_never;
 736   // We must negate the branch if the implicit check doesn't follow
 737   // the branch's TRUE path. Then, the new TRUE branch target will
 738   // be the old FALSE branch target.
 739   if (iff->_prob <= 2*PROB_NEVER) {   // There are small rounding errors.
 740     proj_never  = projt;
 741     proj_always = projf;
 742   } else {
 743     // We must negate the branch if the trap doesn't follow the
 744     // branch's TRUE path. Then, the new TRUE branch target will
 745     // be the old FALSE branch target.
 746     proj_never  = projf;
 747     proj_always = projt;
 748     iff->negate();


 853       // Assert that proj0 and succs[0] match up. Similarly for proj1 and succs[1].
 854       assert(proj0->raw_out(0) == block->_succs[0]->head(), "Mismatch successor 0");
 855       assert(proj1->raw_out(0) == block->_succs[1]->head(), "Mismatch successor 1");
 856 
 857       Block* bs1 = block->non_connector_successor(1);
 858 
 859       // Check whether neither successor block follows the current
 860       // block, which ends in a conditional. If so, move one of the
 861       // successors after the current one, provided that the
 862       // successor was previously unscheduled, but moveable
 863       // (i.e., all paths to it involve a branch).
 864       if (!C->do_freq_based_layout() && bnext != bs0 && bnext != bs1) {
 865         // Choose the more common successor based on the probability
 866         // of the conditional branch.
 867         Block* bx = bs0;
 868         Block* by = bs1;
 869 
 870         // _prob is the probability of taking the true path. Make
 871         // p the probability of taking successor #1.
 872         float p = iff->as_MachIf()->_prob;
 873         if (proj0->Opcode() == Opcodes::Op_IfTrue) {
 874           p = 1.0 - p;
 875         }
 876 
 877         // Prefer successor #1 if p > 0.5
 878         if (p > PROB_FAIR) {
 879           bx = bs1;
 880           by = bs0;
 881         }
 882 
 883         // Attempt the more common successor first
 884         if (move_to_next(bx, i)) {
 885           bnext = bx;
 886         } else if (move_to_next(by, i)) {
 887           bnext = by;
 888         }
 889       }
 890 
 891       // Check for conditional branching the wrong way.  Negate
 892       // conditional, if needed, so it falls into the following block
 893       // and branches to the not-following block.


 899         // Fall-thru case in succs[0], so flip targets in succs map
 900         Block* tbs0 = block->_succs[0];
 901         Block* tbs1 = block->_succs[1];
 902         block->_succs.map(0, tbs1);
 903         block->_succs.map(1, tbs0);
 904         // Flip projection for each target
 905         ProjNode* tmp = proj0;
 906         proj0 = proj1;
 907         proj1 = tmp;
 908 
 909       } else if(bnext != bs1) {
 910         // Need a double-branch
 911         // The existing conditional branch need not change.
 912         // Add an unconditional branch to the false target.
 913         // Alas, it must appear in its own block and adding a
 914         // block this late in the game is complicated.  Sigh.
 915         insert_goto_at(i, 1);
 916       }
 917 
 918       // Make sure we TRUE branch to the target
 919       if (proj0->Opcode() == Opcodes::Op_IfFalse) {
 920         iff->as_MachIf()->negate();
 921       }
 922 
 923       block->pop_node();          // Remove IfFalse & IfTrue projections
 924       block->pop_node();
 925 
 926     } else {
 927       // Multi-exit block, e.g. a switch statement
 928       // But we don't need to do anything here
 929     }
 930   } // End of for all blocks
 931 }
 932 
 933 
 934 // postalloc_expand: Expand nodes after register allocation.
 935 //
 936 // postalloc_expand has to be called after register allocation, just
 937 // before output (i.e. scheduling). It only gets called if
 938 // Matcher::require_postalloc_expand is true.
 939 //


1192 
1193 void PhaseCFG::dump_headers() {
1194   for (uint i = 0; i < number_of_blocks(); i++) {
1195     Block* block = get_block(i);
1196     if (block != NULL) {
1197       block->dump_head(this);
1198     }
1199   }
1200 }
1201 
1202 void PhaseCFG::verify() const {
1203 #ifdef ASSERT
1204   // Verify sane CFG
1205   for (uint i = 0; i < number_of_blocks(); i++) {
1206     Block* block = get_block(i);
1207     uint cnt = block->number_of_nodes();
1208     uint j;
1209     for (j = 0; j < cnt; j++)  {
1210       Node *n = block->get_node(j);
1211       assert(get_block_for_node(n) == block, "");
1212       if (j >= 1 && n->is_Mach() && n->as_Mach()->ideal_Opcode() == Opcodes::Op_CreateEx) {
1213         assert(j == 1 || block->get_node(j-1)->is_Phi(), "CreateEx must be first instruction in block");
1214       }
1215       for (uint k = 0; k < n->req(); k++) {
1216         Node *def = n->in(k);
1217         if (def && def != n) {
1218           assert(get_block_for_node(def) || def->is_Con(), "must have block; constants for debug info ok");
1219           // Verify that instructions in the block are in correct order.
1220           // Uses must follow their definition if they are in the same block.
1221           // Mostly done to check that MachSpillCopy nodes are placed correctly
1222           // when CreateEx node is moved in build_ifg_physical().
1223           if (get_block_for_node(def) == block && !(block->head()->is_Loop() && n->is_Phi()) &&
1224               // See (+++) comment in reg_split.cpp
1225               !(n->jvms() != NULL && n->jvms()->is_monitor_use(k))) {
1226             bool is_loop = false;
1227             if (n->is_Phi()) {
1228               for (uint l = 1; l < def->req(); l++) {
1229                 if (n == def->in(l)) {
1230                   is_loop = true;
1231                   break; // Some kind of loop
1232                 }
1233               }
1234             }
1235             assert(is_loop || block->find_node(def) < j, "uses must follow definitions");
1236           }
1237         }
1238       }
1239     }
1240 
1241     j = block->end_idx();
1242     Node* bp = (Node*)block->get_node(block->number_of_nodes() - 1)->is_block_proj();
1243     assert(bp, "last instruction must be a block proj");
1244     assert(bp == block->get_node(j), "wrong number of successors for this block");
1245     if (bp->is_Catch()) {
1246       while (block->get_node(--j)->is_MachProj()) {
1247         ;
1248       }
1249       assert(block->get_node(j)->is_MachCall(), "CatchProj must follow call");
1250     } else if (bp->is_Mach() && bp->as_Mach()->ideal_Opcode() == Opcodes::Op_If) {
1251       assert(block->_num_succs == 2, "Conditional branch must have two targets");
1252     }
1253   }
1254 #endif
1255 }
1256 #endif
1257 
1258 UnionFind::UnionFind( uint max ) : _cnt(max), _max(max), _indices(NEW_RESOURCE_ARRAY(uint,max)) {
1259   Copy::zero_to_bytes( _indices, sizeof(uint)*max );
1260 }
1261 
1262 void UnionFind::extend( uint from_idx, uint to_idx ) {
1263   _nesting.check();
1264   if( from_idx >= _max ) {
1265     uint size = 16;
1266     while( size <= from_idx ) size <<=1;
1267     _indices = REALLOC_RESOURCE_ARRAY( uint, _indices, _max, size );
1268     _max = size;
1269   }
1270   while( _cnt <= from_idx ) _indices[_cnt++] = 0;

