< prev index next >

src/share/vm/opto/block.cpp

Print this page




 757   }
 758 
 759   // Place the fall through block after this block.
 760   Block *bs1 = block->non_connector_successor(1);
 761   if (bs1 != bnext && move_to_next(bs1, block_pos)) {
 762     bnext = bs1;
 763   }
 764   // If the fall through block still is not the next block, insert a goto.
 765   if (bs1 != bnext) {
 766     insert_goto_at(block_pos, 1);
 767   }
 768   return bnext;
 769 }
 770 
 771 // Fix up the final control flow for basic blocks.
 772 void PhaseCFG::fixup_flow() {
 773   // Fixup final control flow for the blocks.  Remove jump-to-next
 774   // block. If neither arm of an IF follows the conditional branch, we
 775   // have to add a second jump after the conditional.  We place the
 776   // TRUE branch target in succs[0] for both GOTOs and IFs.

 777   for (uint i = 0; i < number_of_blocks(); i++) {
 778     Block* block = get_block(i);
 779     block->_pre_order = i;          // turn pre-order into block-index
 780 








 781     // Connector blocks need no further processing.
 782     if (block->is_connector()) {
 783       assert((i+1) == number_of_blocks() || get_block(i + 1)->is_connector(), "All connector blocks should sink to the end");
 784       continue;
 785     }
 786     assert(block->is_Empty() != Block::completely_empty, "Empty blocks should be connectors");
 787 
 788     Block* bnext = (i < number_of_blocks() - 1) ? get_block(i + 1) : NULL;
 789     Block* bs0 = block->non_connector_successor(0);
 790 
 791     // Check for multi-way branches where I cannot negate the test to
 792     // exchange the true and false targets.
 793     if (no_flip_branch(block)) {
 794       // Find fall through case - if must fall into its target.
 795       // Get the index of the branch's first successor.
 796       int branch_idx = block->number_of_nodes() - block->_num_succs;
 797 
 798       // The branch is 1 before the branch's first successor.
 799       Node *branch = block->get_node(branch_idx-1);
 800 


 910         // The existing conditional branch need not change.
 911         // Add an unconditional branch to the false target.
 912         // Alas, it must appear in its own block and adding a
 913         // block this late in the game is complicated.  Sigh.
 914         insert_goto_at(i, 1);
 915       }
 916 
 917       // Make sure we TRUE branch to the target
 918       if (proj0->Opcode() == Op_IfFalse) {
 919         iff->as_MachIf()->negate();
 920       }
 921 
 922       block->pop_node();          // Remove IfFalse & IfTrue projections
 923       block->pop_node();
 924 
 925     } else {
 926       // Multi-exit block, e.g. a switch statement
 927       // But we don't need to do anything here
 928     }
 929   } // End of for all blocks












































 930 }
 931 
 932 
 933 // postalloc_expand: Expand nodes after register allocation.
 934 //
 935 // postalloc_expand has to be called after register allocation, just
 936 // before output (i.e. scheduling). It only gets called if
 937 // Matcher::require_postalloc_expand is true.
 938 //
 939 // Background:
 940 //
 941 // Nodes that are expanded (one compound node requiring several
 942 // assembler instructions to be implemented, split into two or more
 943 // non-compound nodes) after register allocation are not as nice as
 944 // the ones expanded before register allocation - they don't
 945 // participate in optimizations such as global code motion. But after
 946 // register allocation we can expand nodes that use registers which
 947 // are not spillable or registers that are not allocated, because the
 948 // old compound node is simply replaced (in its location in the basic
 949 // block) by a new subgraph which does not contain compound nodes any




 757   }
 758 
 759   // Place the fall through block after this block.
 760   Block *bs1 = block->non_connector_successor(1);
 761   if (bs1 != bnext && move_to_next(bs1, block_pos)) {
 762     bnext = bs1;
 763   }
 764   // If the fall through block still is not the next block, insert a goto.
 765   if (bs1 != bnext) {
 766     insert_goto_at(block_pos, 1);
 767   }
 768   return bnext;
 769 }
 770 
 771 // Fix up the final control flow for basic blocks.
 772 void PhaseCFG::fixup_flow() {
 773   // Fixup final control flow for the blocks.  Remove jump-to-next
 774   // block. If neither arm of an IF follows the conditional branch, we
 775   // have to add a second jump after the conditional.  We place the
 776   // TRUE branch target in succs[0] for both GOTOs and IFs.
 777   bool found_fixup_loops = false;
 778   for (uint i = 0; i < number_of_blocks(); i++) {
 779     Block* block = get_block(i);
 780     block->_pre_order = i;          // turn pre-order into block-index
 781 
 782     Node *bh = block->head();
 783     if (bh->is_Loop()) {
 784       LoopNode *loop = bh->as_Loop();
 785       if (loop->is_inner_loop() && loop->is_multiversioned() && loop->is_vectorized_loop() && !loop->range_checks_present()) {
 786         found_fixup_loops = true;
 787       }
 788     }
 789 
 790     // Connector blocks need no further processing.
 791     if (block->is_connector()) {
 792       assert((i+1) == number_of_blocks() || get_block(i + 1)->is_connector(), "All connector blocks should sink to the end");
 793       continue;
 794     }
 795     assert(block->is_Empty() != Block::completely_empty, "Empty blocks should be connectors");
 796 
 797     Block* bnext = (i < number_of_blocks() - 1) ? get_block(i + 1) : NULL;
 798     Block* bs0 = block->non_connector_successor(0);
 799 
 800     // Check for multi-way branches where I cannot negate the test to
 801     // exchange the true and false targets.
 802     if (no_flip_branch(block)) {
 803       // Find fall through case - if must fall into its target.
 804       // Get the index of the branch's first successor.
 805       int branch_idx = block->number_of_nodes() - block->_num_succs;
 806 
 807       // The branch is 1 before the branch's first successor.
 808       Node *branch = block->get_node(branch_idx-1);
 809 


 919         // The existing conditional branch need not change.
 920         // Add an unconditional branch to the false target.
 921         // Alas, it must appear in its own block and adding a
 922         // block this late in the game is complicated.  Sigh.
 923         insert_goto_at(i, 1);
 924       }
 925 
 926       // Make sure we TRUE branch to the target
 927       if (proj0->Opcode() == Op_IfFalse) {
 928         iff->as_MachIf()->negate();
 929       }
 930 
 931       block->pop_node();          // Remove IfFalse & IfTrue projections
 932       block->pop_node();
 933 
 934     } else {
 935       // Multi-exit block, e.g. a switch statement
 936       // But we don't need to do anything here
 937     }
 938   } // End of for all blocks
 939 
 940   if (found_fixup_loops) {
 941     // find all fixup-loops and process them
 942     for (uint i = 0; i < number_of_blocks(); i++) {
 943       Block* block = get_block(i);
 944       Node *bh = block->head();
 945       if (bh->is_Loop()) {
 946         LoopNode *loop = bh->as_Loop();
 947         // fixup loops are only marked for processing when they are predicated and
 948         // vectorized; otherwise they are just post loops.
 949         if (Matcher::has_predicated_vectors()) {
 950           if (loop->is_inner_loop() && loop->is_multiversioned() && loop->is_vectorized_loop() && !loop->range_checks_present()) {
 951             CFGLoop *cur_loop = block->_loop;
 952             // fixup loops can have multiple exits, so we need to find the backedge
 953             Block *back_edge = cur_loop->backedge_block();
 954             if (back_edge) {
 955               // fetch the region of the back edge
 956               Node *backedge_region = back_edge->get_node(0);
 957               Block *idom = back_edge->_idom;
 958               if (backedge_region->is_Region()) {
 959                 Node *if_true = backedge_region->in(1);
 960                 if (if_true->Opcode() == Op_IfTrue) {
 961                   Node *backedge_iff = if_true->in(0);
 962                   if (backedge_iff->is_MachIf() && idom) {
 963                     for (uint j = 0; j < idom->number_of_nodes(); j++) {
 964                       Node *n = idom->get_node(j);
 965                       if (n == backedge_iff) {
 966                         MachMskNode *mask = new MachMskNode(true);
 967                         if (mask) {
 968                           idom->insert_node(mask, j);
 969                           map_node_to_block(mask, idom);
 970                           break;
 971                         }
 972                       }
 973                     }
 974                   }
 975                 }
 976               }
 977             }
 978           }
 979         }
 980       }
 981     }
 982   }
 983 }
 984 
 985 
 986 // postalloc_expand: Expand nodes after register allocation.
 987 //
 988 // postalloc_expand has to be called after register allocation, just
 989 // before output (i.e. scheduling). It only gets called if
 990 // Matcher::require_postalloc_expand is true.
 991 //
 992 // Background:
 993 //
 994 // Nodes that are expanded (one compound node requiring several
 995 // assembler instructions to be implemented, split into two or more
 996 // non-compound nodes) after register allocation are not as nice as
 997 // the ones expanded before register allocation - they don't
 998 // participate in optimizations such as global code motion. But after
 999 // register allocation we can expand nodes that use registers which
1000 // are not spillable or registers that are not allocated, because the
1001 // old compound node is simply replaced (in its location in the basic
1002 // block) by a new subgraph which does not contain compound nodes any


< prev index next >