
src/share/vm/opto/block.cpp

rev 10582 : 8154135: Loop alignment may be added inside the loop body
Summary: loop alignment code may add alignment constraint to top of loop and loop head
Reviewed-by:
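
For orientation, below is a minimal, self-contained sketch (not HotSpot code) of the check this change adds at new lines 1729-1744. When a backbranch targets a block in the middle of a trace, the new code first scans backwards from the target: if a block that already carries loop alignment is found before the loop head, the loop already has an aligned top (it is being reached again through the backedge of an outer loop), so no second alignment point is created inside the loop body. SimpleBlock, loop_already_has_top and the trace layout in main are illustrative stand-ins, not HotSpot types or APIs.

// Simplified model of the backward scan added by this change; compile with
// any C++11 compiler and run to see that a block inside an already-aligned
// loop is left unaligned.
#include <cstdio>
#include <vector>

struct SimpleBlock {
  bool is_loop_head;        // stand-in for b->head()->is_Loop()
  bool has_loop_alignment;  // stand-in for b->has_loop_alignment()
};

// Walk backwards through the trace from the block just before the backbranch
// target, mirroring "for (Block* b = prev(targ_block); b != NULL; b = prev(b))".
static bool loop_already_has_top(const std::vector<SimpleBlock>& trace, int targ) {
  for (int i = targ - 1; i >= 0; i--) {
    if (trace[i].is_loop_head) {
      break;                 // reached the loop head first: no aligned top above
    }
    if (trace[i].has_loop_alignment) {
      return true;           // an aligned loop top already exists in this loop
    }
  }
  return false;
}

int main() {
  // Trace layout: inner loop head, the block already chosen as the aligned
  // loop top, and a body block that is the target of an outer loop's backedge.
  std::vector<SimpleBlock> trace = {
    {true,  false},
    {false, true},
    {false, false},
  };
  const int targ_block = 2;
  if (!loop_already_has_top(trace, targ_block)) {
    // Only make the target a loop top when no aligned top was found above it
    // (this is the behavior after the patch; before it, this ran unconditionally).
    trace[targ_block].has_loop_alignment = true;
  }
  std::printf("target aligned: %s\n",
              trace[targ_block].has_loop_alignment ? "yes" : "no");
  return 0;
}

Compared with the old lines 1729-1730, which unconditionally made the backbranch target a loop top, the extra scan keeps the alignment NOPs from being emitted in the middle of the loop body.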

old/src/share/vm/opto/block.cpp

  55   assert(i < _cnt, "index out of bounds");
  56   Copy::conjoint_words_to_lower((HeapWord*)&_blocks[i+1], (HeapWord*)&_blocks[i], ((_cnt-i-1)*sizeof(Block*)));
  57   pop(); // shrink list by one block
  58 }
  59 
  60 void Block_List::insert(uint i, Block *b) {
  61   push(b); // grow list by one block
  62   Copy::conjoint_words_to_higher((HeapWord*)&_blocks[i], (HeapWord*)&_blocks[i+1], ((_cnt-i-1)*sizeof(Block*)));
  63   _blocks[i] = b;
  64 }
  65 
  66 #ifndef PRODUCT
  67 void Block_List::print() {
  68   for (uint i=0; i < size(); i++) {
  69     tty->print("B%d ", _blocks[i]->_pre_order);
  70   }
  71   tty->print("size = %d\n", size());
  72 }
  73 #endif
  74 
  75 uint Block::code_alignment() {
  76   // Check for Root block
  77   if (_pre_order == 0) return CodeEntryAlignment;
  78   // Check for Start block
  79   if (_pre_order == 1) return InteriorEntryAlignment;
  80   // Check for loop alignment
  81   if (has_loop_alignment()) return loop_alignment();
  82 
  83   return relocInfo::addr_unit(); // no particular alignment
  84 }
  85 
  86 uint Block::compute_loop_alignment() {
  87   Node *h = head();
  88   int unit_sz = relocInfo::addr_unit();
  89   if (h->is_Loop() && h->as_Loop()->is_inner_loop())  {
  90     // Pre- and post-loops have low trip count so do not bother with
  91     // NOPs to align the loop head.  The constants are hidden from tuning
  92     // but only because my "divide by 4" heuristic surely gets nearly
  93     // all possible gain (a "do not align at all" heuristic has a
  94     // chance of getting a really tiny gain).
  95     if (h->is_CountedLoop() && (h->as_CountedLoop()->is_pre_loop() ||


1709         break_loop_after(b);
1710       }
1711     }
1712 
1713     // Backbranch to the top of a trace
1714     // Scroll forward through the trace from the targ_block. If we find
1715     // a loop head before another loop top, use the loop head alignment.
1716     for (Block *b = targ_block; b != NULL; b = next(b)) {
1717       if (b->has_loop_alignment()) {
1718         break;
1719       }
1720       if (b->head()->is_Loop()) {
1721         targ_block = b;
1722         break;
1723       }
1724     }
1725 
1726     first_block()->set_loop_alignment(targ_block);
1727 
1728   } else {

1729     // Backbranch into the middle of a trace
1730     targ_block->set_loop_alignment(targ_block);

1731   }
1732 
1733   return loop_rotated;
1734 }
1735 
1736 // push blocks onto the CFG list
1737 // ensure that blocks have the correct two-way branch sense
1738 void Trace::fixup_blocks(PhaseCFG &cfg) {
1739   Block *last = last_block();
1740   for (Block *b = first_block(); b != NULL; b = next(b)) {
1741     cfg.add_block(b);
1742     if (!b->is_connector()) {
1743       int nfallthru = b->num_fall_throughs();
1744       if (b != last) {
1745         if (nfallthru == 2) {
1746           // Ensure that the sense of the branch is correct
1747           Block *bnext = next(b);
1748           Block *bs0 = b->non_connector_successor(0);
1749 
1750           MachNode *iff = b->get_node(b->number_of_nodes() - 3)->as_Mach();


new/src/share/vm/opto/block.cpp

  55   assert(i < _cnt, "index out of bounds");
  56   Copy::conjoint_words_to_lower((HeapWord*)&_blocks[i+1], (HeapWord*)&_blocks[i], ((_cnt-i-1)*sizeof(Block*)));
  57   pop(); // shrink list by one block
  58 }
  59 
  60 void Block_List::insert(uint i, Block *b) {
  61   push(b); // grow list by one block
  62   Copy::conjoint_words_to_higher((HeapWord*)&_blocks[i], (HeapWord*)&_blocks[i+1], ((_cnt-i-1)*sizeof(Block*)));
  63   _blocks[i] = b;
  64 }
  65 
  66 #ifndef PRODUCT
  67 void Block_List::print() {
  68   for (uint i=0; i < size(); i++) {
  69     tty->print("B%d ", _blocks[i]->_pre_order);
  70   }
  71   tty->print("size = %d\n", size());
  72 }
  73 #endif
  74 
  75 uint Block::code_alignment() const {
  76   // Check for Root block
  77   if (_pre_order == 0) return CodeEntryAlignment;
  78   // Check for Start block
  79   if (_pre_order == 1) return InteriorEntryAlignment;
  80   // Check for loop alignment
  81   if (has_loop_alignment()) return loop_alignment();
  82 
  83   return relocInfo::addr_unit(); // no particular alignment
  84 }
  85 
  86 uint Block::compute_loop_alignment() {
  87   Node *h = head();
  88   int unit_sz = relocInfo::addr_unit();
  89   if (h->is_Loop() && h->as_Loop()->is_inner_loop())  {
  90     // Pre- and post-loops have low trip count so do not bother with
  91     // NOPs to align the loop head.  The constants are hidden from tuning
  92     // but only because my "divide by 4" heuristic surely gets nearly
  93     // all possible gain (a "do not align at all" heuristic has a
  94     // chance of getting a really tiny gain).
  95     if (h->is_CountedLoop() && (h->as_CountedLoop()->is_pre_loop() ||


1709         break_loop_after(b);
1710       }
1711     }
1712 
1713     // Backbranch to the top of a trace
1714     // Scroll forward through the trace from the targ_block. If we find
1715     // a loop head before another loop top, use the loop head alignment.
1716     for (Block *b = targ_block; b != NULL; b = next(b)) {
1717       if (b->has_loop_alignment()) {
1718         break;
1719       }
1720       if (b->head()->is_Loop()) {
1721         targ_block = b;
1722         break;
1723       }
1724     }
1725 
1726     first_block()->set_loop_alignment(targ_block);
1727 
1728   } else {
1729     // That loop may already have a loop top (we're reaching it again
1730     // through the backedge of an outer loop)
1731     bool has_top = false;
1732     for (Block* b = prev(targ_block); b != NULL; b = prev(b)) {
1733       if (b->head()->is_Loop()) {
1734         break;
1735       }
1736       if (b->has_loop_alignment()) {
1737         has_top = true;
1738         break;
1739       }
1740     }
1741     if (!has_top) {
1742       // Backbranch into the middle of a trace
1743       targ_block->set_loop_alignment(targ_block);
1744     }
1745   }
1746 
1747   return loop_rotated;
1748 }
1749 
1750 // push blocks onto the CFG list
1751 // ensure that blocks have the correct two-way branch sense
1752 void Trace::fixup_blocks(PhaseCFG &cfg) {
1753   Block *last = last_block();
1754   for (Block *b = first_block(); b != NULL; b = next(b)) {
1755     cfg.add_block(b);
1756     if (!b->is_connector()) {
1757       int nfallthru = b->num_fall_throughs();
1758       if (b != last) {
1759         if (nfallthru == 2) {
1760           // Ensure that the sense of the branch is correct
1761           Block *bnext = next(b);
1762           Block *bs0 = b->non_connector_successor(0);
1763 
1764           MachNode *iff = b->get_node(b->number_of_nodes() - 3)->as_Mach();

