  IdealLoopTree *l = innermost;
  while( p && l->_head == n ) {
    l->_next = p->_child;     // Put self on parent's 'next child' list
    p->_child = l;            // Make self the first child of parent
    l = p;                    // Now walk up the parent chain
    p = l->_parent;
  }
} else {
  // Note that it is possible for a LoopNode to reach here, if the
  // backedge has been made unreachable (hence the LoopNode no longer
  // denotes a Loop, and will eventually be removed).

  // Record tightest enclosing loop for self.  Mark as post-visited.
  set_loop(n, innermost);
  // Also record the has_call flag early on
  if( innermost ) {
    if( n->is_Call() && !n->is_CallLeaf() && !n->is_macro() ) {
      // Do not count uncommon calls
      if( !n->is_CallStaticJava() || !n->as_CallStaticJava()->_name ) {
        Node *iff = n->in(0)->in(0);
        // Vectorized loops must not contain any calls.
        if( UseSuperWord || !iff->is_If() ||
            (n->in(0)->Opcode() == Op_IfFalse &&
             (1.0 - iff->as_If()->_prob) >= 0.01) ||
            (iff->as_If()->_prob >= 0.01) )
          innermost->_has_call = 1;
      }
    } else if( n->is_Allocate() && n->as_Allocate()->_is_scalar_replaceable ) {
      // Disable loop optimizations if the loop has a scalar-replaceable
      // allocation. Disabling them may cause a performance loss if the
      // allocation is not eliminated for some reason.
      innermost->_allow_optimizations = false;
      innermost->_has_call = 1; // = true
    }
  }
}

// Flag as post-visited now
set_postvisited(n);
return pre_order;
}
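
// ---------------------------------------------------------------------------
// The linking step above uses a first-child/next-sibling encoding: each
// finished loop is prepended to its parent's child list, then the walk
// moves up the parent chain. Below is a minimal standalone sketch of that
// idiom; it is NOT HotSpot code, and the LoopTree type, attach_to_parent
// helper, and driver are illustrative assumptions only.
#include <cstdio>

struct LoopTree {
  LoopTree*   _parent;
  LoopTree*   _child;          // head of this loop's child list
  LoopTree*   _next;           // next sibling in the parent's child list
  const char* _name;
  LoopTree(const char* name, LoopTree* parent)
    : _parent(parent), _child(nullptr), _next(nullptr), _name(name) {}
};

// Prepend 'l' to its parent's child list, mirroring
//   l->_next = p->_child; p->_child = l;
static void attach_to_parent(LoopTree* l) {
  LoopTree* p = l->_parent;
  if (p == nullptr) return;    // the root has no parent to link into
  l->_next  = p->_child;       // put self on parent's 'next child' list
  p->_child = l;               // make self the first child of parent
}

static void print_tree(const LoopTree* t, int depth) {
  printf("%*s%s\n", depth * 2, "", t->_name);
  for (const LoopTree* c = t->_child; c != nullptr; c = c->_next)
    print_tree(c, depth + 1);
}

int main() {
  LoopTree root ("root",  nullptr);
  LoopTree outer("outer", &root);
  LoopTree inner("inner", &outer);
  attach_to_parent(&inner);    // innermost loop is finished first
  attach_to_parent(&outer);    // then the walk proceeds outward
  print_tree(&root, 0);        // prints: root / outer / inner
  return 0;
}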
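
// ---------------------------------------------------------------------------
// A boolean restatement of the uncommon-call filter above, as a standalone
// sketch (not HotSpot code; the function name and parameters are
// assumptions). A call is treated as "uncommon" -- and so does not set
// _has_call -- only when it hangs off a non-false projection of an If whose
// true-path probability is below 1%; note the last clause applies regardless
// of which projection the call is on, exactly as in the original condition.
static bool counts_as_call(bool use_superword, bool guard_is_if,
                           bool on_false_proj, double prob_true) {
  return use_superword                                 // vectorized loops: count every call
      || !guard_is_if                                  // no If guard: always count
      || (on_false_proj && (1.0 - prob_true) >= 0.01)  // false path taken >= 1% of the time
      || (prob_true >= 0.01);                          // true-path probability >= 1%
}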