src/hotspot/share/opto/macro.cpp

2648       switch (n->class_id()) {
2649       case Node::Class_Allocate:
2650       case Node::Class_AllocateArray:
2651         success = eliminate_allocate_node(n->as_Allocate());
2652         break;
2653       case Node::Class_CallStaticJava:
2654         success = eliminate_boxing_node(n->as_CallStaticJava());
2655         break;
2656       case Node::Class_Lock:
2657       case Node::Class_Unlock:
2658         assert(!n->as_AbstractLock()->is_eliminated(), "sanity");
2659         _has_locks = true;
2660         break;
2661       case Node::Class_ArrayCopy:
2662         break;
2663       default:
2664         assert(n->Opcode() == Op_LoopLimit ||
2665                n->Opcode() == Op_Opaque1   ||
2666                n->Opcode() == Op_Opaque2   ||
2667                n->Opcode() == Op_Opaque3   ||
2668                n->Opcode() == Op_Opaque4, "unknown node type in macro list");
2669       }
2670       assert(success == (C->macro_count() < old_macro_count), "elimination reduces macro count");
2671       progress = progress || success;
2672     }
2673   }
2674 }
2675 
2676 //------------------------------expand_macro_nodes----------------------
2677 //  Returns true if a failure occurred.
2678 bool PhaseMacroExpand::expand_macro_nodes() {
2679   // Last attempt to eliminate macro nodes.
2680   eliminate_macro_nodes();
2681 
2682   // Make sure expansion will not cause node limit to be exceeded.
2683   // Worst case is a macro node gets expanded into about 200 nodes.
2684   // Allow 50% more for optimization.
2685   if (C->check_node_count(C->macro_count() * 300, "out of nodes before macro expansion" ) )
2686     return true;
2687 
2688   // Eliminate Opaque and LoopLimit nodes. Do it after all loop optimizations.


2715         // Validate graph.
2716         assert((cmp->outcnt() == 1) && cmp->unique_out()->is_Bool(), "");
2717         BoolNode* bol = cmp->unique_out()->as_Bool();
2718         assert((bol->outcnt() == 1) && bol->unique_out()->is_If() &&
2719                (bol->_test._test == BoolTest::ne), "");
2720         IfNode* ifn = bol->unique_out()->as_If();
2721         assert((ifn->outcnt() == 2) &&
2722                ifn->proj_out(1)->is_uncommon_trap_proj(Deoptimization::Reason_rtm_state_change) != NULL, "");
2723 #endif
2724         Node* repl = n->in(1);
2725         if (!_has_locks) {
2726           // Remove RTM state check if there are no locks in the code.
2727           // Replace input to compare the same value.
2728           repl = (cmp->in(1) == n) ? cmp->in(2) : cmp->in(1);
2729         }
2730         _igvn.replace_node(n, repl);
2731         success = true;
2732 #endif
2733       } else if (n->Opcode() == Op_Opaque4) {
2734         _igvn.replace_node(n, n->in(2));
2735         success = true;
2736       }
2737       assert(success == (C->macro_count() < old_macro_count), "elimination reduces macro count");
2738       progress = progress || success;
2739     }
2740   }
2741 
2742   // expand arraycopy "macro" nodes first
2743   // For ReduceBulkZeroing, we must first process all arraycopy nodes
2744   // before the allocate nodes are expanded.
2745   int macro_idx = C->macro_count() - 1;
2746   while (macro_idx >= 0) {
2747     Node * n = C->macro_node(macro_idx);
2748     assert(n->is_macro(), "only macro nodes expected here");
2749     if (_igvn.type(n) == Type::TOP || n->in(0)->is_top() ) {
2750       // node is unreachable, so don't try to expand it
2751       C->remove_macro_node(n);
2752     } else if (n->is_ArrayCopy()){
2753       int macro_count = C->macro_count();
2754       expand_arraycopy_node(n->as_ArrayCopy());

2648       switch (n->class_id()) {
2649       case Node::Class_Allocate:
2650       case Node::Class_AllocateArray:
2651         success = eliminate_allocate_node(n->as_Allocate());
2652         break;
2653       case Node::Class_CallStaticJava:
2654         success = eliminate_boxing_node(n->as_CallStaticJava());
2655         break;
2656       case Node::Class_Lock:
2657       case Node::Class_Unlock:
2658         assert(!n->as_AbstractLock()->is_eliminated(), "sanity");
2659         _has_locks = true;
2660         break;
2661       case Node::Class_ArrayCopy:
2662         break;
2663       default:
2664         assert(n->Opcode() == Op_LoopLimit ||
2665                n->Opcode() == Op_Opaque1   ||
2666                n->Opcode() == Op_Opaque2   ||
2667                n->Opcode() == Op_Opaque3   ||
2668                n->Opcode() == Op_Opaque4   ||
2669                n->Opcode() == Op_Opaque5, "unknown node type in macro list");
2670       }
2671       assert(success == (C->macro_count() < old_macro_count), "elimination reduces macro count");
2672       progress = progress || success;
2673     }
2674   }
2675 }
2676 
2677 //------------------------------expand_macro_nodes----------------------
2678 //  Returns true if a failure occurred.
2679 bool PhaseMacroExpand::expand_macro_nodes() {
2680   // Last attempt to eliminate macro nodes.
2681   eliminate_macro_nodes();
2682 
2683   // Make sure expansion will not cause node limit to be exceeded.
2684   // Worst case is a macro node gets expanded into about 200 nodes.
2685   // Allow 50% more for optimization.
2686   if (C->check_node_count(C->macro_count() * 300, "out of nodes before macro expansion" ) )
2687     return true;
2688 
2689   // Eliminate Opaque and LoopLimit nodes. Do it after all loop optimizations.


2716         // Validate graph.
2717         assert((cmp->outcnt() == 1) && cmp->unique_out()->is_Bool(), "");
2718         BoolNode* bol = cmp->unique_out()->as_Bool();
2719         assert((bol->outcnt() == 1) && bol->unique_out()->is_If() &&
2720                (bol->_test._test == BoolTest::ne), "");
2721         IfNode* ifn = bol->unique_out()->as_If();
2722         assert((ifn->outcnt() == 2) &&
2723                ifn->proj_out(1)->is_uncommon_trap_proj(Deoptimization::Reason_rtm_state_change) != NULL, "");
2724 #endif
2725         Node* repl = n->in(1);
2726         if (!_has_locks) {
2727           // Remove RTM state check if there are no locks in the code.
2728           // Replace input to compare the same value.
2729           repl = (cmp->in(1) == n) ? cmp->in(2) : cmp->in(1);
2730         }
2731         _igvn.replace_node(n, repl);
2732         success = true;
2733 #endif
2734       } else if (n->Opcode() == Op_Opaque4) {
2735         _igvn.replace_node(n, n->in(2));
2736         success = true;
2737       } else if (n->Opcode() == Op_Opaque5) {
2738         Node* res = ((Opaque5Node*)n)->adjust_strip_mined_loop(&_igvn);
2739         guarantee(res != NULL, "strip mined adjustment failed");
2740         _igvn.replace_node(n, res);
2741         success = true;
2742       }
2743       assert(success == (C->macro_count() < old_macro_count), "elimination reduces macro count");
2744       progress = progress || success;
2745     }
2746   }
2747 
2748   // expand arraycopy "macro" nodes first
2749   // For ReduceBulkZeroing, we must first process all arraycopy nodes
2750   // before the allocate nodes are expanded.
2751   int macro_idx = C->macro_count() - 1;
2752   while (macro_idx >= 0) {
2753     Node * n = C->macro_node(macro_idx);
2754     assert(n->is_macro(), "only macro nodes expected here");
2755     if (_igvn.type(n) == Type::TOP || n->in(0)->is_top() ) {
2756       // node is unreachable, so don't try to expand it
2757       C->remove_macro_node(n);
2758     } else if (n->is_ArrayCopy()){
2759       int macro_count = C->macro_count();
2760       expand_arraycopy_node(n->as_ArrayCopy());