
src/hotspot/share/opto/parse2.cpp

  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciMethodData.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "compiler/compileLog.hpp"
  30 #include "interpreter/linkResolver.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "memory/universe.hpp"
  33 #include "oops/oop.inline.hpp"
  34 #include "opto/addnode.hpp"
  35 #include "opto/castnode.hpp"
  36 #include "opto/convertnode.hpp"
  37 #include "opto/divnode.hpp"
  38 #include "opto/idealGraphPrinter.hpp"
  39 #include "opto/matcher.hpp"
  40 #include "opto/memnode.hpp"
  41 #include "opto/mulnode.hpp"
  42 #include "opto/opaquenode.hpp"
  43 #include "opto/parse.hpp"
  44 #include "opto/runtime.hpp"
  45 #include "runtime/deoptimization.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 
  48 #ifndef PRODUCT
  49 extern int explicit_null_checks_inserted,
  50            explicit_null_checks_elided;
  51 #endif
  52 
  53 //---------------------------------array_load----------------------------------
  54 void Parse::array_load(BasicType bt) {
  55   const Type* elemtype = Type::TOP;
  56   bool big_val = bt == T_DOUBLE || bt == T_LONG;
  57   Node* adr = array_addressing(bt, 0, &elemtype);
  58   if (stopped())  return;     // guaranteed null or range check
  59 
  60   pop();                      // index (already used)
  61   Node* array = pop();        // the array itself
  62 
  63   if (elemtype == TypeInt::BOOL) {
  64     bt = T_BOOLEAN;
  65   } else if (bt == T_OBJECT) {
  66     elemtype = _gvn.type(array)->is_aryptr()->elem()->make_oopptr();
  67   }
  68 
  69   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
  70 
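       // The access decorators describe this load to the GC barrier set:
       // IN_HEAP | IS_ARRAY selects the array-body memory slice, and
       // C2_CONTROL_DEPENDENT_LOAD pins the load below the preceding
       // range check so it cannot float above it and read out of bounds.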
  71   Node* ld = access_load_at(array, adr, adr_type, elemtype, bt,
  72                             IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
  73   if (big_val) {
  74     push_pair(ld);
  75   } else {
  76     push(ld);
  77   }
  78 }
  79 
  80 
  81 //--------------------------------array_store----------------------------------
  82 void Parse::array_store(BasicType bt) {
  83   const Type* elemtype = Type::TOP;
  84   bool big_val = bt == T_DOUBLE || bt == T_LONG;
  85   Node* adr = array_addressing(bt, big_val ? 2 : 1, &elemtype);
  86   if (stopped())  return;     // guaranteed null or range check
  87   if (bt == T_OBJECT) {
  88     array_store_check();
  89   }
  90   Node* val;                  // Oop to store
  91   if (big_val) {
  92     val = pop_pair();
  93   } else {
  94     val = pop();
  95   }
  96   pop();                      // index (already used)
  97   Node* array = pop();        // the array itself
  98 
  99   if (elemtype == TypeInt::BOOL) {
 100     bt = T_BOOLEAN;
 101   } else if (bt == T_OBJECT) {
 102     elemtype = _gvn.type(array)->is_aryptr()->elem()->make_oopptr();
 103   }
 104 
 105   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 106 
 107   access_store_at(array, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 108 }
 109 
 110 
 111 //------------------------------array_addressing-------------------------------
 112 // Pull array and index from the stack.  Compute pointer-to-element.
 113 Node* Parse::array_addressing(BasicType type, int vals, const Type* *result2) {
 114   Node *idx   = peek(0+vals);   // Get from stack without popping
 115   Node *ary   = peek(1+vals);   // in case of exception
 116 
 117   // Null check the array base, with correct stack contents
 118   ary = null_check(ary, T_ARRAY);
 119   // Compile-time detect of null-exception?
 120   if (stopped())  return top();
 121 
 122   const TypeAryPtr* arytype  = _gvn.type(ary)->is_aryptr();
 123   const TypeInt*    sizetype = arytype->size();
 124   const Type*       elemtype = arytype->elem();
 125 
 126   if (UseUniqueSubclasses && result2 != NULL) {
 127     const Type* el = elemtype->make_ptr();


1478 
1479   // Sanity check the probability value
1480   assert(prob > 0.0f,"Bad probability in Parser");
1481  // Need xform to put node in hash table
1482   IfNode *iff = create_and_xform_if( control(), tst, prob, cnt );
1483   assert(iff->_prob > 0.0f,"Optimizer made bad probability in parser");
1484   // True branch
1485   { PreserveJVMState pjvms(this);
1486     Node* iftrue  = _gvn.transform( new IfTrueNode (iff) );
1487     set_control(iftrue);
1488 
1489     if (stopped()) {            // Path is dead?
1490       NOT_PRODUCT(explicit_null_checks_elided++);
1491       if (C->eliminate_boxing()) {
1492         // Mark the successor block as parsed
1493         branch_block->next_path_num();
1494       }
1495     } else {                    // Path is live.
1496       // Update method data
1497       profile_taken_branch(target_bci);
1498       adjust_map_after_if(btest, c, prob, branch_block, next_block);
1499       if (!stopped()) {
1500         merge(target_bci);
1501       }
1502     }
1503   }
1504 
1505   // False branch
1506   Node* iffalse = _gvn.transform( new IfFalseNode(iff) );
1507   set_control(iffalse);
1508 
1509   if (stopped()) {              // Path is dead?
1510     NOT_PRODUCT(explicit_null_checks_elided++);
1511     if (C->eliminate_boxing()) {
1512       // Mark the successor block as parsed
1513       next_block->next_path_num();
1514     }
1515   } else  {                     // Path is live.
1516     // Update method data
1517     profile_not_taken_branch();
1518     adjust_map_after_if(BoolTest(btest).negate(), c, 1.0-prob,
1519                         next_block, branch_block);
1520   }
1521 }
1522 
1523 //------------------------------------do_if------------------------------------
1524 void Parse::do_if(BoolTest::mask btest, Node* c) {
1525   int target_bci = iter().get_dest();
1526 
1527   Block* branch_block = successor_for_bci(target_bci);
1528   Block* next_block   = successor_for_bci(iter().next_bci());
1529 
1530   float cnt;
1531   float prob = branch_prediction(cnt, btest, target_bci, c);
1532   float untaken_prob = 1.0 - prob;
1533 
1534   if (prob == PROB_UNKNOWN) {
1535     if (PrintOpto && Verbose) {
1536       tty->print_cr("Never-taken edge stops compilation at bci %d", bci());
1537     }
1538     repush_if_args(); // to gather stats on loop
1539     // We need to mark this branch as taken so that if we recompile we will
1540     // see that it is possible. In the tiered system the interpreter doesn't
1541     // do profiling and by the time we get to the lower tier from the interpreter
1542     // the path may be cold again. Make sure it doesn't look untaken
1543     profile_taken_branch(target_bci, !ProfileInterpreter);
1544     uncommon_trap(Deoptimization::Reason_unreached,


1593   }
1594 
1595   // Generate real control flow
1596   float true_prob = (taken_if_true ? prob : untaken_prob);
1597   IfNode* iff = create_and_map_if(control(), tst, true_prob, cnt);
1598   assert(iff->_prob > 0.0f,"Optimizer made bad probability in parser");
1599   Node* taken_branch   = new IfTrueNode(iff);
1600   Node* untaken_branch = new IfFalseNode(iff);
1601   if (!taken_if_true) {  // Finish conversion to canonical form
1602     Node* tmp      = taken_branch;
1603     taken_branch   = untaken_branch;
1604     untaken_branch = tmp;
1605   }
1606 
1607   // Branch is taken:
1608   { PreserveJVMState pjvms(this);
1609     taken_branch = _gvn.transform(taken_branch);
1610     set_control(taken_branch);
1611 
1612     if (stopped()) {
1613       if (C->eliminate_boxing()) {
1614         // Mark the successor block as parsed
1615         branch_block->next_path_num();
1616       }
1617     } else {
1618       // Update method data
1619       profile_taken_branch(target_bci);
1620       adjust_map_after_if(taken_btest, c, prob, branch_block, next_block);
1621       if (!stopped()) {
1622         merge(target_bci);
1623       }
1624     }
1625   }
1626 
1627   untaken_branch = _gvn.transform(untaken_branch);
1628   set_control(untaken_branch);
1629 
1630   // Branch not taken.
1631   if (stopped()) {
1632     if (C->eliminate_boxing()) {
1633       // Mark the successor block as parsed
1634       next_block->next_path_num();
1635     }
1636   } else {
1637     // Update method data
1638     profile_not_taken_branch();
1639     adjust_map_after_if(untaken_btest, c, untaken_prob,
1640                         next_block, branch_block);
1641   }
1642 }
1643 
1644 bool Parse::path_is_suitable_for_uncommon_trap(float prob) const {
1645   // Don't want to speculate on uncommon traps when running with -Xcomp
1646   if (!UseInterpreter) {
1647     return false;
1648   }
1649   return (seems_never_taken(prob) && seems_stable_comparison());
1650 }
1651 
1652 void Parse::maybe_add_predicate_after_if(Block* path) {
1653   if (path->is_SEL_head() && path->preds_parsed() == 0) {
1654     // Add predicates at bci of if dominating the loop so traps can be
1655     // recorded on the if's profile data
1656     int bc_depth = repush_if_args();
1657     add_predicate();
1658     dec_sp(bc_depth);
1659     path->set_has_predicates();
1660   }
1661 }
1662 
1663 
1664 //----------------------------adjust_map_after_if------------------------------
1665 // Adjust the JVM state to reflect the result of taking this path.
1666 // Basically, it means inspecting the CmpNode controlling this
1667 // branch, seeing how it constrains a tested value, and then
1668 // deciding if it's worth our while to encode this constraint
1669 // as graph nodes in the current abstract interpretation map.
1670 void Parse::adjust_map_after_if(BoolTest::mask btest, Node* c, float prob,
1671                                 Block* path, Block* other_path) {
1672   if (!c->is_Cmp()) {
1673     maybe_add_predicate_after_if(path);
1674     return;
1675   }
1676 
1677   if (stopped() || btest == BoolTest::illegal) {
1678     return;                             // nothing to do
1679   }
1680 
1681   bool is_fallthrough = (path == successor_for_bci(iter().next_bci()));
1682 
1683   if (path_is_suitable_for_uncommon_trap(prob)) {
1684     repush_if_args();
1685     uncommon_trap(Deoptimization::Reason_unstable_if,
1686                   Deoptimization::Action_reinterpret,
1687                   NULL,
1688                   (is_fallthrough ? "taken always" : "taken never"));
1689     return;
1690   }
1691 


1861   if (c->Opcode() == Op_CmpP &&
1862       (c->in(1)->Opcode() == Op_LoadKlass || c->in(1)->Opcode() == Op_DecodeNKlass) &&
1863       c->in(2)->is_Con()) {
1864     Node* load_klass = NULL;
1865     Node* decode = NULL;
1866     if (c->in(1)->Opcode() == Op_DecodeNKlass) {
1867       decode = c->in(1);
1868       load_klass = c->in(1)->in(1);
1869     } else {
1870       load_klass = c->in(1);
1871     }
1872     if (load_klass->in(2)->is_AddP()) {
1873       Node* addp = load_klass->in(2);
1874       Node* obj = addp->in(AddPNode::Address);
1875       const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
1876       if (obj_type->speculative_type_not_null() != NULL) {
1877         ciKlass* k = obj_type->speculative_type();
1878         inc_sp(2);
1879         obj = maybe_cast_profiled_obj(obj, k);
1880         dec_sp(2);
1881         // Make the CmpP use the casted obj
1882         addp = basic_plus_adr(obj, addp->in(AddPNode::Offset));
1883         load_klass = load_klass->clone();
1884         load_klass->set_req(2, addp);
1885         load_klass = _gvn.transform(load_klass);
1886         if (decode != NULL) {
1887           decode = decode->clone();
1888           decode->set_req(1, load_klass);
1889           load_klass = _gvn.transform(decode);
1890         }
1891         c = c->clone();
1892         c->set_req(1, load_klass);
1893         c = _gvn.transform(c);
1894       }
1895     }
1896   }
1897   return c;
1898 }
1899 
1900 //------------------------------do_one_bytecode--------------------------------


2708     // See if we can get some profile data and hand it off to the next block
2709     Block *target_block = block()->successor_for_bci(target_bci);
2710     if (target_block->pred_count() != 1)  break;
2711     ciMethodData* methodData = method()->method_data();
2712     if (!methodData->is_mature())  break;
2713     ciProfileData* data = methodData->bci_to_data(bci());
2714     assert(data != NULL && data->is_JumpData(), "need JumpData for taken branch");
2715     int taken = ((ciJumpData*)data)->taken();
2716     taken = method()->scale_count(taken);
2717     target_block->set_count(taken);
2718     break;
2719   }
2720 
2721   case Bytecodes::_ifnull:    btest = BoolTest::eq; goto handle_if_null;
2722   case Bytecodes::_ifnonnull: btest = BoolTest::ne; goto handle_if_null;
2723   handle_if_null:
2724     // If this is a backwards branch in the bytecodes, add Safepoint
2725     maybe_add_safepoint(iter().get_dest());
2726     a = null();
2727     b = pop();
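       // Speculate from profile data: if 'b' has never been null, install
       // a Reason_speculate_null_check trap and prove it non-null so the
       // CmpP below constant-folds; inc_sp(1) keeps 'b' on the expression
       // stack so a deoptimization here re-executes with a consistent state.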
2728     if (!_gvn.type(b)->speculative_maybe_null() &&
2729         !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2730       inc_sp(1);
2731       Node* null_ctl = top();
2732       b = null_check_oop(b, &null_ctl, true, true, true);
2733       assert(null_ctl->is_top(), "no null control here");
2734       dec_sp(1);
2735     } else if (_gvn.type(b)->speculative_always_null() &&
2736                !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
2737       inc_sp(1);
2738       b = null_assert(b);
2739       dec_sp(1);
2740     }
2741     c = _gvn.transform( new CmpPNode(b, a) );
2742     do_ifnull(btest, c);
2743     break;
2744 
2745   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2746   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2747   handle_if_acmp:
2748     // If this is a backwards branch in the bytecodes, add Safepoint
2749     maybe_add_safepoint(iter().get_dest());
2750     a = access_resolve(pop(), 0);
2751     b = access_resolve(pop(), 0);
2752     c = _gvn.transform( new CmpPNode(b, a) );
2753     c = optimize_cmp_with_klass(c);
2754     do_if(btest, c);
2755     break;
2756 
2757   case Bytecodes::_ifeq: btest = BoolTest::eq; goto handle_ifxx;
2758   case Bytecodes::_ifne: btest = BoolTest::ne; goto handle_ifxx;
2759   case Bytecodes::_iflt: btest = BoolTest::lt; goto handle_ifxx;
2760   case Bytecodes::_ifle: btest = BoolTest::le; goto handle_ifxx;
2761   case Bytecodes::_ifgt: btest = BoolTest::gt; goto handle_ifxx;
2762   case Bytecodes::_ifge: btest = BoolTest::ge; goto handle_ifxx;
2763   handle_ifxx:
2764     // If this is a backwards branch in the bytecodes, add Safepoint
2765     maybe_add_safepoint(iter().get_dest());
2766     a = _gvn.intcon(0);
2767     b = pop();
2768     c = _gvn.transform( new CmpINode(b, a) );
2769     do_if(btest, c);
2770     break;
2771 
2772   case Bytecodes::_if_icmpeq: btest = BoolTest::eq; goto handle_if_icmp;
2773   case Bytecodes::_if_icmpne: btest = BoolTest::ne; goto handle_if_icmp;
2774   case Bytecodes::_if_icmplt: btest = BoolTest::lt; goto handle_if_icmp;


2789     break;
2790 
2791   case Bytecodes::_lookupswitch:
2792     do_lookupswitch();
2793     break;
2794 
2795   case Bytecodes::_invokestatic:
2796   case Bytecodes::_invokedynamic:
2797   case Bytecodes::_invokespecial:
2798   case Bytecodes::_invokevirtual:
2799   case Bytecodes::_invokeinterface:
2800     do_call();
2801     break;
2802   case Bytecodes::_checkcast:
2803     do_checkcast();
2804     break;
2805   case Bytecodes::_instanceof:
2806     do_instanceof();
2807     break;
2808   case Bytecodes::_anewarray:
2809     do_anewarray();
2810     break;
2811   case Bytecodes::_newarray:
2812     do_newarray((BasicType)iter().get_index());
2813     break;
2814   case Bytecodes::_multianewarray:
2815     do_multianewarray();
2816     break;
2817   case Bytecodes::_new:
2818     do_new();
2819     break;
2820 
2821   case Bytecodes::_jsr:
2822   case Bytecodes::_jsr_w:
2823     do_jsr();
2824     break;
2825 
2826   case Bytecodes::_ret:
2827     do_ret();
2828     break;
2829 
2830 
2831   case Bytecodes::_monitorenter:
2832     do_monitor_enter();
2833     break;
2834 
2835   case Bytecodes::_monitorexit:
2836     do_monitor_exit();
2837     break;
2838 




  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciMethodData.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "compiler/compileLog.hpp"
  30 #include "interpreter/linkResolver.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "memory/universe.hpp"
  33 #include "oops/oop.inline.hpp"
  34 #include "opto/addnode.hpp"
  35 #include "opto/castnode.hpp"
  36 #include "opto/convertnode.hpp"
  37 #include "opto/divnode.hpp"
  38 #include "opto/idealGraphPrinter.hpp"
  39 #include "opto/idealKit.hpp"
  40 #include "opto/matcher.hpp"
  41 #include "opto/memnode.hpp"
  42 #include "opto/mulnode.hpp"
  43 #include "opto/opaquenode.hpp"
  44 #include "opto/parse.hpp"
  45 #include "opto/runtime.hpp"
  46 #include "opto/valuetypenode.hpp"
  47 #include "runtime/deoptimization.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 
  50 #ifndef PRODUCT
  51 extern int explicit_null_checks_inserted,
  52            explicit_null_checks_elided;
  53 #endif
  54 
  55 //---------------------------------array_load----------------------------------
  56 void Parse::array_load(BasicType bt) {
  57   const Type* elemtype = Type::TOP;
  58   Node* adr = array_addressing(bt, 0, &elemtype);
  59   if (stopped())  return;     // guaranteed null or range check
  60 
  61   Node* idx = pop();
  62   Node* ary = pop();
  63 
  64   // Handle value type arrays
  65   const TypeOopPtr* elemptr = elemtype->make_oopptr();
  66   const TypeAryPtr* ary_t = _gvn.type(ary)->is_aryptr();
  67   if (elemtype->isa_valuetype() != NULL) {
  68     // Load from flattened value type array
  69     ciValueKlass* vk = elemtype->is_valuetype()->value_klass();
  70     Node* vt = ValueTypeNode::make_from_flattened(this, vk, ary, adr);
  71     push(vt);
  72     return;
  73   } else if (elemptr != NULL && elemptr->is_valuetypeptr()) {
  74     // Load from non-flattened value type array (elements can never be null)
  75     bt = T_VALUETYPE;
  76     assert(elemptr->meet(TypePtr::NULL_PTR) != elemptr, "value type array elements should never be null");
  77   } else if (ValueArrayFlatten && elemptr != NULL && elemptr->can_be_value_type() &&
  78              !ary_t->klass_is_exact()) {
  79     // Cannot statically determine if array is flattened, emit runtime check
  80     IdealKit ideal(this);
  81     IdealVariable res(ideal);
  82     ideal.declarations_done();
  83     Node* kls = load_object_klass(ary);
  84     Node* tag = load_lh_array_tag(kls);
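           // The array tag read from the klass layout helper distinguishes
           // flattened value type arrays (_lh_array_tag_vt_value) from
           // ordinary oop arrays, letting one aaload site handle both layouts.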
  85     ideal.if_then(tag, BoolTest::ne, intcon(Klass::_lh_array_tag_vt_value)); {
  86       // non-flattened
  87       sync_kit(ideal);
  88       const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
  89       elemtype = ary_t->elem()->make_oopptr();
  90       Node* ld = access_load_at(ary, adr, adr_type, elemtype, bt,
  91                                 IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
  92       ideal.sync_kit(this);
  93       ideal.set(res, ld);
  94     } ideal.else_(); {
  95       // flattened
  96       sync_kit(ideal);
  97       Node* k_adr = basic_plus_adr(kls, in_bytes(ArrayKlass::element_klass_offset()));
  98       Node* elem_klass = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), k_adr, TypeInstPtr::KLASS));
  99       Node* obj_size  = NULL;
 100       kill_dead_locals();
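           // inc_sp(2) re-exposes the aaload operands (array and index) on
           // the expression stack so that a deoptimization inside
           // new_instance() re-executes the bytecode with a consistent state.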
 101       inc_sp(2);
 102       Node* alloc_obj = new_instance(elem_klass, NULL, &obj_size, /*deoptimize_on_exception=*/true);
 103       dec_sp(2);
 104 
 105       AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
 106       assert(alloc->maybe_set_complete(&_gvn), "");
 107       alloc->initialization()->set_complete_with_arraycopy();
 108       BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
 109       // The value type is statically unknown, so it might have reference fields
 110       if (!bs->array_copy_requires_gc_barriers(false, T_OBJECT, false, BarrierSetC2::Parsing)) {
 111         int base_off = sizeof(instanceOopDesc);
 112         Node* dst_base = basic_plus_adr(alloc_obj, base_off);
 113         Node* countx = obj_size;
 114         countx = _gvn.transform(new SubXNode(countx, MakeConX(base_off)));
 115         countx = _gvn.transform(new URShiftXNode(countx, intcon(LogBytesPerLong)));
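             // countx is now the element payload size in 8-byte words:
             // (instance size - header) >> LogBytesPerLong, the count used
             // by the raw copy in access_clone() below.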
 116 
 117         assert(Klass::_lh_log2_element_size_shift == 0, "use shift in place");
 118         Node* lhp = basic_plus_adr(kls, in_bytes(Klass::layout_helper_offset()));
 119         Node* elem_shift = make_load(NULL, lhp, TypeInt::INT, T_INT, MemNode::unordered);
 120         uint header = arrayOopDesc::base_offset_in_bytes(T_VALUETYPE);
 121         Node* base  = basic_plus_adr(ary, header);
 122         idx = Compile::conv_I2X_index(&_gvn, idx, TypeInt::POS, control());
 123         Node* scale = _gvn.transform(new LShiftXNode(idx, elem_shift));
 124         Node* adr = basic_plus_adr(ary, base, scale);
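             // adr now addresses the flattened source element: array base
             // plus idx scaled by the per-element size shift read from the
             // layout helper (in the low bits, per the assert above).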
 125 
 126         access_clone(adr, dst_base, countx, false);
 127       } else {
 128         ideal.sync_kit(this);
 129         ideal.make_leaf_call(OptoRuntime::load_unknown_value_Type(),
 130                              CAST_FROM_FN_PTR(address, OptoRuntime::load_unknown_value),
 131                              "load_unknown_value",
 132                              ary, idx, alloc_obj);
 133         sync_kit(ideal);
 134       }
 135 
 136       insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
 137 
 138       ideal.sync_kit(this);
 139       ideal.set(res, alloc_obj);
 140     } ideal.end_if();
 141     sync_kit(ideal);
 142     push_node(bt, ideal.value(res));
 143     return;
 144   }
 145 
 146   if (elemtype == TypeInt::BOOL) {
 147     bt = T_BOOLEAN;
 148   } else if (bt == T_OBJECT) {
 149     elemtype = ary_t->elem()->make_oopptr();
 150   }
 151 
 152   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 153   Node* ld = access_load_at(ary, adr, adr_type, elemtype, bt,
 154                             IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
 155   if (bt == T_VALUETYPE) {
 156     // Loading a non-flattened (but flattenable) value type from an array
 157     assert(!gvn().type(ld)->maybe_null(), "value type array elements should never be null");
 158     if (elemptr->value_klass()->is_scalarizable()) {
 159       ld = ValueTypeNode::make_from_oop(this, ld, elemptr->value_klass());
 160     }
 161   }
 162 
 163   push_node(bt, ld);
 164 }
 165 
 166 
 167 //--------------------------------array_store----------------------------------
 168 void Parse::array_store(BasicType bt) {
 169   const Type* elemtype = Type::TOP;
 170   Node* adr = array_addressing(bt, type2size[bt], &elemtype);
 171   if (stopped())  return;     // guaranteed null or range check
 172   Node* cast_val = NULL;
 173   if (bt == T_OBJECT) {
 174     cast_val = array_store_check();
 175     if (stopped()) return;
 176   }
 177   Node* val = pop_node(bt); // Value to store
 178   Node* idx = pop();        // Index in the array
 179   Node* ary = pop();        // The array itself
 180 
 181   const TypeAryPtr* ary_t = _gvn.type(ary)->is_aryptr();
 182   if (bt == T_OBJECT) {
 183     const TypeOopPtr* elemptr = elemtype->make_oopptr();
 184     const Type* val_t = _gvn.type(val);
 185     if (elemtype->isa_valuetype() != NULL) {
 186       // Store to flattened value type array
 187       if (!cast_val->is_ValueType()) {
 188         inc_sp(3);
 189         cast_val = null_check(cast_val);
 190         if (stopped()) return;
 191         dec_sp(3);
 192         cast_val = ValueTypeNode::make_from_oop(this, cast_val, elemtype->is_valuetype()->value_klass());
 193       }
 194       cast_val->as_ValueType()->store_flattened(this, ary, adr);
 195       return;
 196     } else if (elemptr->is_valuetypeptr()) {
 197       // Store to non-flattened value type array
 198       if (!cast_val->is_ValueType()) {
 199         // Cannot store null into a value type array
 200         inc_sp(3);
 201         cast_val = null_check(cast_val);
 202         if (stopped()) return;
 203         dec_sp(3);
 204       }
 205     } else if (elemptr->can_be_value_type() && !ary_t->klass_is_exact() &&
 206                (val->is_ValueType() || val_t == TypePtr::NULL_PTR || val_t->is_oopptr()->can_be_value_type())) {
 207       if (ValueArrayFlatten) {
 208         IdealKit ideal(this);
 209         Node* kls = load_object_klass(ary);
 210         Node* layout_val = load_lh_array_tag(kls);
 211         ideal.if_then(layout_val, BoolTest::ne, intcon(Klass::_lh_array_tag_vt_value)); {
 212           // non-flattened
 213           sync_kit(ideal);
 214 
 215           if (!val->is_ValueType() && TypePtr::NULL_PTR->higher_equal(val_t)) {
 216             gen_value_type_array_guard(ary, val, 3);
 217           }
 218 
 219           const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 220           elemtype = ary_t->elem()->make_oopptr();
 221           access_store_at(ary, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 222           ideal.sync_kit(this);
 223         } ideal.else_(); {
 224           // flattened
 225           // Object/interface array must be flattened, cast it
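               // The CheckCastPP below narrows 'ary' to the exact value type
               // array klass, so array_element_address() recomputes 'adr'
               // with the flattened element stride instead of the oop stride.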
 226           if (val->is_ValueType()) {
 227             sync_kit(ideal);
 228             const TypeValueType* vt = _gvn.type(val)->is_valuetype();
 229             ciArrayKlass* array_klass = ciArrayKlass::make(vt->value_klass());
 230             const TypeAryPtr* arytype = TypeOopPtr::make_from_klass(array_klass)->isa_aryptr();
 231             ary = _gvn.transform(new CheckCastPPNode(control(), ary, arytype));
 232             adr = array_element_address(ary, idx, T_OBJECT, arytype->size(), control());
 233             val->as_ValueType()->store_flattened(this, ary, adr);
 234             ideal.sync_kit(this);
 235           } else {
 236             if (TypePtr::NULL_PTR->higher_equal(val_t)) {
 237               sync_kit(ideal);
 238               Node* null_ctl = top();
 239               val = null_check_oop(val, &null_ctl);
 240               if (null_ctl != top()) {
 241                 PreserveJVMState pjvms(this);
 242                 inc_sp(3);
 243                 set_control(null_ctl);
 244                 uncommon_trap(Deoptimization::Reason_null_check, Deoptimization::Action_none);
 245                 dec_sp(3);
 246               }
 247               ideal.sync_kit(this);
 248             }
 249             if (!ideal.ctrl()->is_top()) {
 250               ideal.make_leaf_call(OptoRuntime::store_unknown_value_Type(),
 251                                    CAST_FROM_FN_PTR(address, OptoRuntime::store_unknown_value),
 252                                    "store_unknown_value",
 253                                    val, ary, idx);
 254             }
 255           }
 256         } ideal.end_if();
 257         sync_kit(ideal);
 258         return;
 259       } else {
 260         if (!val->is_ValueType() && TypePtr::NULL_PTR->higher_equal(val_t)) {
 261           gen_value_type_array_guard(ary, val, 3);
 262         }
 263       }
 264     }
 265   }
 266 
 267   if (elemtype == TypeInt::BOOL) {
 268     bt = T_BOOLEAN;
 269   } else if (bt == T_OBJECT) {
 270     elemtype = ary_t->elem()->make_oopptr();
 271   }
 272 
 273   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 274 
 275   access_store_at(ary, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 276 }
 277 
 278 
 279 //------------------------------array_addressing-------------------------------
 280 // Pull array and index from the stack.  Compute pointer-to-element.
 281 Node* Parse::array_addressing(BasicType type, int vals, const Type* *result2) {
 282   Node *idx   = peek(0+vals);   // Get from stack without popping
 283   Node *ary   = peek(1+vals);   // in case of exception
 284 
 285   // Null check the array base, with correct stack contents
 286   ary = null_check(ary, T_ARRAY);
 287   // Compile-time detect of null-exception?
 288   if (stopped())  return top();
 289 
 290   const TypeAryPtr* arytype  = _gvn.type(ary)->is_aryptr();
 291   const TypeInt*    sizetype = arytype->size();
 292   const Type*       elemtype = arytype->elem();
 293 
 294   if (UseUniqueSubclasses && result2 != NULL) {
 295     const Type* el = elemtype->make_ptr();


1646 
1647   // Sanity check the probability value
1648   assert(prob > 0.0f,"Bad probability in Parser");
1649  // Need xform to put node in hash table
1650   IfNode *iff = create_and_xform_if( control(), tst, prob, cnt );
1651   assert(iff->_prob > 0.0f,"Optimizer made bad probability in parser");
1652   // True branch
1653   { PreserveJVMState pjvms(this);
1654     Node* iftrue  = _gvn.transform( new IfTrueNode (iff) );
1655     set_control(iftrue);
1656 
1657     if (stopped()) {            // Path is dead?
1658       NOT_PRODUCT(explicit_null_checks_elided++);
1659       if (C->eliminate_boxing()) {
1660         // Mark the successor block as parsed
1661         branch_block->next_path_num();
1662       }
1663     } else {                    // Path is live.
1664       // Update method data
1665       profile_taken_branch(target_bci);
1666       adjust_map_after_if(btest, c, prob, branch_block);
1667       if (!stopped()) {
1668         merge(target_bci);
1669       }
1670     }
1671   }
1672 
1673   // False branch
1674   Node* iffalse = _gvn.transform( new IfFalseNode(iff) );
1675   set_control(iffalse);
1676 
1677   if (stopped()) {              // Path is dead?
1678     NOT_PRODUCT(explicit_null_checks_elided++);
1679     if (C->eliminate_boxing()) {
1680       // Mark the successor block as parsed
1681       next_block->next_path_num();
1682     }
1683   } else  {                     // Path is live.
1684     // Update method data
1685     profile_not_taken_branch();
1686     adjust_map_after_if(BoolTest(btest).negate(), c, 1.0-prob, next_block);
1687   }
1688 }
1689 
1690 //------------------------------------do_if------------------------------------
1691 void Parse::do_if(BoolTest::mask btest, Node* c, bool new_path, Node** ctrl_taken) {
1692   int target_bci = iter().get_dest();
1693 
1694   Block* branch_block = successor_for_bci(target_bci);
1695   Block* next_block   = successor_for_bci(iter().next_bci());
1696 
1697   float cnt;
1698   float prob = branch_prediction(cnt, btest, target_bci, c);
1699   float untaken_prob = 1.0 - prob;
1700 
1701   if (prob == PROB_UNKNOWN) {
1702     if (PrintOpto && Verbose) {
1703       tty->print_cr("Never-taken edge stops compilation at bci %d", bci());
1704     }
1705     repush_if_args(); // to gather stats on loop
1706     // We need to mark this branch as taken so that if we recompile we will
1707     // see that it is possible. In the tiered system the interpreter doesn't
1708     // do profiling and by the time we get to the lower tier from the interpreter
1709     // the path may be cold again. Make sure it doesn't look untaken
1710     profile_taken_branch(target_bci, !ProfileInterpreter);
1711     uncommon_trap(Deoptimization::Reason_unreached,


1760   }
1761 
1762   // Generate real control flow
1763   float true_prob = (taken_if_true ? prob : untaken_prob);
1764   IfNode* iff = create_and_map_if(control(), tst, true_prob, cnt);
1765   assert(iff->_prob > 0.0f,"Optimizer made bad probability in parser");
1766   Node* taken_branch   = new IfTrueNode(iff);
1767   Node* untaken_branch = new IfFalseNode(iff);
1768   if (!taken_if_true) {  // Finish conversion to canonical form
1769     Node* tmp      = taken_branch;
1770     taken_branch   = untaken_branch;
1771     untaken_branch = tmp;
1772   }
1773 
1774   // Branch is taken:
1775   { PreserveJVMState pjvms(this);
1776     taken_branch = _gvn.transform(taken_branch);
1777     set_control(taken_branch);
1778 
1779     if (stopped()) {
1780       if (C->eliminate_boxing() && !new_path) {
1781         // Mark the successor block as parsed (if we haven't created a new path)
1782         branch_block->next_path_num();
1783       }
1784     } else {
1785       // Update method data
1786       profile_taken_branch(target_bci);
1787       adjust_map_after_if(taken_btest, c, prob, branch_block);
1788       if (!stopped()) {
1789         if (new_path) {
1790           // Merge by using a new path
1791           merge_new_path(target_bci);
1792         } else if (ctrl_taken != NULL) {
1793           // Don't merge but save taken branch to be wired by caller
1794           *ctrl_taken = control();
1795         } else {
1796           merge(target_bci);
1797         }
1798       }
1799     }
1800   }
1801 
1802   untaken_branch = _gvn.transform(untaken_branch);
1803   set_control(untaken_branch);
1804 
1805   // Branch not taken.
1806   if (stopped() && ctrl_taken == NULL) {
1807     if (C->eliminate_boxing()) {
1808       // Mark the successor block as parsed (if caller does not re-wire control flow)
1809       next_block->next_path_num();
1810     }
1811   } else {
1812     // Update method data
1813     profile_not_taken_branch();
1814     adjust_map_after_if(untaken_btest, c, untaken_prob, next_block);
1815   }
1816 }
1817 
1818 void Parse::do_acmp(BoolTest::mask btest, Node* a, Node* b) {
1819   ciMethod* subst_method = ciEnv::current()->ValueBootstrapMethods_klass()->find_method(ciSymbol::isSubstitutable_name(), ciSymbol::object_object_boolean_signature());
1820   // If the current method is ValueBootstrapMethods::isSubstitutable(),
1821   // compile the acmp as a regular pointer comparison; otherwise we
1822   // would end up calling ValueBootstrapMethods::isSubstitutable() recursively
1823   if (ACmpOnValues == 0 || method() == subst_method) {
1824     Node* cmp = CmpP(a, b);
1825     cmp = optimize_cmp_with_klass(cmp);
1826     do_if(btest, cmp);
1827     return;
1828   }
1829 
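       // ACmpOnValues selects the acmp lowering used in this method:
       // 0 is a plain pointer compare, 1 perturbs the oop of a value type
       // operand so equality always fails, 3 performs the full
       // substitutability test below; other modes take the combined
       // pointer-check-plus-value-bit-check path at the end of this method.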
1830   if (ACmpOnValues == 3) {
1831     // Substitutability test
1832     if (a->is_ValueType()) {
1833       inc_sp(2);
1834       a = a->as_ValueType()->allocate(this, true)->get_oop();
1835       dec_sp(2);
1836     }
1837     if (b->is_ValueType()) {
1838       inc_sp(2);
1839       b = b->as_ValueType()->allocate(this, true)->get_oop();
1840       dec_sp(2);
1841     }
1842 
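         // Both operands were buffered (allocated) above because the
         // substitutability test compares heap oops; the inc_sp/dec_sp
         // pairs keep the operands on the stack across a potential
         // deoptimization in allocate().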
1843     const TypeOopPtr* ta = _gvn.type(a)->isa_oopptr();
1844     const TypeOopPtr* tb = _gvn.type(b)->isa_oopptr();
1845 
1846     if (ta == NULL || !ta->can_be_value_type_raw() ||
1847         tb == NULL || !tb->can_be_value_type_raw()) {
1848       Node* cmp = CmpP(a, b);
1849       cmp = optimize_cmp_with_klass(cmp);
1850       do_if(btest, cmp);
1851       return;
1852     }
1853 
1854     Node* cmp = CmpP(a, b);
1855     cmp = optimize_cmp_with_klass(cmp);
1856     Node* eq_region = NULL;
1857     if (btest == BoolTest::eq) {
1858       do_if(btest, cmp, true);
1859       if (stopped()) {
1860         return;
1861       }
1862     } else {
1863       assert(btest == BoolTest::ne, "only eq or ne");
1864       Node* is_not_equal = NULL;
1865       eq_region = new RegionNode(3);
1866       {
1867         PreserveJVMState pjvms(this);
1868         do_if(btest, cmp, false, &is_not_equal);
1869         if (!stopped()) {
1870           eq_region->init_req(1, control());
1871         }
1872       }
1873       if (is_not_equal == NULL || is_not_equal->is_top()) {
1874         record_for_igvn(eq_region);
1875         set_control(_gvn.transform(eq_region));
1876         return;
1877       }
1878       set_control(is_not_equal);
1879     }
1880     // Pointers not equal, check for values
1881     Node* ne_region = new RegionNode(6);
1882     inc_sp(2);
1883     Node* null_ctl = top();
1884     Node* not_null_a = null_check_oop(a, &null_ctl, !too_many_traps(Deoptimization::Reason_null_check), false, false);
1885     dec_sp(2);
1886     ne_region->init_req(1, null_ctl);
1887     if (stopped()) {
1888       record_for_igvn(ne_region);
1889       set_control(_gvn.transform(ne_region));
1890       if (btest == BoolTest::ne) {
1891         {
1892           PreserveJVMState pjvms(this);
1893           int target_bci = iter().get_dest();
1894           merge(target_bci);
1895         }
1896         record_for_igvn(eq_region);
1897         set_control(_gvn.transform(eq_region));
1898       }
1899       return;
1900     }
1901 
1902     Node* is_value = is_always_locked(not_null_a);
1903     Node* value_mask = _gvn.MakeConX(markOopDesc::always_locked_pattern);
1904     Node* is_value_cmp = _gvn.transform(new CmpXNode(is_value, value_mask));
1905     Node* is_value_bol = _gvn.transform(new BoolNode(is_value_cmp, BoolTest::ne));
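         // A value type instance carries markOopDesc::always_locked_pattern
         // in its mark word, so comparing the loaded bits against value_mask
         // separates identity objects (true path) from value instances.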
1906     IfNode* is_value_iff = create_and_map_if(control(), is_value_bol, PROB_FAIR, COUNT_UNKNOWN);
1907     Node* not_value = _gvn.transform(new IfTrueNode(is_value_iff));
1908     set_control(_gvn.transform(new IfFalseNode(is_value_iff)));
1909     ne_region->init_req(2, not_value);
1910 
1911     // One of the two pointers refers to a value; check whether both
1912     // are of the same class
1913     inc_sp(2);
1914     null_ctl = top();
1915     Node* not_null_b = null_check_oop(b, &null_ctl, !too_many_traps(Deoptimization::Reason_null_check), false, false);
1916     dec_sp(2);
1917     ne_region->init_req(3, null_ctl);
1918     if (stopped()) {
1919       record_for_igvn(ne_region);
1920       set_control(_gvn.transform(ne_region));
1921       if (btest == BoolTest::ne) {
1922         {
1923           PreserveJVMState pjvms(this);
1924           int target_bci = iter().get_dest();
1925           merge(target_bci);
1926         }
1927         record_for_igvn(eq_region);
1928         set_control(_gvn.transform(eq_region));
1929       }
1930       return;
1931     }
1932     Node* kls_a = load_object_klass(not_null_a);
1933     Node* kls_b = load_object_klass(not_null_b);
1934     Node* kls_cmp = CmpP(kls_a, kls_b);
1935     Node* kls_bol = _gvn.transform(new BoolNode(kls_cmp, BoolTest::ne));
1936     IfNode* kls_iff = create_and_map_if(control(), kls_bol, PROB_FAIR, COUNT_UNKNOWN);
1937     Node* kls_ne = _gvn.transform(new IfTrueNode(kls_iff));
1938     set_control(_gvn.transform(new IfFalseNode(kls_iff)));
1939     ne_region->init_req(4, kls_ne);
1940 
1941     if (stopped()) {
1942       record_for_igvn(ne_region);
1943       set_control(_gvn.transform(ne_region));
1944       if (btest == BoolTest::ne) {
1945         {
1946           PreserveJVMState pjvms(this);
1947           int target_bci = iter().get_dest();
1948           merge(target_bci);
1949         }
1950         record_for_igvn(eq_region);
1951         set_control(_gvn.transform(eq_region));
1952       }
1953       return;
1954     }
1955     // Both are values of the same class, we need to perform a
1956     // substitutability test. Delegate to
1957     // ValueBootstrapMethods::isSubstitutable().
1958 
1959     Node* ne_io_phi = PhiNode::make(ne_region, i_o());
1960     Node* mem = reset_memory();
1961     Node* ne_mem_phi = PhiNode::make(ne_region, mem);
1962 
1963     Node* eq_io_phi = NULL;
1964     Node* eq_mem_phi = NULL;
1965     if (eq_region != NULL) {
1966       eq_io_phi = PhiNode::make(eq_region, i_o());
1967       eq_mem_phi = PhiNode::make(eq_region, mem);
1968     }
1969 
1970     set_all_memory(mem);
1971 
1972     kill_dead_locals();
1973     CallStaticJavaNode *call = new CallStaticJavaNode(C, TypeFunc::make(subst_method), SharedRuntime::get_resolve_static_call_stub(), subst_method, bci());
1974     call->set_override_symbolic_info(true);
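         // The caller has no constant pool reference to isSubstitutable(),
         // so the call node carries its own symbolic info instead of
         // resolving through the caller's constant pool.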
1975     call->init_req(TypeFunc::Parms, not_null_a);
1976     call->init_req(TypeFunc::Parms+1, not_null_b);
1977     inc_sp(2);
1978     set_edges_for_java_call(call, false, false);
1979     Node* ret = set_results_for_java_call(call, false, true);
1980     dec_sp(2);
1981 
1982     // Test the return value of ValueBootstrapMethods::isSubstitutable()
1983     Node* subst_cmp = _gvn.transform(new CmpINode(ret, intcon(1)));
1984     if (btest == BoolTest::eq) {
1985       do_if(btest, subst_cmp);
1986     } else {
1987       assert(btest == BoolTest::ne, "only eq or ne");
1988       Node* is_not_equal = NULL;
1989       {
1990         PreserveJVMState pjvms(this);
1991         do_if(btest, subst_cmp, false, &is_not_equal);
1992         if (!stopped()) {
1993           eq_region->init_req(2, control());
1994           eq_io_phi->init_req(2, i_o());
1995           eq_mem_phi->init_req(2, reset_memory());
1996         }
1997       }
1998       set_control(is_not_equal);
1999     }
2000     ne_region->init_req(5, control());
2001     ne_io_phi->init_req(5, i_o());
2002     ne_mem_phi->init_req(5, reset_memory());
2003 
2004     record_for_igvn(ne_region);
2005     set_control(_gvn.transform(ne_region));
2006     set_i_o(_gvn.transform(ne_io_phi));
2007     set_all_memory(_gvn.transform(ne_mem_phi));
2008 
2009     if (btest == BoolTest::ne) {
2010       {
2011         PreserveJVMState pjvms(this);
2012         int target_bci = iter().get_dest();
2013         merge(target_bci);
2014       }
2015 
2016       record_for_igvn(eq_region);
2017       set_control(_gvn.transform(eq_region));
2018       set_i_o(_gvn.transform(eq_io_phi));
2019       set_all_memory(_gvn.transform(eq_mem_phi));
2020     }
2021 
2022     return;
2023   }
2024   // In the case where both operands might be value types, we need to
2025   // use the new acmp implementation. Otherwise, i.e. if one operand
2026   // is not a value type, we can use the old acmp implementation.
2027   Node* cmp = C->optimize_acmp(&_gvn, a, b);
2028   if (cmp != NULL) {
2029     // Use optimized/old acmp
2030     cmp = optimize_cmp_with_klass(_gvn.transform(cmp));
2031     do_if(btest, cmp);
2032     return;
2033   }
2034 
2035   Node* ctrl = NULL;
2036   bool safe_for_replace = true;
2037   if (ACmpOnValues != 1) {
2038     // Emit old acmp before new acmp for quick a != b check
2039     cmp = CmpP(a, b);
2040     cmp = optimize_cmp_with_klass(_gvn.transform(cmp));
2041     if (btest == BoolTest::ne) {
2042       do_if(btest, cmp, true);
2043       if (stopped()) {
2044         return; // Never equal
2045       }
2046     } else if (btest == BoolTest::eq) {
2047       Node* is_equal = NULL;
2048       {
2049         PreserveJVMState pjvms(this);
2050         do_if(btest, cmp, false, &is_equal);
2051         if (!stopped()) {
2052           // Not equal, skip valuetype check
2053           ctrl = new RegionNode(3);
2054           ctrl->init_req(1, control());
2055           _gvn.set_type(ctrl, Type::CONTROL);
2056           record_for_igvn(ctrl);
2057           safe_for_replace = false;
2058         }
2059       }
2060       if (is_equal == NULL) {
2061         assert(ctrl != NULL, "no control left");
2062         set_control(_gvn.transform(ctrl));
2063         return; // Never equal
2064       }
2065       set_control(is_equal);
2066     }
2067   }
2068 
2069   // Null check operand before loading the is_value bit
2070   bool speculate = false;
2071   if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(b))) {
2072     // Operand 'b' is never null, swap operands to avoid null check
2073     swap(a, b);
2074   } else if (!too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2075     // Speculate on non-nullness of one operand
2076     if (!_gvn.type(a)->speculative_maybe_null()) {
2077       speculate = true;
2078     } else if (!_gvn.type(b)->speculative_maybe_null()) {
2079       speculate = true;
2080       swap(a, b);
2081     }
2082   }
2083   inc_sp(2);
2084   Node* null_ctl = top();
2085   Node* not_null_a = null_check_oop(a, &null_ctl, speculate, safe_for_replace, speculate);
2086   assert(!stopped(), "operand is always null");
2087   dec_sp(2);
2088   Node* region = new RegionNode(2);
2089   Node* is_value = new PhiNode(region, TypeX_X);
2090   if (null_ctl != top()) {
2091     assert(!speculate, "should never be null");
2092     region->add_req(null_ctl);
2093     is_value->add_req(_gvn.MakeConX(0));
2094   }
2095 
2096   Node* value_mask = _gvn.MakeConX(markOopDesc::always_locked_pattern);
2097   if (ACmpOnValues == 1) {
2098     Node* mark_addr = basic_plus_adr(not_null_a, oopDesc::mark_offset_in_bytes());
2099     Node* mark = make_load(NULL, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
2100     Node* not_mark = _gvn.transform(new XorXNode(mark, _gvn.MakeConX(-1)));
2101     Node* andn = _gvn.transform(new AndXNode(not_mark, value_mask));
2102     Node* neg_if_value = _gvn.transform(new SubXNode(andn, _gvn.MakeConX(1)));
2103     is_value->init_req(1, _gvn.transform(new RShiftXNode(neg_if_value, _gvn.intcon(63))));
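         // Branch-free test: for a value type, ~mark & value_mask == 0, so
         // subtracting 1 yields -1 and the arithmetic shift by 63 keeps it
         // -1; for an identity object the result is 0. is_value thus becomes
         // a 0 / -1 mask (this assumes a 64-bit mark word, matching the
         // shift count of 63).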
2104   } else {
2105     is_value->init_req(1, is_always_locked(not_null_a));
2106   }
2107   region->init_req(1, control());
2108 
2109   set_control(_gvn.transform(region));
2110   is_value = _gvn.transform(is_value);
2111 
2112   if (ACmpOnValues == 1) {
2113     // Perturb the oop if the operand is a value type to make the comparison fail
2114     Node* pert = _gvn.transform(new AddPNode(a, a, is_value));
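         // With is_value being 0 or -1, 'pert' equals a - 1 for value types,
         // so the CmpP fails even when a == b: acmp must never treat two
         // value type oops as reference-equal.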
2115     cmp = _gvn.transform(new CmpPNode(pert, b));
2116   } else {
2117     // Check for a value type because we already know that operands are equal
2118     cmp = _gvn.transform(new CmpXNode(is_value, value_mask));
2119     btest = (btest == BoolTest::eq) ? BoolTest::ne : BoolTest::eq;
2120   }
2121   cmp = optimize_cmp_with_klass(cmp);
2122   do_if(btest, cmp);
2123 
2124   if (ctrl != NULL) {
2125     ctrl->init_req(2, control());
2126     set_control(_gvn.transform(ctrl));
2127   }
2128 }
2129 
2130 bool Parse::path_is_suitable_for_uncommon_trap(float prob) const {
2131   // Don't want to speculate on uncommon traps when running with -Xcomp
2132   if (!UseInterpreter) {
2133     return false;
2134   }
2135   return (seems_never_taken(prob) && seems_stable_comparison());
2136 }
2137 
2138 void Parse::maybe_add_predicate_after_if(Block* path) {
2139   if (path->is_SEL_head() && path->preds_parsed() == 0) {
2140     // Add predicates at bci of if dominating the loop so traps can be
2141     // recorded on the if's profile data
2142     int bc_depth = repush_if_args();
2143     add_predicate();
2144     dec_sp(bc_depth);
2145     path->set_has_predicates();
2146   }
2147 }
2148 
2149 
2150 //----------------------------adjust_map_after_if------------------------------
2151 // Adjust the JVM state to reflect the result of taking this path.
2152 // Basically, it means inspecting the CmpNode controlling this
2153 // branch, seeing how it constrains a tested value, and then
2154 // deciding if it's worth our while to encode this constraint
2155 // as graph nodes in the current abstract interpretation map.
2156 void Parse::adjust_map_after_if(BoolTest::mask btest, Node* c, float prob, Block* path) {
2157   if (!c->is_Cmp()) {
2158     maybe_add_predicate_after_if(path);
2159     return;
2160   }
2161 
2162   if (stopped() || btest == BoolTest::illegal) {
2163     return;                             // nothing to do
2164   }
2165 
2166   bool is_fallthrough = (path == successor_for_bci(iter().next_bci()));
2167 
2168   if (path_is_suitable_for_uncommon_trap(prob)) {
2169     repush_if_args();
2170     uncommon_trap(Deoptimization::Reason_unstable_if,
2171                   Deoptimization::Action_reinterpret,
2172                   NULL,
2173                   (is_fallthrough ? "taken always" : "taken never"));
2174     return;
2175   }
2176 


2346   if (c->Opcode() == Op_CmpP &&
2347       (c->in(1)->Opcode() == Op_LoadKlass || c->in(1)->Opcode() == Op_DecodeNKlass) &&
2348       c->in(2)->is_Con()) {
2349     Node* load_klass = NULL;
2350     Node* decode = NULL;
2351     if (c->in(1)->Opcode() == Op_DecodeNKlass) {
2352       decode = c->in(1);
2353       load_klass = c->in(1)->in(1);
2354     } else {
2355       load_klass = c->in(1);
2356     }
2357     if (load_klass->in(2)->is_AddP()) {
2358       Node* addp = load_klass->in(2);
2359       Node* obj = addp->in(AddPNode::Address);
2360       const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2361       if (obj_type->speculative_type_not_null() != NULL) {
2362         ciKlass* k = obj_type->speculative_type();
2363         inc_sp(2);
2364         obj = maybe_cast_profiled_obj(obj, k);
2365         dec_sp(2);
2366         if (obj->is_ValueType()) {
2367           assert(obj->as_ValueType()->is_allocated(&_gvn), "must be allocated");
2368           obj = obj->as_ValueType()->get_oop();
2369         }
2370         // Make the CmpP use the casted obj
2371         addp = basic_plus_adr(obj, addp->in(AddPNode::Offset));
2372         load_klass = load_klass->clone();
2373         load_klass->set_req(2, addp);
2374         load_klass = _gvn.transform(load_klass);
2375         if (decode != NULL) {
2376           decode = decode->clone();
2377           decode->set_req(1, load_klass);
2378           load_klass = _gvn.transform(decode);
2379         }
2380         c = c->clone();
2381         c->set_req(1, load_klass);
2382         c = _gvn.transform(c);
2383       }
2384     }
2385   }
2386   return c;
2387 }
2388 
2389 //------------------------------do_one_bytecode--------------------------------


3197     // See if we can get some profile data and hand it off to the next block
3198     Block *target_block = block()->successor_for_bci(target_bci);
3199     if (target_block->pred_count() != 1)  break;
3200     ciMethodData* methodData = method()->method_data();
3201     if (!methodData->is_mature())  break;
3202     ciProfileData* data = methodData->bci_to_data(bci());
3203     assert(data != NULL && data->is_JumpData(), "need JumpData for taken branch");
3204     int taken = ((ciJumpData*)data)->taken();
3205     taken = method()->scale_count(taken);
3206     target_block->set_count(taken);
3207     break;
3208   }
3209 
3210   case Bytecodes::_ifnull:    btest = BoolTest::eq; goto handle_if_null;
3211   case Bytecodes::_ifnonnull: btest = BoolTest::ne; goto handle_if_null;
3212   handle_if_null:
3213     // If this is a backwards branch in the bytecodes, add Safepoint
3214     maybe_add_safepoint(iter().get_dest());
3215     a = null();
3216     b = pop();
3217     if (b->is_ValueType()) {
3218       // Fold the compare to a constant: 'b' is a value type and thus never null
3219       c = _gvn.makecon(TypeInt::CC_GT);
3220     } else {
3221       if (!_gvn.type(b)->speculative_maybe_null() &&
3222           !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
3223         inc_sp(1);
3224         Node* null_ctl = top();
3225         b = null_check_oop(b, &null_ctl, true, true, true);
3226         assert(null_ctl->is_top(), "no null control here");
3227         dec_sp(1);
3228       } else if (_gvn.type(b)->speculative_always_null() &&
3229                  !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
3230         inc_sp(1);
3231         b = null_assert(b);
3232         dec_sp(1);
3233       }
3234       c = _gvn.transform( new CmpPNode(b, a) );
3235     }
3236     do_ifnull(btest, c);
3237     break;
3238 
3239   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
3240   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
3241   handle_if_acmp:
3242     // If this is a backwards branch in the bytecodes, add Safepoint
3243     maybe_add_safepoint(iter().get_dest());
3244     a = access_resolve(pop(), 0);
3245     b = access_resolve(pop(), 0);
3246     do_acmp(btest, a, b);
3247     break;
3248 
3249   case Bytecodes::_ifeq: btest = BoolTest::eq; goto handle_ifxx;
3250   case Bytecodes::_ifne: btest = BoolTest::ne; goto handle_ifxx;
3251   case Bytecodes::_iflt: btest = BoolTest::lt; goto handle_ifxx;
3252   case Bytecodes::_ifle: btest = BoolTest::le; goto handle_ifxx;
3253   case Bytecodes::_ifgt: btest = BoolTest::gt; goto handle_ifxx;
3254   case Bytecodes::_ifge: btest = BoolTest::ge; goto handle_ifxx;
3255   handle_ifxx:
3256     // If this is a backwards branch in the bytecodes, add Safepoint
3257     maybe_add_safepoint(iter().get_dest());
3258     a = _gvn.intcon(0);
3259     b = pop();
3260     c = _gvn.transform( new CmpINode(b, a) );
3261     do_if(btest, c);
3262     break;
3263 
3264   case Bytecodes::_if_icmpeq: btest = BoolTest::eq; goto handle_if_icmp;
3265   case Bytecodes::_if_icmpne: btest = BoolTest::ne; goto handle_if_icmp;
3266   case Bytecodes::_if_icmplt: btest = BoolTest::lt; goto handle_if_icmp;


3281     break;
3282 
3283   case Bytecodes::_lookupswitch:
3284     do_lookupswitch();
3285     break;
3286 
3287   case Bytecodes::_invokestatic:
3288   case Bytecodes::_invokedynamic:
3289   case Bytecodes::_invokespecial:
3290   case Bytecodes::_invokevirtual:
3291   case Bytecodes::_invokeinterface:
3292     do_call();
3293     break;
3294   case Bytecodes::_checkcast:
3295     do_checkcast();
3296     break;
3297   case Bytecodes::_instanceof:
3298     do_instanceof();
3299     break;
3300   case Bytecodes::_anewarray:
3301     do_newarray();
3302     break;
3303   case Bytecodes::_newarray:
3304     do_newarray((BasicType)iter().get_index());
3305     break;
3306   case Bytecodes::_multianewarray:
3307     do_multianewarray();
3308     break;
3309   case Bytecodes::_new:
3310     do_new();
3311     break;
3312   case Bytecodes::_defaultvalue:
3313     do_defaultvalue();
3314     break;
3315   case Bytecodes::_withfield:
3316     do_withfield();
3317     break;
3318 
3319   case Bytecodes::_jsr:
3320   case Bytecodes::_jsr_w:
3321     do_jsr();
3322     break;
3323 
3324   case Bytecodes::_ret:
3325     do_ret();
3326     break;
3327 
3328 
3329   case Bytecodes::_monitorenter:
3330     do_monitor_enter();
3331     break;
3332 
3333   case Bytecodes::_monitorexit:
3334     do_monitor_exit();
3335     break;
3336 