
src/share/vm/opto/parse2.cpp

rev 8961 : [mq]: diff-shenandoah.patch


  33 #include "opto/addnode.hpp"
  34 #include "opto/castnode.hpp"
  35 #include "opto/convertnode.hpp"
  36 #include "opto/divnode.hpp"
  37 #include "opto/idealGraphPrinter.hpp"
  38 #include "opto/matcher.hpp"
  39 #include "opto/memnode.hpp"
  40 #include "opto/mulnode.hpp"
  41 #include "opto/opaquenode.hpp"
  42 #include "opto/parse.hpp"
  43 #include "opto/runtime.hpp"
  44 #include "runtime/deoptimization.hpp"
  45 #include "runtime/sharedRuntime.hpp"
  46 
  47 extern int explicit_null_checks_inserted,
  48            explicit_null_checks_elided;
  49 
  50 //---------------------------------array_load----------------------------------
  51 void Parse::array_load(BasicType elem_type) {
  52   const Type* elem = Type::TOP;
  53   Node* adr = array_addressing(elem_type, 0, &elem);
  54   if (stopped())  return;     // guaranteed null or range check
  55   dec_sp(2);                  // Pop array and index
  56   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(elem_type);
  57   Node* ld = make_load(control(), adr, elem, elem_type, adr_type, MemNode::unordered);
  58   push(ld);
  59 }
  60 
  61 
  62 //--------------------------------array_store----------------------------------
  63 void Parse::array_store(BasicType elem_type) {
  64   Node* adr = array_addressing(elem_type, 1);
  65   if (stopped())  return;     // guaranteed null or range check
  66   Node* val = pop();
  67   dec_sp(2);                  // Pop array and index
  68   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(elem_type);
  69   store_to_memory(control(), adr, val, elem_type, adr_type, StoreNode::release_if_reference(elem_type));
  70 }
  71 
  72 
  73 //------------------------------array_addressing-------------------------------
  74 // Pull array and index from the stack.  Compute pointer-to-element.
  75 Node* Parse::array_addressing(BasicType type, int vals, const Type* *result2) {
  76   Node *idx   = peek(0+vals);   // Get from stack without popping
  77   Node *ary   = peek(1+vals);   // in case of exception
  78 
  79   // Null check the array base, with correct stack contents
  80   ary = null_check(ary, T_ARRAY);
  81   // Compile-time detect of null-exception?
  82   if (stopped())  return top();
  83 
  84   const TypeAryPtr* arytype  = _gvn.type(ary)->is_aryptr();
  85   const TypeInt*    sizetype = arytype->size();
  86   const Type*       elemtype = arytype->elem();
  87 
  88   if (UseUniqueSubclasses && result2 != NULL) {
  89     const Type* el = elemtype->make_ptr();
  90     if (el && el->isa_instptr()) {
  91       const TypeInstPtr* toop = el->is_instptr();
  92       if (toop->klass()->as_instance_klass()->unique_concrete_subklass()) {
  93         // If we load from "AbstractClass[]" we must see "ConcreteSubClass".
  94         const Type* subklass = Type::get_const_type(toop->klass());
  95         elemtype = subklass->join_speculative(el);


 141       if (C->allow_range_check_smearing()) {
 142         // Do not use builtin_throw, since range checks are sometimes
 143         // made more stringent by an optimistic transformation.
 144         // This creates "tentative" range checks at this point,
 145         // which are not guaranteed to throw exceptions.
 146         // See IfNode::Ideal, is_range_check, adjust_check.
 147         uncommon_trap(Deoptimization::Reason_range_check,
 148                       Deoptimization::Action_make_not_entrant,
 149                       NULL, "range_check");
 150       } else {
 151         // If we have already recompiled with the range-check-widening
 152         // heroic optimization turned off, then we must really be throwing
 153         // range check exceptions.
 154         builtin_throw(Deoptimization::Reason_range_check, idx);
 155       }
 156     }
 157   }
 158   // Check for always knowing you are throwing a range-check exception
 159   if (stopped())  return top();
 160 






 161   Node* ptr = array_element_address(ary, idx, type, sizetype);
 162 
 163   if (result2 != NULL)  *result2 = elemtype;
 164 
 165   assert(ptr != top(), "top should go hand-in-hand with stopped");
 166 
 167   return ptr;
 168 }
 169 
 170 
 171 // returns IfNode
 172 IfNode* Parse::jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask) {
 173   Node   *cmp = _gvn.transform( new CmpINode( a, b)); // two cases: shiftcount > 32 and shiftcount <= 32
 174   Node   *tst = _gvn.transform( new BoolNode( cmp, mask));
 175   IfNode *iff = create_and_map_if( control(), tst, ((mask == BoolTest::eq) ? PROB_STATIC_INFREQUENT : PROB_FAIR), COUNT_UNKNOWN );
 176   return iff;
 177 }
 178 
 179 // return Region node
 180 Node* Parse::jump_if_join(Node* iffalse, Node* iftrue) {


1679     push( a );
1680     break;
1681 
1682   case Bytecodes::_arraylength: {
1683     // Must do null-check with value on expression stack
1684     Node *ary = null_check(peek(), T_ARRAY);
1685     // Compile-time detect of null-exception?
1686     if (stopped())  return;
1687     a = pop();
1688     push(load_array_length(a));
1689     break;
1690   }
1691 
1692   case Bytecodes::_baload: array_load(T_BYTE);   break;
1693   case Bytecodes::_caload: array_load(T_CHAR);   break;
1694   case Bytecodes::_iaload: array_load(T_INT);    break;
1695   case Bytecodes::_saload: array_load(T_SHORT);  break;
1696   case Bytecodes::_faload: array_load(T_FLOAT);  break;
1697   case Bytecodes::_aaload: array_load(T_OBJECT); break;
1698   case Bytecodes::_laload: {
1699     a = array_addressing(T_LONG, 0);
1700     if (stopped())  return;     // guaranteed null or range check
1701     dec_sp(2);                  // Pop array and index
1702     push_pair(make_load(control(), a, TypeLong::LONG, T_LONG, TypeAryPtr::LONGS, MemNode::unordered));
1703     break;
1704   }
1705   case Bytecodes::_daload: {
1706     a = array_addressing(T_DOUBLE, 0);
1707     if (stopped())  return;     // guaranteed null or range check
1708     dec_sp(2);                  // Pop array and index
1709     push_pair(make_load(control(), a, Type::DOUBLE, T_DOUBLE, TypeAryPtr::DOUBLES, MemNode::unordered));
1710     break;
1711   }
1712   case Bytecodes::_bastore: array_store(T_BYTE);  break;
1713   case Bytecodes::_castore: array_store(T_CHAR);  break;
1714   case Bytecodes::_iastore: array_store(T_INT);   break;
1715   case Bytecodes::_sastore: array_store(T_SHORT); break;
1716   case Bytecodes::_fastore: array_store(T_FLOAT); break;
1717   case Bytecodes::_aastore: {
1718     d = array_addressing(T_OBJECT, 1);
1719     if (stopped())  return;     // guaranteed null or range check
1720     array_store_check();
1721     c = pop();                  // Oop to store
1722     b = pop();                  // index (already used)
1723     a = pop();                  // the array itself
1724     const TypeOopPtr* elemtype  = _gvn.type(a)->is_aryptr()->elem()->make_oopptr();
1725     const TypeAryPtr* adr_type = TypeAryPtr::OOPS;





1726     Node* store = store_oop_to_array(control(), a, d, adr_type, c, elemtype, T_OBJECT,
1727                                      StoreNode::release_if_reference(T_OBJECT));
1728     break;
1729   }
1730   case Bytecodes::_lastore: {
1731     a = array_addressing(T_LONG, 2);
1732     if (stopped())  return;     // guaranteed null or range check
1733     c = pop_pair();
1734     dec_sp(2);                  // Pop array and index
1735     store_to_memory(control(), a, c, T_LONG, TypeAryPtr::LONGS, MemNode::unordered);
1736     break;
1737   }
1738   case Bytecodes::_dastore: {
1739     a = array_addressing(T_DOUBLE, 2);
1740     if (stopped())  return;     // guaranteed null or range check
1741     c = pop_pair();
1742     dec_sp(2);                  // Pop array and index
1743     c = dstore_rounding(c);
1744     store_to_memory(control(), a, c, T_DOUBLE, TypeAryPtr::DOUBLES, MemNode::unordered);
1745     break;
1746   }
1747   case Bytecodes::_getfield:
1748     do_getfield();
1749     break;
1750 
1751   case Bytecodes::_getstatic:
1752     do_getstatic();
1753     break;
1754 
1755   case Bytecodes::_putfield:
1756     do_putfield();
1757     break;
1758 
1759   case Bytecodes::_putstatic:


2259     b = pop();
2260     if (!_gvn.type(b)->speculative_maybe_null() &&
2261         !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2262       inc_sp(1);
2263       Node* null_ctl = top();
2264       b = null_check_oop(b, &null_ctl, true, true, true);
2265       assert(null_ctl->is_top(), "no null control here");
2266       dec_sp(1);
2267     }
2268     c = _gvn.transform( new CmpPNode(b, a) );
2269     do_ifnull(btest, c);
2270     break;
2271 
2272   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2273   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2274   handle_if_acmp:
2275     // If this is a backwards branch in the bytecodes, add Safepoint
2276     maybe_add_safepoint(iter().get_dest());
2277     a = pop();
2278     b = pop();

2279     c = _gvn.transform( new CmpPNode(b, a) );
2280     c = optimize_cmp_with_klass(c);
2281     do_if(btest, c);
2282     break;
2283 
2284   case Bytecodes::_ifeq: btest = BoolTest::eq; goto handle_ifxx;
2285   case Bytecodes::_ifne: btest = BoolTest::ne; goto handle_ifxx;
2286   case Bytecodes::_iflt: btest = BoolTest::lt; goto handle_ifxx;
2287   case Bytecodes::_ifle: btest = BoolTest::le; goto handle_ifxx;
2288   case Bytecodes::_ifgt: btest = BoolTest::gt; goto handle_ifxx;
2289   case Bytecodes::_ifge: btest = BoolTest::ge; goto handle_ifxx;
2290   handle_ifxx:
2291     // If this is a backwards branch in the bytecodes, add Safepoint
2292     maybe_add_safepoint(iter().get_dest());
2293     a = _gvn.intcon(0);
2294     b = pop();
2295     c = _gvn.transform( new CmpINode(b, a) );
2296     do_if(btest, c);
2297     break;
2298 




  33 #include "opto/addnode.hpp"
  34 #include "opto/castnode.hpp"
  35 #include "opto/convertnode.hpp"
  36 #include "opto/divnode.hpp"
  37 #include "opto/idealGraphPrinter.hpp"
  38 #include "opto/matcher.hpp"
  39 #include "opto/memnode.hpp"
  40 #include "opto/mulnode.hpp"
  41 #include "opto/opaquenode.hpp"
  42 #include "opto/parse.hpp"
  43 #include "opto/runtime.hpp"
  44 #include "runtime/deoptimization.hpp"
  45 #include "runtime/sharedRuntime.hpp"
  46 
  47 extern int explicit_null_checks_inserted,
  48            explicit_null_checks_elided;
  49 
  50 //---------------------------------array_load----------------------------------
  51 void Parse::array_load(BasicType elem_type) {
  52   const Type* elem = Type::TOP;
  53   Node* adr = array_addressing(elem_type, 0, false, &elem);
  54   if (stopped())  return;     // guaranteed null or range check
  55   dec_sp(2);                  // Pop array and index
  56   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(elem_type);
  57   Node* ld = make_load(control(), adr, elem, elem_type, adr_type, MemNode::unordered);
  58   push(ld);
  59 }
  60 
  61 
  62 //--------------------------------array_store----------------------------------
  63 void Parse::array_store(BasicType elem_type) {
  64   Node* adr = array_addressing(elem_type, 1, true);
  65   if (stopped())  return;     // guaranteed null or range check
  66   Node* val = pop();
  67   dec_sp(2);                  // Pop array and index
  68   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(elem_type);
  69   store_to_memory(control(), adr, val, elem_type, adr_type, StoreNode::release_if_reference(elem_type));
  70 }
  71 
  72 
  73 //------------------------------array_addressing-------------------------------
  74 // Pull array and index from the stack.  Compute pointer-to-element.
  75 Node* Parse::array_addressing(BasicType type, int vals, bool is_store, const Type* *result2) {
  76   Node *idx   = peek(0+vals);   // Get from stack without popping
  77   Node *ary   = peek(1+vals);   // in case of exception
  78 
  79   // Null check the array base, with correct stack contents
  80   ary = null_check(ary, T_ARRAY);
  81   // Compile-time detect of null-exception?
  82   if (stopped())  return top();
  83 
  84   const TypeAryPtr* arytype  = _gvn.type(ary)->is_aryptr();
  85   const TypeInt*    sizetype = arytype->size();
  86   const Type*       elemtype = arytype->elem();
  87 
  88   if (UseUniqueSubclasses && result2 != NULL) {
  89     const Type* el = elemtype->make_ptr();
  90     if (el && el->isa_instptr()) {
  91       const TypeInstPtr* toop = el->is_instptr();
  92       if (toop->klass()->as_instance_klass()->unique_concrete_subklass()) {
  93         // If we load from "AbstractClass[]" we must see "ConcreteSubClass".
  94         const Type* subklass = Type::get_const_type(toop->klass());
  95         elemtype = subklass->join_speculative(el);


 141       if (C->allow_range_check_smearing()) {
 142         // Do not use builtin_throw, since range checks are sometimes
 143         // made more stringent by an optimistic transformation.
 144         // This creates "tentative" range checks at this point,
 145         // which are not guaranteed to throw exceptions.
 146         // See IfNode::Ideal, is_range_check, adjust_check.
 147         uncommon_trap(Deoptimization::Reason_range_check,
 148                       Deoptimization::Action_make_not_entrant,
 149                       NULL, "range_check");
 150       } else {
 151         // If we have already recompiled with the range-check-widening
 152         // heroic optimization turned off, then we must really be throwing
 153         // range check exceptions.
 154         builtin_throw(Deoptimization::Reason_range_check, idx);
 155       }
 156     }
 157   }
 158   // Check for always knowing you are throwing a range-check exception
 159   if (stopped())  return top();
 160 
 161   if (is_store) {
 162     ary = shenandoah_write_barrier(ary);
 163   } else {
 164     ary = shenandoah_read_barrier(ary);
 165   }
 166 
 167   Node* ptr = array_element_address(ary, idx, type, sizetype);
 168 
 169   if (result2 != NULL)  *result2 = elemtype;
 170 
 171   assert(ptr != top(), "top should go hand-in-hand with stopped");
 172 
 173   return ptr;
 174 }
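The new is_store flag decides which Shenandoah barrier the array base passes through before the element address is formed: a store needs the object in to-space (write barrier), a load only needs the current copy (read barrier). As a standalone conceptual model only, with a hypothetical Obj type and resolve/read_barrier/write_barrier helpers standing in for the actual GraphKit nodes and runtime support:

    // Conceptual model, not HotSpot types and not the C2 IR emitted above.
    struct Obj {
      Obj* fwd;   // Brooks-style forwarding pointer; points at the object itself until it is evacuated
      // payload ...
    };

    static Obj* resolve(Obj* o)       { return o->fwd; }      // follow the forwarding pointer
    static Obj* read_barrier(Obj* o)  { return resolve(o); }  // a load only needs the current copy
    static Obj* write_barrier(Obj* o) {
      // a real write barrier would additionally evacuate o out of the collection set here,
      // so the following store hits a stable to-space copy
      return resolve(o);
    }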
 175 
 176 
 177 // returns IfNode
 178 IfNode* Parse::jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask) {
 179   Node   *cmp = _gvn.transform( new CmpINode( a, b)); // two cases: shiftcount > 32 and shiftcount <= 32
 180   Node   *tst = _gvn.transform( new BoolNode( cmp, mask));
 181   IfNode *iff = create_and_map_if( control(), tst, ((mask == BoolTest::eq) ? PROB_STATIC_INFREQUENT : PROB_FAIR), COUNT_UNKNOWN );
 182   return iff;
 183 }
 184 
 185 // return Region node
 186 Node* Parse::jump_if_join(Node* iffalse, Node* iftrue) {


1685     push( a );
1686     break;
1687 
1688   case Bytecodes::_arraylength: {
1689     // Must do null-check with value on expression stack
1690     Node *ary = null_check(peek(), T_ARRAY);
1691     // Compile-time detect of null-exception?
1692     if (stopped())  return;
1693     a = pop();
1694     push(load_array_length(a));
1695     break;
1696   }
1697 
1698   case Bytecodes::_baload: array_load(T_BYTE);   break;
1699   case Bytecodes::_caload: array_load(T_CHAR);   break;
1700   case Bytecodes::_iaload: array_load(T_INT);    break;
1701   case Bytecodes::_saload: array_load(T_SHORT);  break;
1702   case Bytecodes::_faload: array_load(T_FLOAT);  break;
1703   case Bytecodes::_aaload: array_load(T_OBJECT); break;
1704   case Bytecodes::_laload: {
1705     a = array_addressing(T_LONG, 0, false);
1706     if (stopped())  return;     // guaranteed null or range check
1707     dec_sp(2);                  // Pop array and index
1708     push_pair(make_load(control(), a, TypeLong::LONG, T_LONG, TypeAryPtr::LONGS, MemNode::unordered));
1709     break;
1710   }
1711   case Bytecodes::_daload: {
1712     a = array_addressing(T_DOUBLE, 0, false);
1713     if (stopped())  return;     // guaranteed null or range check
1714     dec_sp(2);                  // Pop array and index
1715     push_pair(make_load(control(), a, Type::DOUBLE, T_DOUBLE, TypeAryPtr::DOUBLES, MemNode::unordered));
1716     break;
1717   }
1718   case Bytecodes::_bastore: array_store(T_BYTE);  break;
1719   case Bytecodes::_castore: array_store(T_CHAR);  break;
1720   case Bytecodes::_iastore: array_store(T_INT);   break;
1721   case Bytecodes::_sastore: array_store(T_SHORT); break;
1722   case Bytecodes::_fastore: array_store(T_FLOAT); break;
1723   case Bytecodes::_aastore: {
1724     d = array_addressing(T_OBJECT, 1, true);
1725     if (stopped())  return;     // guaranteed null or range check
1726     array_store_check();
1727     c = pop();                  // Oop to store
1728     b = pop();                  // index (already used)
1729     a = pop();                  // the array itself
1730     const TypeOopPtr* elemtype  = _gvn.type(a)->is_aryptr()->elem()->make_oopptr();
1731     const TypeAryPtr* adr_type = TypeAryPtr::OOPS;
1732     // Note: We don't need a write barrier for Shenandoah on a here, because
1733     // a is not used except for an assert. The address d already has the
1734     // write barrier. Adding a barrier on a only results in additional code
1735     // being generated.
1736     c = shenandoah_read_barrier_nomem(c);
1737     Node* store = store_oop_to_array(control(), a, d, adr_type, c, elemtype, T_OBJECT,
1738                                      StoreNode::release_if_reference(T_OBJECT));
1739     break;
1740   }
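In the aastore case the destination address d already carries the write barrier (array_addressing was called with is_store == true), so only the value still needs treatment: shenandoah_read_barrier_nomem resolves c before the store so that a from-space reference never reaches the heap. Continuing the conceptual model sketched above (elements stands for the array body of the to-space copy; all names remain hypothetical):

    // Net effect of the aastore path in the model, not the actual parse-time graph.
    static void aastore_sketch(Obj** elements, int idx, Obj* val) {
      elements[idx] = resolve(val);   // what shenandoah_read_barrier_nomem guarantees for the stored value
    }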
1741   case Bytecodes::_lastore: {
1742     a = array_addressing(T_LONG, 2, true);
1743     if (stopped())  return;     // guaranteed null or range check
1744     c = pop_pair();
1745     dec_sp(2);                  // Pop array and index
1746     store_to_memory(control(), a, c, T_LONG, TypeAryPtr::LONGS, MemNode::unordered);
1747     break;
1748   }
1749   case Bytecodes::_dastore: {
1750     a = array_addressing(T_DOUBLE, 2, true);
1751     if (stopped())  return;     // guaranteed null or range check
1752     c = pop_pair();
1753     dec_sp(2);                  // Pop array and index
1754     c = dstore_rounding(c);
1755     store_to_memory(control(), a, c, T_DOUBLE, TypeAryPtr::DOUBLES, MemNode::unordered);
1756     break;
1757   }
1758   case Bytecodes::_getfield:
1759     do_getfield();
1760     break;
1761 
1762   case Bytecodes::_getstatic:
1763     do_getstatic();
1764     break;
1765 
1766   case Bytecodes::_putfield:
1767     do_putfield();
1768     break;
1769 
1770   case Bytecodes::_putstatic:


2270     b = pop();
2271     if (!_gvn.type(b)->speculative_maybe_null() &&
2272         !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2273       inc_sp(1);
2274       Node* null_ctl = top();
2275       b = null_check_oop(b, &null_ctl, true, true, true);
2276       assert(null_ctl->is_top(), "no null control here");
2277       dec_sp(1);
2278     }
2279     c = _gvn.transform( new CmpPNode(b, a) );
2280     do_ifnull(btest, c);
2281     break;
2282 
2283   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2284   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2285   handle_if_acmp:
2286     // If this is a backwards branch in the bytecodes, add Safepoint
2287     maybe_add_safepoint(iter().get_dest());
2288     a = pop();
2289     b = pop();
2290     shenandoah_acmp_barrier(a, b);
2291     c = _gvn.transform( new CmpPNode(b, a) );
2292     c = optimize_cmp_with_klass(c);
2293     do_if(btest, c);
2294     break;
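shenandoah_acmp_barrier is needed because, under forwarding pointers, a from-space oop and its to-space copy are the same logical object at two different addresses, so a raw CmpP on unresolved operands could report equal references as unequal. Continuing the conceptual model above, the property the barrier has to establish is roughly the following (whether the parser emits exactly this fast-path shape is not shown in this hunk):

    // Reference equality in the model: equal logical objects must compare equal.
    static bool acmp_eq(Obj* a, Obj* b) {
      if (a == b) return true;              // same address, same object
      return resolve(a) == resolve(b);      // otherwise compare the forwarded addresses
    }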
2295 
2296   case Bytecodes::_ifeq: btest = BoolTest::eq; goto handle_ifxx;
2297   case Bytecodes::_ifne: btest = BoolTest::ne; goto handle_ifxx;
2298   case Bytecodes::_iflt: btest = BoolTest::lt; goto handle_ifxx;
2299   case Bytecodes::_ifle: btest = BoolTest::le; goto handle_ifxx;
2300   case Bytecodes::_ifgt: btest = BoolTest::gt; goto handle_ifxx;
2301   case Bytecodes::_ifge: btest = BoolTest::ge; goto handle_ifxx;
2302   handle_ifxx:
2303     // If this is a backwards branch in the bytecodes, add Safepoint
2304     maybe_add_safepoint(iter().get_dest());
2305     a = _gvn.intcon(0);
2306     b = pop();
2307     c = _gvn.transform( new CmpINode(b, a) );
2308     do_if(btest, c);
2309     break;
2310 

