
src/share/vm/opto/compile.cpp

rev 8961 : [mq]: diff-shenandoah.patch

Old version:

  44 #include "opto/connode.hpp"
  45 #include "opto/convertnode.hpp"
  46 #include "opto/divnode.hpp"
  47 #include "opto/escape.hpp"
  48 #include "opto/idealGraphPrinter.hpp"
  49 #include "opto/loopnode.hpp"
  50 #include "opto/machnode.hpp"
  51 #include "opto/macro.hpp"
  52 #include "opto/matcher.hpp"
  53 #include "opto/mathexactnode.hpp"
  54 #include "opto/memnode.hpp"
  55 #include "opto/mulnode.hpp"
  56 #include "opto/narrowptrnode.hpp"
  57 #include "opto/node.hpp"
  58 #include "opto/opcodes.hpp"
  59 #include "opto/output.hpp"
  60 #include "opto/parse.hpp"
  61 #include "opto/phaseX.hpp"
  62 #include "opto/rootnode.hpp"
  63 #include "opto/runtime.hpp"

  64 #include "opto/stringopts.hpp"
  65 #include "opto/type.hpp"
  66 #include "opto/vectornode.hpp"
  67 #include "runtime/arguments.hpp"
  68 #include "runtime/sharedRuntime.hpp"
  69 #include "runtime/signature.hpp"
  70 #include "runtime/stubRoutines.hpp"
  71 #include "runtime/timer.hpp"
  72 #include "utilities/copy.hpp"
  73 
  74 
  75 // -------------------- Compile::mach_constant_base_node -----------------------
  76 // Constant table base node singleton.
  77 MachConstantBaseNode* Compile::mach_constant_base_node() {
  78   if (_mach_constant_base_node == NULL) {
  79     _mach_constant_base_node = new MachConstantBaseNode();
  80     _mach_constant_base_node->add_req(C->root());
  81   }
  82   return _mach_constant_base_node;
  83 }


 740 
 741     // Put top into the hash table ASAP.
 742     initial_gvn()->transform_no_reclaim(top());
 743 
 744     // Set up tf(), start(), and find a CallGenerator.
 745     CallGenerator* cg = NULL;
 746     if (is_osr_compilation()) {
 747       const TypeTuple *domain = StartOSRNode::osr_domain();
 748       const TypeTuple *range = TypeTuple::make_range(method()->signature());
 749       init_tf(TypeFunc::make(domain, range));
 750       StartNode* s = new StartOSRNode(root(), domain);
 751       initial_gvn()->set_type_bottom(s);
 752       init_start(s);
 753       cg = CallGenerator::for_osr(method(), entry_bci());
 754     } else {
 755       // Normal case.
 756       init_tf(TypeFunc::make(method()));
 757       StartNode* s = new StartNode(root(), tf()->domain());
 758       initial_gvn()->set_type_bottom(s);
 759       init_start(s);
 760       if (method()->intrinsic_id() == vmIntrinsics::_Reference_get && UseG1GC) {

 761         // With java.lang.ref.Reference.get() we must go through the
 762         // intrinsic when G1 is enabled - even when get() is the root
 763         // method of the compile - so that, if necessary, the value in
 764         // the referent field of the reference object gets recorded by
 765         // the pre-barrier code.
 766         // Specifically, if G1 is enabled, the value in the referent
 767         // field is recorded by the G1 SATB pre-barrier. This will
 768         // result in the referent being marked live and the reference
 769         // object removed from the list of discovered references during
 770         // reference processing.
 771         cg = find_intrinsic(method(), false);
 772       }
 773       if (cg == NULL) {
 774         float past_uses = method()->interpreter_invocation_count();
 775         float expected_uses = past_uses;
 776         cg = CallGenerator::for_inline(method(), expected_uses);
 777       }
 778     }
 779     if (failing())  return;
 780     if (cg == NULL) {
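
A note on the pre-barrier being described: its effect on the referent boils down to a conditional enqueue of the previous value. The following is a minimal sketch of that effect, where marking_is_active() and satb_enqueue() are hypothetical stand-ins for the real queue plumbing, not HotSpot API:

    // Sketch only: if concurrent marking is active and the previous
    // value (the referent) is non-null, record it in the thread's SATB
    // queue so it is marked live and dropped from discovered references.
    static void satb_pre_barrier_sketch(JavaThread* thread, oop pre_val) {
      if (marking_is_active(thread) && pre_val != NULL) {  // hypothetical helper
        satb_enqueue(thread, pre_val);                     // hypothetical helper
      }
    }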


1417     }
1418   } else if( ta && _AliasLevel >= 2 ) {
1419     // For arrays indexed by constant indices, we flatten the alias
1420     // space to include all of the array body.  Only the header, klass
1421     // and array length can be accessed un-aliased.
1422     if( offset != Type::OffsetBot ) {
1423       if( ta->const_oop() ) { // MethodData* or Method*
1424         offset = Type::OffsetBot;   // Flatten constant access into array body
1425         tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),ta->ary(),ta->klass(),false,offset);
1426       } else if( offset == arrayOopDesc::length_offset_in_bytes() ) {
1427         // range is OK as-is.
1428         tj = ta = TypeAryPtr::RANGE;
1429       } else if( offset == oopDesc::klass_offset_in_bytes() ) {
1430         tj = TypeInstPtr::KLASS; // all klass loads look alike
1431         ta = TypeAryPtr::RANGE; // generic ignored junk
1432         ptr = TypePtr::BotPTR;
1433       } else if( offset == oopDesc::mark_offset_in_bytes() ) {
1434         tj = TypeInstPtr::MARK;
1435         ta = TypeAryPtr::RANGE; // generic ignored junk
1436         ptr = TypePtr::BotPTR;



1437       } else {                  // Random constant offset into array body
1438         offset = Type::OffsetBot;   // Flatten constant access into array body
1439         tj = ta = TypeAryPtr::make(ptr,ta->ary(),ta->klass(),false,offset);
1440       }
1441     }
1442     // Arrays of fixed size alias with arrays of unknown size.
1443     if (ta->size() != TypeInt::POS) {
1444       const TypeAry *tary = TypeAry::make(ta->elem(), TypeInt::POS);
1445       tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,ta->klass(),false,offset);
1446     }
1447     // Arrays of known objects become arrays of unknown objects.
1448     if (ta->elem()->isa_narrowoop() && ta->elem() != TypeNarrowOop::BOTTOM) {
1449       const TypeAry *tary = TypeAry::make(TypeNarrowOop::BOTTOM, ta->size());
1450       tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,NULL,false,offset);
1451     }
1452     if (ta->elem()->isa_oopptr() && ta->elem() != TypeInstPtr::BOTTOM) {
1453       const TypeAry *tary = TypeAry::make(TypeInstPtr::BOTTOM, ta->size());
1454       tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,NULL,false,offset);
1455     }
1456     // Arrays of bytes and of booleans both use 'bastore' and 'baload' so
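
The flattening rule spelled out in the comment above can be restated compactly. This helper is a schematic paraphrase for illustration, not a function in compile.cpp:

    // Only the mark word, klass word and array length keep their own
    // alias classes; every other constant offset is widened to
    // OffsetBot, so a[0], a[1] and a[i] all land in one alias class.
    static int flatten_array_offset_sketch(int offset) {
      if (offset == oopDesc::mark_offset_in_bytes())        return offset;
      if (offset == oopDesc::klass_offset_in_bytes())       return offset;
      if (offset == arrayOopDesc::length_offset_in_bytes()) return offset;
      return Type::OffsetBot;
    }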


1481         tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,offset);
1482       }
1483     } else if( is_known_inst ) {
1484       tj = to; // Keep NotNull and klass_is_exact for instance type
1485     } else if( ptr == TypePtr::NotNull || to->klass_is_exact() ) {
1486       // During the 2nd round of IterGVN, NotNull castings are removed.
1487       // Make sure the Bottom and NotNull variants alias the same.
1488       // Also, make sure exact and non-exact variants alias the same.
1489       tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,offset);
1490     }
1491     if (to->speculative() != NULL) {
1492       tj = to = TypeInstPtr::make(to->ptr(),to->klass(),to->klass_is_exact(),to->const_oop(),to->offset(), to->instance_id());
1493     }
1494     // Canonicalize the holder of this field
1495     if (offset >= 0 && offset < instanceOopDesc::base_offset_in_bytes()) {
1496       // First handle header references such as a LoadKlassNode, even if the
1497       // object's klass is unloaded at compile time (4965979).
1498       if (!is_known_inst) { // Do it only for non-instance types
1499         tj = to = TypeInstPtr::make(TypePtr::BotPTR, env()->Object_klass(), false, NULL, offset);
1500       }
1501     } else if (offset < 0 || offset >= k->size_helper() * wordSize) {
1502       // Static fields are in the space above the normal instance
1503       // fields in the java.lang.Class instance.
1504       if (to->klass() != ciEnv::current()->Class_klass()) {
1505         to = NULL;
1506         tj = TypeOopPtr::BOTTOM;
1507         offset = tj->offset();
1508       }
1509     } else {
1510       ciInstanceKlass *canonical_holder = k->get_canonical_holder(offset);
1511       if (!k->equals(canonical_holder) || tj->offset() != offset) {
1512         if( is_known_inst ) {
1513           tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, true, NULL, offset, to->instance_id());
1514         } else {
1515           tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, false, NULL, offset);
1516         }
1517       }
1518     }
1519   }
1520 
1521   // Klass pointers to object array klasses need some flattening


1579     case Type::AnyPtr:   tj = TypePtr::BOTTOM;      break;  // caller checks it
1580     default: ShouldNotReachHere();
1581     }
1582     break;
1583   case 2:                       // No collapsing at level 2; keep all splits
1584   case 3:                       // No collapsing at level 3; keep all splits
1585     break;
1586   default:
1587     Unimplemented();
1588   }
1589 
1590   offset = tj->offset();
1591   assert( offset != Type::OffsetTop, "Offset has fallen from constant" );
1592 
1593   assert( (offset != Type::OffsetBot && tj->base() != Type::AryPtr) ||
1594           (offset == Type::OffsetBot && tj->base() == Type::AryPtr) ||
1595           (offset == Type::OffsetBot && tj == TypeOopPtr::BOTTOM) ||
1596           (offset == Type::OffsetBot && tj == TypePtr::BOTTOM) ||
1597           (offset == oopDesc::mark_offset_in_bytes() && tj->base() == Type::AryPtr) ||
1598           (offset == oopDesc::klass_offset_in_bytes() && tj->base() == Type::AryPtr) ||
1599           (offset == arrayOopDesc::length_offset_in_bytes() && tj->base() == Type::AryPtr)  ,

1600           "For oops, klasses, raw offset must be constant; for arrays the offset is never known" );
1601   assert( tj->ptr() != TypePtr::TopPTR &&
1602           tj->ptr() != TypePtr::AnyNull &&
1603           tj->ptr() != TypePtr::Null, "No imprecise addresses" );
1604 //    assert( tj->ptr() != TypePtr::Constant ||
1605 //            tj->base() == Type::RawPtr ||
1606 //            tj->base() == Type::KlassPtr, "No constant oop addresses" );
1607 
1608   return tj;
1609 }
1610 
1611 void Compile::AliasType::Init(int i, const TypePtr* at) {
1612   _index = i;
1613   _adr_type = at;
1614   _field = NULL;
1615   _element = NULL;
1616   _is_rewritable = true; // default
1617   const TypeOopPtr *atoop = (at != NULL) ? at->isa_oopptr() : NULL;
1618   if (atoop != NULL && atoop->is_known_instance()) {
1619     const TypeOopPtr *gt = atoop->cast_to_instance_id(TypeOopPtr::InstanceBot);


2833       }
2834     }
2835 #endif
2836     break;
2837   }
2838 
2839   case Op_CastPP: {
2840     // Remove CastPP nodes to gain more freedom during scheduling but
2841     // keep the dependency they encode as control or precedence edges
2842     // (if control is set already) on memory operations. Some CastPP
2843     // nodes don't have a control (don't carry a dependency): skip
2844     // those.
2845     if (n->in(0) != NULL) {
2846       ResourceMark rm;
2847       Unique_Node_List wq;
2848       wq.push(n);
2849       for (uint next = 0; next < wq.size(); ++next) {
2850         Node *m = wq.at(next);
2851         for (DUIterator_Fast imax, i = m->fast_outs(imax); i < imax; i++) {
2852           Node* use = m->fast_out(i);
2853           if (use->is_Mem() || use->is_EncodeNarrowPtr()) {
2854             use->ensure_control_or_add_prec(n->in(0));
2855           } else if (use->in(0) == NULL) {
2856             switch(use->Opcode()) {
2857             case Op_AddP:
2858             case Op_DecodeN:
2859             case Op_DecodeNKlass:
2860             case Op_CheckCastPP:
2861             case Op_CastPP:
2862               wq.push(use);
2863               break;
2864             }
2865           }
2866         }
2867       }
2868     }
2869     const bool is_LP64 = LP64_ONLY(true) NOT_LP64(false);
2870     if (is_LP64 && n->in(1)->is_DecodeN() && Matcher::gen_narrow_oop_implicit_null_checks()) {
2871       Node* in1 = n->in(1);
2872       const Type* t = n->bottom_type();
2873       Node* new_in1 = in1->clone();
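
The key helper here is ensure_control_or_add_prec(). Its behavior, sketched below from node.cpp, is to keep the dependency either as the control input or, if control is already occupied, as an extra precedence edge:

    // Use the free control slot if there is one, otherwise record the
    // dependency as a precedence edge so scheduling still honors it.
    void Node::ensure_control_or_add_prec(Node* c) {
      if (in(0) == NULL) {
        set_req(0, c);
      } else if (in(0) != c) {
        add_prec(c);
      }
    }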


3157         }
3158       } else {
3159         if (t == NULL || t->_lo < 0 || t->_hi > (int)mask) {
3160           Node* shift = new AndINode(in2, ConNode::make(TypeInt::make(mask)));
3161           n->set_req(2, shift);
3162         }
3163       }
3164       if (in2->outcnt() == 0) { // Remove dead node
3165         in2->disconnect_inputs(NULL, this);
3166       }
3167     }
3168     break;
3169   case Op_MemBarStoreStore:
3170   case Op_MemBarRelease:
3171     // Break the link with AllocateNode: it is no longer useful and
3172     // confuses register allocation.
3173     if (n->req() > MemBarNode::Precedent) {
3174       n->set_req(MemBarNode::Precedent, top());
3175     }
3176     break;
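
For the shift-count masking above: in the surrounding code (truncated in this hunk) mask is BitsPerInt - 1 (31) for int shifts and BitsPerLong - 1 (63) for long shifts, and the AndINode is only inserted when the count's type cannot be proven in range. A worked example of what the clamp computes:

    // What the inserted AndINode amounts to for a 32-bit shift: the
    // count is reduced mod 32, matching the JVM's ishl/ishr semantics
    // on CPUs whose shift instructions do not mask the count themselves.
    static int shift_left_sketch(int x, int count) {
      return x << (count & 31);
    }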





3177   default:
3178     assert( !n->is_Call(), "" );
3179     assert( !n->is_Mem(), "" );
3180     assert( nop != Op_ProfileBoolean, "should be eliminated during IGVN");
3181     break;
3182   }
3183 
3184   // Collect CFG split points
3185   if (n->is_MultiBranch())
3186     frc._tests.push(n);
3187 }
3188 
3189 //------------------------------final_graph_reshaping_walk---------------------
3190 // Replacing Opaque nodes with their input in final_graph_reshaping_impl()
3191 // requires that the walk visits a node's inputs before visiting the node.
3192 void Compile::final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ) {
3193   ResourceArea *area = Thread::current()->resource_area();
3194   Unique_Node_List sfpt(area);
3195 
3196   frc._visited.set(root->_idx); // first, mark node as visited
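
In other words, the walk is an iterative post-order DFS over the inputs. A minimal sketch of the assumed shape (the real walker also follows precedence edges and collects safepoints):

    // A node is processed only after every input has been visited.
    static void postorder_walk_sketch(Node* root, VectorSet& visited) {
      Node_Stack stack(0);
      visited.set(root->_idx);
      stack.push(root, 0);
      while (stack.is_nonempty()) {
        Node* n   = stack.node();
        uint  idx = stack.index();
        if (idx < n->req()) {
          stack.set_index(idx + 1);          // advance before descending
          Node* in = n->in(idx);
          if (in != NULL && !visited.test_set(in->_idx)) {
            stack.push(in, 0);               // visit this input first
          }
        } else {
          stack.pop();                       // inputs done: process n now
          // final_graph_reshaping_impl(n, frc) runs here in the real code
        }
      }
    }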


3523           if (use->is_Con())        continue;  // a dead ConNode is OK
3524           // At this point, we have found a dead node which is DU-reachable.
3525           if (!dead_nodes) {
3526             tty->print_cr("*** Dead nodes reachable via DU edges:");
3527             dead_nodes = true;
3528           }
3529           use->dump(2);
3530           tty->print_cr("---");
3531           checked.push(use);  // No repeats; pretend it is now checked.
3532         }
3533       }
3534       assert(!dead_nodes, "using nodes must be reachable from root");
3535     }
3536   }
3537 }
3538 
3539 // Verify GC barriers consistency
3540 // Currently supported:
3541 // - G1 pre-barriers (see GraphKit::g1_write_barrier_pre())
3542 void Compile::verify_barriers() {
3543   if (UseG1GC) {
3544     // Verify G1 pre-barriers
3545     const int marking_offset = in_bytes(JavaThread::satb_mark_queue_offset() + PtrQueue::byte_offset_of_active());
3546 
3547     ResourceArea *area = Thread::current()->resource_area();
3548     Unique_Node_List visited(area);
3549     Node_List worklist(area);
3550     // We're going to walk control flow backwards starting from the Root
3551     worklist.push(_root);
3552     while (worklist.size() > 0) {
3553       Node* x = worklist.pop();
3554       if (x == NULL || x == top()) continue;
3555       if (visited.member(x)) {
3556         continue;
3557       } else {
3558         visited.push(x);
3559       }
3560 
3561       if (x->is_Region()) {
3562         for (uint i = 1; i < x->req(); i++) {
3563           worklist.push(x->in(i));


New version (rev 8961, with diff-shenandoah.patch applied):

  44 #include "opto/connode.hpp"
  45 #include "opto/convertnode.hpp"
  46 #include "opto/divnode.hpp"
  47 #include "opto/escape.hpp"
  48 #include "opto/idealGraphPrinter.hpp"
  49 #include "opto/loopnode.hpp"
  50 #include "opto/machnode.hpp"
  51 #include "opto/macro.hpp"
  52 #include "opto/matcher.hpp"
  53 #include "opto/mathexactnode.hpp"
  54 #include "opto/memnode.hpp"
  55 #include "opto/mulnode.hpp"
  56 #include "opto/narrowptrnode.hpp"
  57 #include "opto/node.hpp"
  58 #include "opto/opcodes.hpp"
  59 #include "opto/output.hpp"
  60 #include "opto/parse.hpp"
  61 #include "opto/phaseX.hpp"
  62 #include "opto/rootnode.hpp"
  63 #include "opto/runtime.hpp"
  64 #include "opto/shenandoahSupport.hpp"
  65 #include "opto/stringopts.hpp"
  66 #include "opto/type.hpp"
  67 #include "opto/vectornode.hpp"
  68 #include "runtime/arguments.hpp"
  69 #include "runtime/sharedRuntime.hpp"
  70 #include "runtime/signature.hpp"
  71 #include "runtime/stubRoutines.hpp"
  72 #include "runtime/timer.hpp"
  73 #include "utilities/copy.hpp"
  74 
  75 
  76 // -------------------- Compile::mach_constant_base_node -----------------------
  77 // Constant table base node singleton.
  78 MachConstantBaseNode* Compile::mach_constant_base_node() {
  79   if (_mach_constant_base_node == NULL) {
  80     _mach_constant_base_node = new MachConstantBaseNode();
  81     _mach_constant_base_node->add_req(C->root());
  82   }
  83   return _mach_constant_base_node;
  84 }


 741 
 742     // Put top into the hash table ASAP.
 743     initial_gvn()->transform_no_reclaim(top());
 744 
 745     // Set up tf(), start(), and find a CallGenerator.
 746     CallGenerator* cg = NULL;
 747     if (is_osr_compilation()) {
 748       const TypeTuple *domain = StartOSRNode::osr_domain();
 749       const TypeTuple *range = TypeTuple::make_range(method()->signature());
 750       init_tf(TypeFunc::make(domain, range));
 751       StartNode* s = new StartOSRNode(root(), domain);
 752       initial_gvn()->set_type_bottom(s);
 753       init_start(s);
 754       cg = CallGenerator::for_osr(method(), entry_bci());
 755     } else {
 756       // Normal case.
 757       init_tf(TypeFunc::make(method()));
 758       StartNode* s = new StartNode(root(), tf()->domain());
 759       initial_gvn()->set_type_bottom(s);
 760       init_start(s);
 761       if (method()->intrinsic_id() == vmIntrinsics::_Reference_get
 762           && (UseG1GC || UseShenandoahGC)) {
 763         // With java.lang.ref.Reference.get() we must go through the
 764         // intrinsic when G1 or Shenandoah is enabled - even when get()
 765         // is the root method of the compile - so that, if necessary,
 766         // the value in the referent field of the reference object gets
 767         // recorded by the pre-barrier code.
 768         // Specifically, if such a GC is enabled, the value in the
 769         // referent field is recorded by the SATB pre-barrier. This will
 770         // result in the referent being marked live and the reference
 771         // object removed from the list of discovered references during
 772         // reference processing.
 773         cg = find_intrinsic(method(), false);
 774       }
 775       if (cg == NULL) {
 776         float past_uses = method()->interpreter_invocation_count();
 777         float expected_uses = past_uses;
 778         cg = CallGenerator::for_inline(method(), expected_uses);
 779       }
 780     }
 781     if (failing())  return;
 782     if (cg == NULL) {


1419     }
1420   } else if( ta && _AliasLevel >= 2 ) {
1421     // For arrays indexed by constant indices, we flatten the alias
1422     // space to include all of the array body.  Only the header, klass
1423     // and array length can be accessed un-aliased.
1424     if( offset != Type::OffsetBot ) {
1425       if( ta->const_oop() ) { // MethodData* or Method*
1426         offset = Type::OffsetBot;   // Flatten constant access into array body
1427         tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),ta->ary(),ta->klass(),false,offset);
1428       } else if( offset == arrayOopDesc::length_offset_in_bytes() ) {
1429         // range is OK as-is.
1430         tj = ta = TypeAryPtr::RANGE;
1431       } else if( offset == oopDesc::klass_offset_in_bytes() ) {
1432         tj = TypeInstPtr::KLASS; // all klass loads look alike
1433         ta = TypeAryPtr::RANGE; // generic ignored junk
1434         ptr = TypePtr::BotPTR;
1435       } else if( offset == oopDesc::mark_offset_in_bytes() ) {
1436         tj = TypeInstPtr::MARK;
1437         ta = TypeAryPtr::RANGE; // generic ignored junk
1438         ptr = TypePtr::BotPTR;
1439       } else if (offset == -8) {
1440         // Keep the Shenandoah Brooks forwarding pointer slot distinct.
1441         tj = ta = TypeAryPtr::make(ptr,ta->ary(),ta->klass(),false,offset);
1442       } else {                  // Random constant offset into array body
1443         offset = Type::OffsetBot;   // Flatten constant access into array body
1444         tj = ta = TypeAryPtr::make(ptr,ta->ary(),ta->klass(),false,offset);
1445       }
1446     }
1447     // Arrays of fixed size alias with arrays of unknown size.
1448     if (ta->size() != TypeInt::POS) {
1449       const TypeAry *tary = TypeAry::make(ta->elem(), TypeInt::POS);
1450       tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,ta->klass(),false,offset);
1451     }
1452     // Arrays of known objects become arrays of unknown objects.
1453     if (ta->elem()->isa_narrowoop() && ta->elem() != TypeNarrowOop::BOTTOM) {
1454       const TypeAry *tary = TypeAry::make(TypeNarrowOop::BOTTOM, ta->size());
1455       tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,NULL,false,offset);
1456     }
1457     if (ta->elem()->isa_oopptr() && ta->elem() != TypeInstPtr::BOTTOM) {
1458       const TypeAry *tary = TypeAry::make(TypeInstPtr::BOTTOM, ta->size());
1459       tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,NULL,false,offset);
1460     }
1461     // Arrays of bytes and of booleans both use 'bastore' and 'baload' so
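
The offset == -8 case above exists because Shenandoah of this vintage keeps a Brooks-style forwarding pointer one word before each object; flattening it into the array body would let forwarding-pointer loads alias with element accesses. A hypothetical sketch of the layout assumption (LP64, raw pointer arithmetic for illustration only):

    // Sketch only: the forwarding pointer lives one word (-8 bytes on
    // LP64) before the object; a Shenandoah read barrier dereferences
    // it to reach the current copy of the object.
    static inline oop brooks_forwardee_sketch(oop obj) {
      return *reinterpret_cast<oop*>(reinterpret_cast<char*>(obj) - 8);
    }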


1486         tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,offset);
1487       }
1488     } else if( is_known_inst ) {
1489       tj = to; // Keep NotNull and klass_is_exact for instance type
1490     } else if( ptr == TypePtr::NotNull || to->klass_is_exact() ) {
1491       // During the 2nd round of IterGVN, NotNull castings are removed.
1492       // Make sure the Bottom and NotNull variants alias the same.
1493       // Also, make sure exact and non-exact variants alias the same.
1494       tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,offset);
1495     }
1496     if (to->speculative() != NULL) {
1497       tj = to = TypeInstPtr::make(to->ptr(),to->klass(),to->klass_is_exact(),to->const_oop(),to->offset(), to->instance_id());
1498     }
1499     // Canonicalize the holder of this field
1500     if (offset >= 0 && offset < instanceOopDesc::base_offset_in_bytes()) {
1501       // First handle header references such as a LoadKlassNode, even if the
1502       // object's klass is unloaded at compile time (4965979).
1503       if (!is_known_inst) { // Do it only for non-instance types
1504         tj = to = TypeInstPtr::make(TypePtr::BotPTR, env()->Object_klass(), false, NULL, offset);
1505       }
1506     } else if ((offset != -8) && (offset < 0 || offset >= k->size_helper() * wordSize)) {
1507       // Static fields are in the space above the normal instance
1508       // fields in the java.lang.Class instance.
1509       if (to->klass() != ciEnv::current()->Class_klass()) {
1510         to = NULL;
1511         tj = TypeOopPtr::BOTTOM;
1512         offset = tj->offset();
1513       }
1514     } else {
1515       ciInstanceKlass *canonical_holder = k->get_canonical_holder(offset);
1516       if (!k->equals(canonical_holder) || tj->offset() != offset) {
1517         if( is_known_inst ) {
1518           tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, true, NULL, offset, to->instance_id());
1519         } else {
1520           tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, false, NULL, offset);
1521         }
1522       }
1523     }
1524   }
1525 
1526   // Klass pointers to object array klasses need some flattening


1584     case Type::AnyPtr:   tj = TypePtr::BOTTOM;      break;  // caller checks it
1585     default: ShouldNotReachHere();
1586     }
1587     break;
1588   case 2:                       // No collapsing at level 2; keep all splits
1589   case 3:                       // No collapsing at level 3; keep all splits
1590     break;
1591   default:
1592     Unimplemented();
1593   }
1594 
1595   offset = tj->offset();
1596   assert( offset != Type::OffsetTop, "Offset has fallen from constant" );
1597 
1598   assert( (offset != Type::OffsetBot && tj->base() != Type::AryPtr) ||
1599           (offset == Type::OffsetBot && tj->base() == Type::AryPtr) ||
1600           (offset == Type::OffsetBot && tj == TypeOopPtr::BOTTOM) ||
1601           (offset == Type::OffsetBot && tj == TypePtr::BOTTOM) ||
1602           (offset == oopDesc::mark_offset_in_bytes() && tj->base() == Type::AryPtr) ||
1603           (offset == oopDesc::klass_offset_in_bytes() && tj->base() == Type::AryPtr) ||
1604           (offset == arrayOopDesc::length_offset_in_bytes() && tj->base() == Type::AryPtr) ||
1605           (offset == -8 && tj->base() == Type::AryPtr && UseShenandoahGC),
1606           "For oops, klasses, raw offset must be constant; for arrays the offset is never known" );
1607   assert( tj->ptr() != TypePtr::TopPTR &&
1608           tj->ptr() != TypePtr::AnyNull &&
1609           tj->ptr() != TypePtr::Null, "No imprecise addresses" );
1610 //    assert( tj->ptr() != TypePtr::Constant ||
1611 //            tj->base() == Type::RawPtr ||
1612 //            tj->base() == Type::KlassPtr, "No constant oop addresses" );
1613 
1614   return tj;
1615 }
1616 
1617 void Compile::AliasType::Init(int i, const TypePtr* at) {
1618   _index = i;
1619   _adr_type = at;
1620   _field = NULL;
1621   _element = NULL;
1622   _is_rewritable = true; // default
1623   const TypeOopPtr *atoop = (at != NULL) ? at->isa_oopptr() : NULL;
1624   if (atoop != NULL && atoop->is_known_instance()) {
1625     const TypeOopPtr *gt = atoop->cast_to_instance_id(TypeOopPtr::InstanceBot);


2839       }
2840     }
2841 #endif
2842     break;
2843   }
2844 
2845   case Op_CastPP: {
2846     // Remove CastPP nodes to gain more freedom during scheduling but
2847     // keep the dependency they encode as control or precedence edges
2848     // (if control is set already) on memory operations. Some CastPP
2849     // nodes don't have a control (don't carry a dependency): skip
2850     // those.
2851     if (n->in(0) != NULL) {
2852       ResourceMark rm;
2853       Unique_Node_List wq;
2854       wq.push(n);
2855       for (uint next = 0; next < wq.size(); ++next) {
2856         Node *m = wq.at(next);
2857         for (DUIterator_Fast imax, i = m->fast_outs(imax); i < imax; i++) {
2858           Node* use = m->fast_out(i);
2859           if (use->is_Mem() || use->is_EncodeNarrowPtr() || use->is_ShenandoahBarrier()) {
2860             use->ensure_control_or_add_prec(n->in(0));
2861           } else if (use->in(0) == NULL) {
2862             switch(use->Opcode()) {
2863             case Op_AddP:
2864             case Op_DecodeN:
2865             case Op_DecodeNKlass:
2866             case Op_CheckCastPP:
2867             case Op_CastPP:
2868               wq.push(use);
2869               break;
2870             }
2871           }
2872         }
2873       }
2874     }
2875     const bool is_LP64 = LP64_ONLY(true) NOT_LP64(false);
2876     if (is_LP64 && n->in(1)->is_DecodeN() && Matcher::gen_narrow_oop_implicit_null_checks()) {
2877       Node* in1 = n->in(1);
2878       const Type* t = n->bottom_type();
2879       Node* new_in1 = in1->clone();


3163         }
3164       } else {
3165         if (t == NULL || t->_lo < 0 || t->_hi > (int)mask) {
3166           Node* shift = new AndINode(in2, ConNode::make(TypeInt::make(mask)));
3167           n->set_req(2, shift);
3168         }
3169       }
3170       if (in2->outcnt() == 0) { // Remove dead node
3171         in2->disconnect_inputs(NULL, this);
3172       }
3173     }
3174     break;
3175   case Op_MemBarStoreStore:
3176   case Op_MemBarRelease:
3177     // Break the link with AllocateNode: it is no longer useful and
3178     // confuses register allocation.
3179     if (n->req() > MemBarNode::Precedent) {
3180       n->set_req(MemBarNode::Precedent, top());
3181     }
3182     break;
3183   case Op_ShenandoahReadBarrier:
3184     break;
3185   case Op_ShenandoahWriteBarrier:
3186     n->set_req(ShenandoahBarrierNode::Memory, immutable_memory());
3187     break;
3188   default:
3189     assert( !n->is_Call(), "" );
3190     assert( !n->is_Mem(), "" );
3191     assert( nop != Op_ProfileBoolean, "should be eliminated during IGVN");
3192     break;
3193   }
3194 
3195   // Collect CFG split points
3196   if (n->is_MultiBranch())
3197     frc._tests.push(n);
3198 }
3199 
3200 //------------------------------final_graph_reshaping_walk---------------------
3201 // Replacing Opaque nodes with their input in final_graph_reshaping_impl()
3202 // requires that the walk visits a node's inputs before visiting the node.
3203 void Compile::final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ) {
3204   ResourceArea *area = Thread::current()->resource_area();
3205   Unique_Node_List sfpt(area);
3206 
3207   frc._visited.set(root->_idx); // first, mark node as visited


3534           if (use->is_Con())        continue;  // a dead ConNode is OK
3535           // At this point, we have found a dead node which is DU-reachable.
3536           if (!dead_nodes) {
3537             tty->print_cr("*** Dead nodes reachable via DU edges:");
3538             dead_nodes = true;
3539           }
3540           use->dump(2);
3541           tty->print_cr("---");
3542           checked.push(use);  // No repeats; pretend it is now checked.
3543         }
3544       }
3545       assert(!dead_nodes, "using nodes must be reachable from root");
3546     }
3547   }
3548 }
3549 
3550 // Verify GC barriers consistency
3551 // Currently supported:
3552 // - G1 pre-barriers (see GraphKit::g1_write_barrier_pre())
3553 void Compile::verify_barriers() {
3554   if (UseG1GC || UseShenandoahGC) {
3555     // Verify G1 pre-barriers
3556     const int marking_offset = in_bytes(JavaThread::satb_mark_queue_offset() + PtrQueue::byte_offset_of_active());
3557 
3558     ResourceArea *area = Thread::current()->resource_area();
3559     Unique_Node_List visited(area);
3560     Node_List worklist(area);
3561     // We're going to walk control flow backwards starting from the Root
3562     worklist.push(_root);
3563     while (worklist.size() > 0) {
3564       Node* x = worklist.pop();
3565       if (x == NULL || x == top()) continue;
3566       if (visited.member(x)) {
3567         continue;
3568       } else {
3569         visited.push(x);
3570       }
3571 
3572       if (x->is_Region()) {
3573         for (uint i = 1; i < x->req(); i++) {
3574           worklist.push(x->in(i));
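
The marking_offset computed above locates the thread-local "SATB marking active" flag; the backward walk then checks that every pre-barrier is guarded by a test of that flag. A simplified rendering of the guard (the flag's width is platform-dependent; jbyte is an assumption here):

    // Simplified rendering of the guard the verifier looks for: a load
    // of the active flag at thread + marking_offset, compared against
    // zero. jbyte width is an assumption; the real width varies.
    static bool marking_is_active_sketch(JavaThread* thread, int marking_offset) {
      return *(reinterpret_cast<jbyte*>(thread) + marking_offset) != 0;
    }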

