< prev index next >

src/share/vm/opto/gcm.cpp

Print this page

        

@@ -519,21 +519,21 @@
     tty->print_cr("*** Possible Anti-Dependence Bug:  Load consumes all of memory.");
     load->dump(2);
     if (VerifyAliases)  assert(load_alias_idx != Compile::AliasIdxBot, "");
   }
 #endif
-  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Op_StrComp),
+  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Opcodes::Op_StrComp),
          "String compare is only known 'load' that does not conflict with any stores");
-  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Op_StrEquals),
+  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Opcodes::Op_StrEquals),
          "String equals is a 'load' that does not conflict with any stores");
-  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Op_StrIndexOf),
+  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Opcodes::Op_StrIndexOf),
          "String indexOf is a 'load' that does not conflict with any stores");
-  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Op_StrIndexOfChar),
+  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Opcodes::Op_StrIndexOfChar),
          "String indexOfChar is a 'load' that does not conflict with any stores");
-  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Op_AryEq),
+  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Opcodes::Op_AryEq),
          "Arrays equals is a 'load' that does not conflict with any stores");
-  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Op_HasNegatives),
+  assert(load_alias_idx || (load->is_Mach() && load->as_Mach()->ideal_Opcode() == Opcodes::Op_HasNegatives),
          "HasNegatives is a 'load' that does not conflict with any stores");
 
   if (!C->alias_type(load_alias_idx)->is_rewritable()) {
     // It is impossible to spoil this load by putting stores before it,
     // because we know that the stores will never update the value

@@ -596,17 +596,17 @@
   worklist_mem.push(NULL);
   while (worklist_store.size() > 0) {
     // Examine a nearby store to see if it might interfere with our load.
     Node* mem   = worklist_mem.pop();
     Node* store = worklist_store.pop();
-    uint op = store->Opcode();
+    Opcodes op = store->Opcode();
 
     // MergeMems do not directly have anti-deps.
     // Treat them as internal nodes in a forward tree of memory states,
     // the leaves of which are each a 'possible-def'.
     if (store == initial_mem    // root (exclusive) of tree we are searching
-        || op == Op_MergeMem    // internal node of tree we are searching
+        || op == Opcodes::Op_MergeMem    // internal node of tree we are searching
         ) {
       mem = store;   // It's not a possibly interfering store.
       if (store == initial_mem)
         initial_mem = NULL;  // only process initial memory once
 

@@ -626,11 +626,11 @@
         worklist_store.push(store);
       }
       continue;
     }
 
-    if (op == Op_MachProj || op == Op_Catch)   continue;
+    if (op == Opcodes::Op_MachProj || op == Opcodes::Op_Catch)   continue;
     if (store->needs_anti_dependence_check())  continue;  // not really a store
 
     // Compute the alias index.  Loads and stores with different alias
     // indices do not need anti-dependence edges.  Wide MemBar's are
     // anti-dependent on everything (except immutable memories).

@@ -644,11 +644,11 @@
       if (load_alias_idx != Compile::AliasIdxRaw) {
         // Check for call into the runtime using the Java calling
         // convention (and from there into a wrapper); it has no
         // _method.  Can't do this optimization for Native calls because
         // they CAN write to Java memory.
-        if (mstore->ideal_Opcode() == Op_CallStaticJava) {
+        if (mstore->ideal_Opcode() == Opcodes::Op_CallStaticJava) {
           assert(mstore->is_MachSafePoint(), "");
           MachSafePointNode* ms = (MachSafePointNode*) mstore;
           assert(ms->is_MachCallJava(), "");
           MachCallJavaNode* mcj = (MachCallJavaNode*) ms;
           if (mcj->_method == NULL) {

@@ -658,21 +658,21 @@
           }
         }
         // Same for SafePoints: they read/write Raw but only read otherwise.
         // This is basically a workaround for SafePoints only defining control
         // instead of control + memory.
-        if (mstore->ideal_Opcode() == Op_SafePoint)
+        if (mstore->ideal_Opcode() == Opcodes::Op_SafePoint)
           continue;
       } else {
         // Some raw memory, such as the load of "top" at an allocation,
         // can be control dependent on the previous safepoint. See
         // comments in GraphKit::allocate_heap() about control input.
         // Inserting an anti-dep between such a safepoint and a use
         // creates a cycle, and will cause a subsequent failure in
         // local scheduling.  (BugId 4919904)
         // (%%% How can a control input be a safepoint and not a projection??)
-        if (mstore->ideal_Opcode() == Op_SafePoint && load->in(0) == mstore)
+        if (mstore->ideal_Opcode() == Opcodes::Op_SafePoint && load->in(0) == mstore)
           continue;
       }
     }
 
     // Identify a block that the current load must be above,

@@ -1225,16 +1225,16 @@
       continue;
 
     MachNode* mach = self->is_Mach() ? self->as_Mach() : NULL;
     if (mach) {
       switch (mach->ideal_Opcode()) {
-      case Op_CreateEx:
+      case Opcodes::Op_CreateEx:
         // Don't move exception creation
         early->add_inst(self);
         continue;
         break;
-      case Op_CheckCastPP:
+      case Opcodes::Op_CheckCastPP:
         // Don't move CheckCastPP nodes away from their input, if the input
         // is a rawptr (5071820).
         Node *def = self->in(1);
         if (def != NULL && def->bottom_type()->base() == Type::RawPtr) {
           early->add_inst(self);

@@ -1295,11 +1295,11 @@
     // Must clone guys stay next to use; no hoisting allowed.
     // Also cannot hoist guys that alter memory or are otherwise not
     // allocatable (hoisting can make a value live longer, leading to
     // anti and output dependency problems which are normally resolved
     // by the register allocator giving everyone a different register).
-    if (mach != NULL && must_clone[mach->ideal_Opcode()])
+    if (mach != NULL && must_clone[static_cast<uint>(mach->ideal_Opcode())])
       try_to_hoist = false;
 
     Block* late = NULL;
     if (try_to_hoist) {
       // Now find the block with the least execution frequency.

@@ -1818,11 +1818,11 @@
 // Determine the probability of reaching successor 'i' from the receiver block.
 float Block::succ_prob(uint i) {
   int eidx = end_idx();
   Node *n = get_node(eidx);  // Get ending Node
 
-  int op = n->Opcode();
+  Opcodes op = n->Opcode();
   if (n->is_Mach()) {
     if (n->is_MachNullCheck()) {
       // Can only reach here if called after lcm. The original Op_If is gone,
       // so we attempt to infer the probability from one or both of the
       // successor blocks.

@@ -1844,52 +1844,52 @@
   }
 
 
   // Switch on branch type
   switch( op ) {
-  case Op_CountedLoopEnd:
-  case Op_If: {
+  case Opcodes::Op_CountedLoopEnd:
+  case Opcodes::Op_If: {
     assert (i < 2, "just checking");
     // Conditionals pass on only part of their frequency
     float prob  = n->as_MachIf()->_prob;
     assert(prob >= 0.0 && prob <= 1.0, "out of range probability");
     // If succ[i] is the FALSE branch, invert path info
-    if( get_node(i + eidx + 1)->Opcode() == Op_IfFalse ) {
+    if( get_node(i + eidx + 1)->Opcode() == Opcodes::Op_IfFalse ) {
       return 1.0f - prob; // not taken
     } else {
       return prob; // taken
     }
   }
 
-  case Op_Jump:
+  case Opcodes::Op_Jump:
     // Divide the frequency between all successors evenly
     return 1.0f/_num_succs;
 
-  case Op_Catch: {
+  case Opcodes::Op_Catch: {
     const CatchProjNode *ci = get_node(i + eidx + 1)->as_CatchProj();
     if (ci->_con == CatchProjNode::fall_through_index) {
       // Fall-thru path gets the lion's share.
       return 1.0f - PROB_UNLIKELY_MAG(5)*_num_succs;
     } else {
       // Presume exceptional paths are equally unlikely
       return PROB_UNLIKELY_MAG(5);
     }
   }
 
-  case Op_Root:
-  case Op_Goto:
+  case Opcodes::Op_Root:
+  case Opcodes::Op_Goto:
     // Pass frequency straight thru to target
     return 1.0f;
 
-  case Op_NeverBranch:
+  case Opcodes::Op_NeverBranch:
     return 0.0f;
 
-  case Op_TailCall:
-  case Op_TailJump:
-  case Op_Return:
-  case Op_Halt:
-  case Op_Rethrow:
+  case Opcodes::Op_TailCall:
+  case Opcodes::Op_TailJump:
+  case Opcodes::Op_Return:
+  case Opcodes::Op_Halt:
+  case Opcodes::Op_Rethrow:
     // Do not push out freq to root block
     return 0.0f;
 
   default:
     ShouldNotReachHere();

@@ -1902,11 +1902,11 @@
 // Return the number of fall-through candidates for a block
 int Block::num_fall_throughs() {
   int eidx = end_idx();
   Node *n = get_node(eidx);  // Get ending Node
 
-  int op = n->Opcode();
+  Opcodes op = n->Opcode();
   if (n->is_Mach()) {
     if (n->is_MachNullCheck()) {
      // In theory, either side can fall-thru, for simplicity's sake,
       // let's say only the false branch can now.
       return 1;

@@ -1914,35 +1914,35 @@
     op = n->as_Mach()->ideal_Opcode();
   }
 
   // Switch on branch type
   switch( op ) {
-  case Op_CountedLoopEnd:
-  case Op_If:
+  case Opcodes::Op_CountedLoopEnd:
+  case Opcodes::Op_If:
     return 2;
 
-  case Op_Root:
-  case Op_Goto:
+  case Opcodes::Op_Root:
+  case Opcodes::Op_Goto:
     return 1;
 
-  case Op_Catch: {
+  case Opcodes::Op_Catch: {
     for (uint i = 0; i < _num_succs; i++) {
       const CatchProjNode *ci = get_node(i + eidx + 1)->as_CatchProj();
       if (ci->_con == CatchProjNode::fall_through_index) {
         return 1;
       }
     }
     return 0;
   }
 
-  case Op_Jump:
-  case Op_NeverBranch:
-  case Op_TailCall:
-  case Op_TailJump:
-  case Op_Return:
-  case Op_Halt:
-  case Op_Rethrow:
+  case Opcodes::Op_Jump:
+  case Opcodes::Op_NeverBranch:
+  case Opcodes::Op_TailCall:
+  case Opcodes::Op_TailJump:
+  case Opcodes::Op_Return:
+  case Opcodes::Op_Halt:
+  case Opcodes::Op_Rethrow:
     return 0;
 
   default:
     ShouldNotReachHere();
   }

@@ -1954,40 +1954,40 @@
 // Return true if a specific successor could be fall-through target.
 bool Block::succ_fall_through(uint i) {
   int eidx = end_idx();
   Node *n = get_node(eidx);  // Get ending Node
 
-  int op = n->Opcode();
+  Opcodes op = n->Opcode();
   if (n->is_Mach()) {
     if (n->is_MachNullCheck()) {
      // In theory, either side can fall-thru, for simplicity's sake,
       // let's say only the false branch can now.
-      return get_node(i + eidx + 1)->Opcode() == Op_IfFalse;
+      return get_node(i + eidx + 1)->Opcode() == Opcodes::Op_IfFalse;
     }
     op = n->as_Mach()->ideal_Opcode();
   }
 
   // Switch on branch type
   switch( op ) {
-  case Op_CountedLoopEnd:
-  case Op_If:
-  case Op_Root:
-  case Op_Goto:
+  case Opcodes::Op_CountedLoopEnd:
+  case Opcodes::Op_If:
+  case Opcodes::Op_Root:
+  case Opcodes::Op_Goto:
     return true;
 
-  case Op_Catch: {
+  case Opcodes::Op_Catch: {
     const CatchProjNode *ci = get_node(i + eidx + 1)->as_CatchProj();
     return ci->_con == CatchProjNode::fall_through_index;
   }
 
-  case Op_Jump:
-  case Op_NeverBranch:
-  case Op_TailCall:
-  case Op_TailJump:
-  case Op_Return:
-  case Op_Halt:
-  case Op_Rethrow:
+  case Opcodes::Op_Jump:
+  case Opcodes::Op_NeverBranch:
+  case Opcodes::Op_TailCall:
+  case Opcodes::Op_TailJump:
+  case Opcodes::Op_Return:
+  case Opcodes::Op_Halt:
+  case Opcodes::Op_Rethrow:
     return false;
 
   default:
     ShouldNotReachHere();
   }

@@ -1999,13 +1999,13 @@
 // Update the probability of a two-branch to be uncommon
 void Block::update_uncommon_branch(Block* ub) {
   int eidx = end_idx();
   Node *n = get_node(eidx);  // Get ending Node
 
-  int op = n->as_Mach()->ideal_Opcode();
+  Opcodes op = n->as_Mach()->ideal_Opcode();
 
-  assert(op == Op_CountedLoopEnd || op == Op_If, "must be a If");
+  assert(op == Opcodes::Op_CountedLoopEnd || op == Opcodes::Op_If, "must be a If");
   assert(num_fall_throughs() == 2, "must be a two way branch block");
 
   // Which successor is ub?
   uint s;
   for (s = 0; s <_num_succs; s++) {

@@ -2013,11 +2013,11 @@
   }
   assert(s < 2, "uncommon successor must be found");
 
   // If ub is the true path, make the probability small, else
   // ub is the false path, and make the probability large
-  bool invert = (get_node(s + eidx + 1)->Opcode() == Op_IfFalse);
+  bool invert = (get_node(s + eidx + 1)->Opcode() == Opcodes::Op_IfFalse);
 
   // Get existing probability
   float p = n->as_MachIf()->_prob;
 
   if (invert) p = 1.0 - p;
< prev index next >