< prev index next >

src/share/vm/opto/parse2.cpp

Print this page




 149       if (C->allow_range_check_smearing()) {
 150         // Do not use builtin_throw, since range checks are sometimes
 151         // made more stringent by an optimistic transformation.
 152         // This creates "tentative" range checks at this point,
 153         // which are not guaranteed to throw exceptions.
 154         // See IfNode::Ideal, is_range_check, adjust_check.
 155         uncommon_trap(Deoptimization::Reason_range_check,
 156                       Deoptimization::Action_make_not_entrant,
 157                       NULL, "range_check");
 158       } else {
 159         // If we have already recompiled with the range-check-widening
 160         // heroic optimization turned off, then we must really be throwing
 161         // range check exceptions.
 162         builtin_throw(Deoptimization::Reason_range_check, idx);
 163       }
 164     }
 165   }
 166   // Check for always knowing you are throwing a range-check exception
 167   if (stopped())  return top();
 168 
 169   Node* ptr = array_element_address(ary, idx, type, sizetype);


 170 
 171   if (result2 != NULL)  *result2 = elemtype;
 172 
 173   assert(ptr != top(), "top should go hand-in-hand with stopped");
 174 
 175   return ptr;
 176 }
 177 
 178 
 179 // Build and map an IfNode that forks control on the integer relation
 180 IfNode* Parse::jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask) {
 181   Node   *cmp = _gvn.transform( new CmpINode( a, b)); // signed compare of the two int inputs
 182   Node   *tst = _gvn.transform( new BoolNode( cmp, mask)); // reduce the compare to a boolean with the requested relation
 183   IfNode *iff = create_and_map_if( control(), tst, ((mask == BoolTest::eq) ? PROB_STATIC_INFREQUENT : PROB_FAIR), COUNT_UNKNOWN ); // an exact-match (eq) branch is assumed infrequent; all other relations get a 50/50 guess
 184   return iff;
 185 }
 186 
 187 // return Region node
 188 Node* Parse::jump_if_join(Node* iffalse, Node* iftrue) {
 189   Node *region  = new RegionNode(3); // 2 results


 449     return false;
 450 
 451   // Normalize table lookups to zero
 452   int lowval = lo->lo();
 453   key_val = _gvn.transform( new SubINode(key_val, _gvn.intcon(lowval)) );
 454 
 455   // Generate a guard to protect against input keyvals that aren't
 456   // in the switch domain.
 457   if (needs_guard) {
 458     Node*   size = _gvn.intcon(num_cases);
 459     Node*   cmp = _gvn.transform( new CmpUNode(key_val, size) );
 460     Node*   tst = _gvn.transform( new BoolNode(cmp, BoolTest::ge) );
 461     IfNode* iff = create_and_map_if( control(), tst, PROB_FAIR, COUNT_UNKNOWN);
 462     jump_if_true_fork(iff, default_dest, NullTableIndex);
 463   }
 464 
 465   // Create an ideal node JumpTable that has projections
 466   // of all possible ranges for a switch statement
 467   // The key_val input must be converted to a pointer offset and scaled.
 468   // Compare Parse::array_addressing above.
 469 #ifdef _LP64
 470   // Clean the 32-bit int into a real 64-bit offset.
 471   // Otherwise, the jint value 0 might turn into an offset of 0x0800000000.
 472   const TypeLong* lkeytype = TypeLong::make(CONST64(0), num_cases-1, Type::WidenMin);
 473   key_val       = _gvn.transform( new ConvI2LNode(key_val, lkeytype) );
 474 #endif


 475   // Shift the value by wordsize so we have an index into the table, rather
 476   // than a switch value
 477   Node *shiftWord = _gvn.MakeConX(wordSize);
 478   key_val = _gvn.transform( new MulXNode( key_val, shiftWord));
 479 
 480   // Create the JumpNode
 481   Node* jtn = _gvn.transform( new JumpNode(control(), key_val, num_cases) );
 482 
 483   // These are the switch destinations hanging off the jumpnode
 484   int i = 0;
 485   for (SwitchRange* r = lo; r <= hi; r++) {
 486     for (int64_t j = r->lo(); j <= r->hi(); j++, i++) {
 487       Node* input = _gvn.transform(new JumpProjNode(jtn, i, r->dest(), (int)(j - lowval)));
 488       {
 489         PreserveJVMState pjvms(this);
 490         set_control(input);
 491         jump_if_always_fork(r->dest(), r->table_index());
 492       }
 493     }
 494   }




 149       if (C->allow_range_check_smearing()) {
 150         // Do not use builtin_throw, since range checks are sometimes
 151         // made more stringent by an optimistic transformation.
 152         // This creates "tentative" range checks at this point,
 153         // which are not guaranteed to throw exceptions.
 154         // See IfNode::Ideal, is_range_check, adjust_check.
 155         uncommon_trap(Deoptimization::Reason_range_check,
 156                       Deoptimization::Action_make_not_entrant,
 157                       NULL, "range_check");
 158       } else {
 159         // If we have already recompiled with the range-check-widening
 160         // heroic optimization turned off, then we must really be throwing
 161         // range check exceptions.
 162         builtin_throw(Deoptimization::Reason_range_check, idx);
 163       }
 164     }
 165   }
 166   // Check for always knowing you are throwing a range-check exception
 167   if (stopped())  return top();
 168 
 169   // Make array address computation control dependent to prevent it
 170   // from floating above the range check during loop optimizations.
 171   Node* ptr = array_element_address(ary, idx, type, sizetype, control());
 172 
 173   if (result2 != NULL)  *result2 = elemtype;
 174 
 175   assert(ptr != top(), "top should go hand-in-hand with stopped");
 176 
 177   return ptr;
 178 }
 179 
 180 
 181 // Build and map an IfNode that forks control on the integer relation
 182 IfNode* Parse::jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask) {
 183   Node   *cmp = _gvn.transform( new CmpINode( a, b)); // signed compare of the two int inputs
 184   Node   *tst = _gvn.transform( new BoolNode( cmp, mask)); // reduce the compare to a boolean with the requested relation
 185   IfNode *iff = create_and_map_if( control(), tst, ((mask == BoolTest::eq) ? PROB_STATIC_INFREQUENT : PROB_FAIR), COUNT_UNKNOWN ); // an exact-match (eq) branch is assumed infrequent; all other relations get a 50/50 guess
 186   return iff;
 187 }
 188 
 189 // return Region node
 190 Node* Parse::jump_if_join(Node* iffalse, Node* iftrue) {
 191   Node *region  = new RegionNode(3); // 2 results


 451     return false;
 452 
 453   // Normalize table lookups to zero
 454   int lowval = lo->lo();
 455   key_val = _gvn.transform( new SubINode(key_val, _gvn.intcon(lowval)) );
 456 
 457   // Generate a guard to protect against input keyvals that aren't
 458   // in the switch domain.
 459   if (needs_guard) {
 460     Node*   size = _gvn.intcon(num_cases);
 461     Node*   cmp = _gvn.transform( new CmpUNode(key_val, size) );
 462     Node*   tst = _gvn.transform( new BoolNode(cmp, BoolTest::ge) );
 463     IfNode* iff = create_and_map_if( control(), tst, PROB_FAIR, COUNT_UNKNOWN);
 464     jump_if_true_fork(iff, default_dest, NullTableIndex);
 465   }
 466 
 467   // Create an ideal node JumpTable that has projections
 468   // of all possible ranges for a switch statement
 469   // The key_val input must be converted to a pointer offset and scaled.
 470   // Compare Parse::array_addressing above.
 471 
 472   // Clean the 32-bit int into a real 64-bit offset.
 473   // Otherwise, the jint value 0 might turn into an offset of 0x0800000000.
 474   const TypeInt* ikeytype = TypeInt::make(0, num_cases, Type::WidenMin);
 475   // Make I2L conversion control dependent to prevent it from
 476   // floating above the range check during loop optimizations.
 477   key_val = C->conv_I2X_index(&_gvn, key_val, ikeytype, control());
 478 
 479   // Shift the value by wordsize so we have an index into the table, rather
 480   // than a switch value
 481   Node *shiftWord = _gvn.MakeConX(wordSize);
 482   key_val = _gvn.transform( new MulXNode( key_val, shiftWord));
 483 
 484   // Create the JumpNode
 485   Node* jtn = _gvn.transform( new JumpNode(control(), key_val, num_cases) );
 486 
 487   // These are the switch destinations hanging off the jumpnode
 488   int i = 0;
 489   for (SwitchRange* r = lo; r <= hi; r++) {
 490     for (int64_t j = r->lo(); j <= r->hi(); j++, i++) {
 491       Node* input = _gvn.transform(new JumpProjNode(jtn, i, r->dest(), (int)(j - lowval)));
 492       {
 493         PreserveJVMState pjvms(this);
 494         set_control(input);
 495         jump_if_always_fork(r->dest(), r->table_index());
 496       }
 497     }
 498   }


< prev index next >