3378 } else {
// The shift count's type is unknown (t == NULL) or may fall outside
// [0, mask]: AND the count with the platform shift mask so the hardware
// shift instruction always sees an in-range count.
3379 if (t == NULL || t->_lo < 0 || t->_hi > (int)mask) {
3380 Node* shift = new AndINode(in2, ConNode::make(TypeInt::make(mask)));
3381 n->set_req(2, shift);
3382 }
3383 }
// The original count node may now be unused; unlink it from the graph.
3384 if (in2->outcnt() == 0) { // Remove dead node
3385 in2->disconnect_inputs(NULL, this);
3386 }
3387 }
3388 break;
3389 case Op_MemBarStoreStore:
3390 case Op_MemBarRelease:
3391 // Break the link with AllocateNode: it is no longer useful and
3392 // confuses register allocation.
// Only membars that actually carry a Precedent edge are touched; the
// edge is overwritten with top() rather than removed so req() is stable.
3393 if (n->req() > MemBarNode::Precedent) {
3394 n->set_req(MemBarNode::Precedent, top());
3395 }
3396 break;
3397 #if INCLUDE_SHENANDOAHGC
// Shenandoah read barriers need no reshaping at this point.
3398 case Op_ShenandoahReadBarrier:
3399 break;
// Write barriers are expanded by an earlier phase; reaching one here is
// a compiler bug (debug builds trap on the assert below).
3400 case Op_ShenandoahWriteBarrier:
3401 assert(false, "should have been expanded already");
3402 break;
3403 #endif
3404 case Op_RangeCheck: {
3405 RangeCheckNode* rc = n->as_RangeCheck();
// Replace the RangeCheckNode with a plain IfNode carrying the same
// control input, condition, probability and frequency.
3406 Node* iff = new IfNode(rc->in(0), rc->in(1), rc->_prob, rc->_fcnt);
3407 n->subsume_by(iff, this);
// Record the replacement test in frc._tests (consumed by a later pass —
// not visible in this excerpt).
3408 frc._tests.push(iff);
3409 break;
3410 }
3411 case Op_ConvI2L: {
3412 if (!Matcher::convi2l_type_required) {
3413 // Code generation on some platforms doesn't need accurate
3414 // ConvI2L types. Widening the type can help remove redundant
3415 // address computations.
// Widen this conversion's type to the full int range (TypeLong::INT).
3416 n->as_Type()->set_type(TypeLong::INT);
// NOTE(review): this case continues beyond the end of this excerpt.
3417 ResourceMark rm;
|
3378 } else {
// The shift count's type is unknown (t == NULL) or may fall outside
// [0, mask]: AND the count with the platform shift mask so the hardware
// shift instruction always sees an in-range count.
3379 if (t == NULL || t->_lo < 0 || t->_hi > (int)mask) {
3380 Node* shift = new AndINode(in2, ConNode::make(TypeInt::make(mask)));
3381 n->set_req(2, shift);
3382 }
3383 }
// The original count node may now be unused; unlink it from the graph.
3384 if (in2->outcnt() == 0) { // Remove dead node
3385 in2->disconnect_inputs(NULL, this);
3386 }
3387 }
3388 break;
3389 case Op_MemBarStoreStore:
3390 case Op_MemBarRelease:
3391 // Break the link with AllocateNode: it is no longer useful and
3392 // confuses register allocation.
// Only membars that actually carry a Precedent edge are touched; the
// edge is overwritten with top() rather than removed so req() is stable.
3393 if (n->req() > MemBarNode::Precedent) {
3394 n->set_req(MemBarNode::Precedent, top());
3395 }
3396 break;
3397 #if INCLUDE_SHENANDOAHGC
// Shenandoah CAS / compare-and-exchange memory nodes: no reshaping is
// performed; in debug builds they fall through to an address-type
// sanity check (VerifyOptoOopOffsets).
3398 case Op_ShenandoahCompareAndSwapP:
3399 case Op_ShenandoahCompareAndSwapN:
3400 case Op_ShenandoahWeakCompareAndSwapN:
3401 case Op_ShenandoahWeakCompareAndSwapP:
3402 case Op_ShenandoahCompareAndExchangeP:
3403 case Op_ShenandoahCompareAndExchangeN:
#ifdef ASSERT
      if (VerifyOptoOopOffsets) {
        MemNode* mem = n->as_Mem();
        // Check to see if address types have grounded out somehow.
        // isa_instptr() returns NULL when the address is not an instance
        // pointer, so tp must be NULL-checked BEFORE it is dereferenced.
        // (Previously tp->klass()/tp->offset() were evaluated
        // unconditionally, defeating the "!tp ||" guard in the assert
        // and crashing debug builds on a NULL tp.)
        const TypeInstPtr* tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
        if (tp != NULL) {
          ciInstanceKlass* k = tp->klass()->as_instance_klass();
          bool oop_offset_is_sane = k->contains_field_offset(tp->offset());
          assert(oop_offset_is_sane, "oop offset must lie within the klass");
        }
      }
#endif
3414 break;
// Shenandoah read barriers need no reshaping at this point.
3415 case Op_ShenandoahReadBarrier:
3416 break;
// Write barriers are expanded by an earlier phase; reaching one here is
// a compiler bug (debug builds trap on the assert below).
3417 case Op_ShenandoahWriteBarrier:
3418 assert(false, "should have been expanded already");
3419 break;
3420 #endif
3421 case Op_RangeCheck: {
3422 RangeCheckNode* rc = n->as_RangeCheck();
// Replace the RangeCheckNode with a plain IfNode carrying the same
// control input, condition, probability and frequency.
3423 Node* iff = new IfNode(rc->in(0), rc->in(1), rc->_prob, rc->_fcnt);
3424 n->subsume_by(iff, this);
// Record the replacement test in frc._tests (consumed by a later pass —
// not visible in this excerpt).
3425 frc._tests.push(iff);
3426 break;
3427 }
3428 case Op_ConvI2L: {
3429 if (!Matcher::convi2l_type_required) {
3430 // Code generation on some platforms doesn't need accurate
3431 // ConvI2L types. Widening the type can help remove redundant
3432 // address computations.
// Widen this conversion's type to the full int range (TypeLong::INT).
3433 n->as_Type()->set_type(TypeLong::INT);
// NOTE(review): this case continues beyond the end of this excerpt.
3434 ResourceMark rm;
|