< prev index next >
src/hotspot/share/opto/subnode.cpp
Print this page
*** 715,734 ****
return NULL; // No change
}
//------------------------------Ideal------------------------------------------
Node* CmpLNode::Ideal(PhaseGVN* phase, bool can_reshape) {
! if (in(1)->Opcode() == Op_OrL && in(1)->in(1)->Opcode() == Op_CastP2X && in(1)->in(2)->Opcode() == Op_CastP2X) { // old form: matches OrL(CastP2X(a), CastP2X(b)) with NO check on what it is compared against (in(2))
Node* a = in(1)->in(1)->in(1); // a = pointer feeding the first CastP2X
Node* b = in(1)->in(2)->in(1); // b = pointer feeding the second CastP2X
const Type* ta = phase->type(a);
const Type* tb = phase->type(b);
if (ta->is_zero_type() || tb->is_zero_type()) {
return new CmpPNode(a, b); // one side is the NULL constant: fall back to an ordinary pointer compare
} else if (!TypePtr::NULL_PTR->higher_equal(ta) || !TypePtr::NULL_PTR->higher_equal(tb)) {
// One operand is never NULL, emit constant false
! return new CmpLNode(phase->longcon(0), phase->longcon(1)); // CmpL(0, 1): a constant "not equal" — the constant-false result noted above
}
}
return NULL;
}
--- 715,739 ----
return NULL; // No change
}
//------------------------------Ideal------------------------------------------
Node* CmpLNode::Ideal(PhaseGVN* phase, bool can_reshape) {
! // Match double null check emitted by Compile::optimize_acmp()
! if (in(1)->Opcode() == Op_OrL &&
! in(1)->in(1)->Opcode() == Op_CastP2X &&
! in(1)->in(2)->Opcode() == Op_CastP2X &&
! phase->type(in(2))->is_zero_type()) { // new guard: pattern is now anchored to CmpL(OrL(CastP2X, CastP2X), 0), i.e. a genuine compare-against-zero
Node* a = in(1)->in(1)->in(1); // a = pointer feeding the first CastP2X
Node* b = in(1)->in(2)->in(1); // b = pointer feeding the second CastP2X
const Type* ta = phase->type(a);
const Type* tb = phase->type(b);
if (ta->is_zero_type() || tb->is_zero_type()) {
+ // Degraded to a null check, use old acmp
return new CmpPNode(a, b);
} else if (!TypePtr::NULL_PTR->higher_equal(ta) || !TypePtr::NULL_PTR->higher_equal(tb)) {
// One operand is never NULL, emit constant false
! return new CmpLNode(phase->longcon(0), phase->longcon(1)); // We need to return a new node; CmpL(0, 1) folds to a constant "not equal"
}
}
return NULL;
}
*** 825,834 ****
--- 830,846 ----
if (r0 == r1 && r0->singleton()) { // identical pointer constants compare equal
// Equal pointer constants (klasses, nulls, etc.)
return TypeInt::CC_EQ;
}
+ // Optimize old acmp with value type operands
+ if ((r0->is_valuetypeptr()|| r1->is_valuetypeptr()) && // NOTE(review): missing space before "||" — cosmetic, flag upstream
+ (!TypePtr::NULL_PTR->higher_equal(r0) || !TypePtr::NULL_PTR->higher_equal(r1))) {
+ // One operand is a value type and one operand is never null, fold to constant false
+ return TypeInt::CC_GT; // any non-CC_EQ constant encodes the "constant false" (not-equal) answer for acmp
+ }
+
// See if it is 2 unrelated classes.
const TypeOopPtr* p0 = r0->isa_oopptr(); // presumably this fragment is CmpPNode's Value/sub — header is outside this view, confirm in full file
const TypeOopPtr* p1 = r1->isa_oopptr();
if (p0 && p1) {
Node* in1 = in(1)->uncast();
*** 964,982 ****
Node* is_value = phase->C->load_is_value_bit(phase, b);
set_req(1, phase->transform(new AddPNode(b, b, is_value))); // fold the is-value bit into operand 1 in place
set_req(2, a);
return this; // node modified in place, report progress
}
- } else { // removed: the value-type fold below now lives on the Value path (the CC_GT hunk at old line 825)
- // Optimize old acmp with value type operands
- const TypeInstPtr* ta = phase->type(in(1))->isa_instptr();
- const TypeInstPtr* tb = phase->type(in(2))->isa_instptr();
- if (((ta != NULL && ta->is_loaded() && ta->is_valuetypeptr()) || (tb != NULL && tb->is_loaded() && tb->is_valuetypeptr())) &&
- (!TypePtr::NULL_PTR->higher_equal(phase->type(in(1))) || !TypePtr::NULL_PTR->higher_equal(phase->type(in(2))))) {
- // One operand is a value type and one operand is never null, fold to constant false
- return new CmpINode(phase->intcon(0), phase->intcon(1)); // CmpI(0, 1): constant "not equal"
- }
}
// Normalize comparisons between Java mirrors into comparisons of the low-
// level klass, where a dependent load could be shortened.
//
--- 976,985 ----
< prev index next >