src/hotspot/share/opto/valuetypenode.cpp

 454       if (field->is_ValueType()) {
 455         assert(f->is_flattened(), "should be flattened");
 456         field = field->as_ValueType()->allocate(kit)->get_oop();
 457       }
 458       C->gvn_replace_by(pn, field);
 459       C->initial_gvn()->hash_delete(pn);
 460       pn->set_req(0, C->top());
 461       --i; --imax;
 462     }
 463   }
 464 }
 465 
 466 ValueTypeNode* ValueTypeNode::make_uninitialized(PhaseGVN& gvn, ciValueKlass* klass) {
 467   // Create a new ValueTypeNode with uninitialized values and NULL oop
 468   const TypeValueType* type = TypeValueType::make(klass);
 469   return new ValueTypeNode(type, gvn.zerocon(T_VALUETYPE));
 470 }
 471 
 472 Node* ValueTypeNode::load_default_oop(PhaseGVN& gvn, ciValueKlass* vk) {
 473   // Load the default oop from the java mirror of the given ValueKlass
 474   const TypeInstPtr* tip = TypeInstPtr::make(vk->java_mirror());
 475   Node* base = gvn.makecon(tip);
 476   Node* adr = gvn.transform(new AddPNode(base, base, gvn.MakeConX(vk->default_value_offset())));
 477   const Type* rt = Type::get_const_type(vk)->join_speculative(TypePtr::NOTNULL);
 478   return gvn.transform(LoadNode::make(gvn, NULL, gvn.C->immutable_memory(), adr, tip, rt, T_VALUETYPE, MemNode::unordered));
 479 }
 480 
 481 ValueTypeNode* ValueTypeNode::make_default(PhaseGVN& gvn, ciValueKlass* vk) {
 482   // Create a new ValueTypeNode with default values
 483   Node* oop = load_default_oop(gvn, vk);
 484   const TypeValueType* type = TypeValueType::make(vk);
 485   ValueTypeNode* vt = new ValueTypeNode(type, oop);
 486   for (uint i = 0; i < vt->field_count(); ++i) {
 487     ciType* field_type = vt->field_type(i);
 488     Node* value = NULL;
 489     if (field_type->is_valuetype()) {
 490       value = ValueTypeNode::make_default(gvn, field_type->as_value_klass());
 491     } else {
 492       value = gvn.zerocon(field_type->basic_type());
 493     }
 494     vt->set_field_value(i, value);
 495   }
 496   return gvn.transform(vt)->as_ValueType();
 497 }
 498 
 499 ValueTypeNode* ValueTypeNode::make_from_oop(PhaseGVN& gvn, Node*& ctl, Node* mem, Node* oop, ciValueKlass* vk, bool null_check, bool buffer_check) {
 500   assert(!(null_check && buffer_check), "should not both require a null and a buffer check");
 501 
 502   if (gvn.type(oop)->remove_speculative() == TypePtr::NULL_PTR) {
 503     assert(null_check, "unexpected null?");
 504     return make_default(gvn, vk);
 505   }
 506   // Create and initialize a ValueTypeNode by loading all field
 507   // values from a heap-allocated version and also save the oop.
 508   ValueTypeNode*  vt = new ValueTypeNode(TypeValueType::make(vk), oop);
 509 
 510   if (null_check && !vt->is_allocated(&gvn)) {
 511     // Add oop null check
 512     Node* chk = gvn.transform(new CmpPNode(oop, gvn.zerocon(T_VALUETYPE)));
 513     Node* tst = gvn.transform(new BoolNode(chk, BoolTest::ne));
 514     IfNode* iff = gvn.transform(new IfNode(ctl, tst, PROB_MAX, COUNT_UNKNOWN))->as_If();
 515     Node* not_null = gvn.transform(new IfTrueNode(iff));
 516     Node* null = gvn.transform(new IfFalseNode(iff));


 707       }
 708       if (arg->is_ValueType()) {
 709         // non-flattened value type field
 710         ValueTypeNode* vt = arg->as_ValueType();
 711         assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
 712         arg = vt->allocate(&kit)->get_oop();
 713       }
 714       n->init_req(base_input + j + extra, arg);
 715       edges++;
 716       BasicType bt = field_type(i)->basic_type();
 717       if (bt == T_LONG || bt == T_DOUBLE) {
 718         n->init_req(base_input + j + extra + 1, kit.top());
 719         edges++;
 720       }
 721     }
 722   }
 723   return edges;
 724 }
 725 
 726 Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
 727   if (!is_allocated(phase) && !value_klass()->is_bufferable()) {
 728     // Save base oop if fields are loaded from memory and the value
 729     // type is not buffered (in this case we should not use the oop).
 730     Node* base = is_loaded(phase);
 731     if (base != NULL) {
 732       set_oop(base);
 733       assert(is_allocated(phase), "should now be allocated");
 734       return this;
 735     }
 736   }
 737 
 738   if (can_reshape) {
 739     PhaseIterGVN* igvn = phase->is_IterGVN();
 740     if (is_allocated(igvn)) {
 741       // Value type is heap allocated, search for safepoint uses
 742       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 743         Node* out = fast_out(i);
 744         if (out->is_SafePoint()) {
 745           // Let SafePointNode::Ideal() take care of re-wiring the
 746           // safepoint to the oop input instead of the value type node.
 747           igvn->rehash_node_delayed(out);
 748         }
 749       }
 750     }
 751   }
 752   return NULL;
 753 }
 754 
 755 // Search for multiple allocations of this value type
 756 // and try to replace them by dominating allocations.
 757 void ValueTypeNode::remove_redundant_allocations(PhaseIterGVN* igvn, PhaseIdealLoop* phase) {
 758   assert(EliminateAllocations, "allocation elimination should be enabled");
 759   // Search for allocations of this value type
 760   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 761     AllocateNode* alloc = fast_out(i)->isa_Allocate();
 762     if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 763       Node* res_dom = NULL;
 764       if (is_allocated(igvn)) {
 765         // The value type is already allocated but still connected to an AllocateNode.
 766         // This can happen with late inlining when we first allocate a value type argument
 767         // but later decide to inline the call with the callee code also allocating.
 768         res_dom = get_oop();
 769       } else {
 770         // Search for a dominating allocation of the same value type
 771         for (DUIterator_Fast jmax, j = fast_outs(jmax); j < jmax; j++) {
 772           Node* out2 = fast_out(j);
 773           if (alloc != out2 && out2->is_Allocate() && out2->in(AllocateNode::ValueNode) == this &&
 774               phase->is_dominator(out2, alloc)) {
 775             AllocateNode* alloc_dom =  out2->as_Allocate();
 776             assert(alloc->in(AllocateNode::KlassNode) == alloc_dom->in(AllocateNode::KlassNode), "klasses should match");
 777             res_dom = alloc_dom->result_cast();
 778             break;
 779           }
 780         }
 781       }
 782       if (res_dom != NULL) {




 454       if (field->is_ValueType()) {
 455         assert(f->is_flattened(), "should be flattened");
 456         field = field->as_ValueType()->allocate(kit)->get_oop();
 457       }
 458       C->gvn_replace_by(pn, field);
 459       C->initial_gvn()->hash_delete(pn);
 460       pn->set_req(0, C->top());
 461       --i; --imax;
 462     }
 463   }
 464 }
 465 
 466 ValueTypeNode* ValueTypeNode::make_uninitialized(PhaseGVN& gvn, ciValueKlass* klass) {
 467   // Create a new ValueTypeNode with uninitialized values and NULL oop
 468   const TypeValueType* type = TypeValueType::make(klass);
 469   return new ValueTypeNode(type, gvn.zerocon(T_VALUETYPE));
 470 }
 471 
 472 Node* ValueTypeNode::load_default_oop(PhaseGVN& gvn, ciValueKlass* vk) {
 473   // Load the default oop from the java mirror of the given ValueKlass
 474   assert(!vk->is__Value(), "__Value has no default oop");
 475   const TypeInstPtr* tip = TypeInstPtr::make(vk->java_mirror());
 476   Node* base = gvn.makecon(tip);
 477   Node* adr = gvn.transform(new AddPNode(base, base, gvn.MakeConX(vk->default_value_offset())));
 478   const Type* rt = Type::get_const_type(vk)->join_speculative(TypePtr::NOTNULL);
 479   return gvn.transform(LoadNode::make(gvn, NULL, gvn.C->immutable_memory(), adr, tip, rt, T_VALUETYPE, MemNode::unordered));
 480 }
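
The AddP/Load sequence above reads the pre-allocated default instance that the ValueKlass keeps in its java mirror at default_value_offset(). As an illustrative sketch only (not part of this change), the ideal nodes compute roughly the following pointer arithmetic at run time, assuming the default instance really is stored as an ordinary oop slot in the mirror:

    // sketch, not in the webrev: what base/adr and the LoadNode above amount to
    //   oop mirror = <java mirror of vk>;                                    // constant base (TypeInstPtr)
    //   oop def    = *(oop*)((address)mirror + vk->default_value_offset());  // pre-allocated default instance
    // The result is typed NOTNULL because the default instance is assumed to
    // exist once the value klass has been initialized.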
 481 
 482 ValueTypeNode* ValueTypeNode::make_default(PhaseGVN& gvn, ciValueKlass* vk) {
 483   // Create a new ValueTypeNode with default values
 484   Node* oop = load_default_oop(gvn, vk);
 485   const TypeValueType* type = TypeValueType::make(vk);
 486   ValueTypeNode* vt = new ValueTypeNode(type, oop);
 487   for (uint i = 0; i < vt->field_count(); ++i) {
 488     ciType* field_type = vt->field_type(i);
 489     Node* value = NULL;
 490     if (field_type->is_valuetype()) {
 491       value = ValueTypeNode::make_default(gvn, field_type->as_value_klass());
 492     } else {
 493       value = gvn.zerocon(field_type->basic_type());
 494     }
 495     vt->set_field_value(i, value);
 496   }
 497   vt = gvn.transform(vt)->as_ValueType();
 498   assert(vt->is_default(gvn), "must be the default value type");
 499   return vt;
 500 }
 501 
 502 
 503 bool ValueTypeNode::is_default(PhaseGVN& gvn) const {
 504   if (value_klass()->is__Value()) {
 505     return false;
 506   }
 507   for (uint i = 0; i < field_count(); ++i) {
 508     Node* value = field_value(i);
 509     if (!gvn.type(value)->is_zero_type() &&
 510         !(value->is_ValueType() && value->as_ValueType()->is_default(gvn))) {
 511       return false;
 512     }
 513   }
 514   return true;
 515 }
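
Taken together, make_default() and is_default() establish the invariant checked by the assert at new line 498: a freshly built default value type must report itself as default. A minimal usage sketch, assuming a hypothetical caller that already has a PhaseGVN 'gvn' and a ciValueKlass 'vk' in scope (not part of this change):

    ValueTypeNode* def_vt = ValueTypeNode::make_default(gvn, vk);
    assert(def_vt->is_default(gvn), "all fields are zero or themselves default");
    // Ideal() below can later swap in the pre-allocated oop from load_default_oop()
    // for such a node, so no fresh buffer allocation is needed for the default value.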
 516 
 517 ValueTypeNode* ValueTypeNode::make_from_oop(PhaseGVN& gvn, Node*& ctl, Node* mem, Node* oop, ciValueKlass* vk, bool null_check, bool buffer_check) {
 518   assert(!(null_check && buffer_check), "should not both require a null and a buffer check");
 519 
 520   if (gvn.type(oop)->remove_speculative() == TypePtr::NULL_PTR) {
 521     assert(null_check, "unexpected null?");
 522     return make_default(gvn, vk);
 523   }
 524   // Create and initialize a ValueTypeNode by loading all field
 525   // values from a heap-allocated version and also save the oop.
 526   ValueTypeNode*  vt = new ValueTypeNode(TypeValueType::make(vk), oop);
 527 
 528   if (null_check && !vt->is_allocated(&gvn)) {
 529     // Add oop null check
 530     Node* chk = gvn.transform(new CmpPNode(oop, gvn.zerocon(T_VALUETYPE)));
 531     Node* tst = gvn.transform(new BoolNode(chk, BoolTest::ne));
 532     IfNode* iff = gvn.transform(new IfNode(ctl, tst, PROB_MAX, COUNT_UNKNOWN))->as_If();
 533     Node* not_null = gvn.transform(new IfTrueNode(iff));
 534     Node* null = gvn.transform(new IfFalseNode(iff));


 725       }
 726       if (arg->is_ValueType()) {
 727         // non-flattened value type field
 728         ValueTypeNode* vt = arg->as_ValueType();
 729         assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
 730         arg = vt->allocate(&kit)->get_oop();
 731       }
 732       n->init_req(base_input + j + extra, arg);
 733       edges++;
 734       BasicType bt = field_type(i)->basic_type();
 735       if (bt == T_LONG || bt == T_DOUBLE) {
 736         n->init_req(base_input + j + extra + 1, kit.top());
 737         edges++;
 738       }
 739     }
 740   }
 741   return edges;
 742 }
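
The extra top() input for T_LONG and T_DOUBLE follows C2's two-slot convention for these types. As a hedged example (field names are made up and 'extra' is assumed to be 0), for a value type with fields (int a; long b) the loop above would emit:

    n->init_req(base_input + 0, a_value);    // int:  one edge
    n->init_req(base_input + 1, b_value);    // long: first edge
    n->init_req(base_input + 2, kit.top());  // long: second, dummy edge
    // 'edges' is returned as 3 for these two fields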
 743 
 744 Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
 745   Node* oop = get_oop();
 746 
 747   if (is_default(*phase) && !oop->is_Load() &&
 748       !(oop->is_DecodeN() && oop->in(1)->is_Load())) {
 749     // Use the pre-allocated oop for default value types
 750     Node* oop = load_default_oop(*phase, value_klass());
 751     set_oop(oop);
 752     return this;
 753   }
 754 
 755   if (!is_allocated(phase) && !value_klass()->is_bufferable()) {
 756     // Save base oop if fields are loaded from memory and the value
 757     // type is not buffered (in this case we should not use the oop).
 758     Node* base = is_loaded(phase);
 759     if (base != NULL) {
 760       set_oop(base);
 761       assert(is_allocated(phase), "should now be allocated");
 762       return this;
 763     }
 764   }
 765 
 766   if (can_reshape) {
 767     PhaseIterGVN* igvn = phase->is_IterGVN();
 768 
 769     if (is_default(*phase)) {
 770       // Search for allocations of the default value type
 771       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 772         AllocateNode* alloc = fast_out(i)->isa_Allocate();
 773         if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 774           // Replace allocation by the pre-allocated oop
 775           Node* res = alloc->result_cast();
 776           Node* oop = load_default_oop(*phase, value_klass());
 777           igvn->replace_node(res, oop);
 778         }
 779       }
 780     }
 781 
 782     if (is_allocated(igvn)) {
 783       // Value type is heap allocated, search for safepoint uses
 784       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 785         Node* out = fast_out(i);
 786         if (out->is_SafePoint()) {
 787           // Let SafePointNode::Ideal() take care of re-wiring the
 788           // safepoint to the oop input instead of the value type node.
 789           igvn->rehash_node_delayed(out);
 790         }
 791       }
 792     }
 793   }
 794   return NULL;
 795 }
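
A rough before/after of the new is_default() branch in Ideal(), illustrative only:

    // before:  this (default value type) --> AllocateNode --> result_cast() --> users
    // after:   the users of result_cast() consume the load of the pre-allocated oop
    //          produced by load_default_oop(); the now-unused allocation is presumably
    //          removed by later IGVN or allocation elimination.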
 796 
 797 // Search for multiple allocations of this value type
 798 // and try to replace them by dominating allocations.
 799 void ValueTypeNode::remove_redundant_allocations(PhaseIterGVN* igvn, PhaseIdealLoop* phase) {
 800   assert(EliminateAllocations, "allocation elimination should be enabled");
 801   // Search for allocations of this value type
 802   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 803     AllocateNode* alloc = fast_out(i)->isa_Allocate();
 804     if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 805       assert(!is_default(*igvn), "default value type allocation");
 806       Node* res_dom = NULL;
 807       if (is_allocated(igvn)) {
 808         // The value type is already allocated but still connected to an AllocateNode.
 809         // This can happen with late inlining when we first allocate a value type argument
 810         // but later decide to inline the call with the callee code also allocating.
 811         res_dom = get_oop();
 812       } else {
 813         // Search for a dominating allocation of the same value type
 814         for (DUIterator_Fast jmax, j = fast_outs(jmax); j < jmax; j++) {
 815           Node* out2 = fast_out(j);
 816           if (alloc != out2 && out2->is_Allocate() && out2->in(AllocateNode::ValueNode) == this &&
 817               phase->is_dominator(out2, alloc)) {
 818             AllocateNode* alloc_dom =  out2->as_Allocate();
 819             assert(alloc->in(AllocateNode::KlassNode) == alloc_dom->in(AllocateNode::KlassNode), "klasses should match");
 820             res_dom = alloc_dom->result_cast();
 821             break;
 822           }
 823         }
 824       }
 825       if (res_dom != NULL) {