< prev index next >

src/hotspot/share/opto/valuetypenode.cpp

Print this page




 237   // Process all safepoint uses and scalarize value type
 238   Unique_Node_List worklist;
 239   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 240     Node* u = fast_out(i);
 241     if (u->is_SafePoint() && !u->is_CallLeaf() && (!u->is_Call() || u->as_Call()->has_debug_use(this))) {
 242       SafePointNode* sfpt = u->as_SafePoint();
 243       Node* in_oop = get_oop();
 244       const Type* oop_type = in_oop->bottom_type();
 245       assert(Opcode() == Op_ValueTypePtr || !isa_ValueType()->is_allocated(gvn), "already heap allocated value types should be linked directly");
 246       int nb = make_scalar_in_safepoint(worklist, sfpt, root, gvn);
 247       --i; imax -= nb;
 248     }
 249   }
 250   // Now scalarize non-flattened fields
 251   for (uint i = 0; i < worklist.size(); ++i) {
 252     Node* vt = worklist.at(i);
 253     vt->as_ValueType()->make_scalar_in_safepoints(root, gvn);
 254   }
 255 }
 256 
// Initialize this value type's field values from the inputs or outputs of
// 'multi' (a StartNode for incoming method parameters, or a call node for a
// returned value). 'base_offset' is the offset of this (possibly nested,
// flattened) value type within the outermost holder klass 'vk'; 'base_input'
// is the first input/output index used by the outermost value type; 'in'
// selects whether values are read from the node's inputs (call arguments)
// or from output projections.
void ValueTypeBaseNode::initialize(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, int base_offset, int base_input, bool in) {
  assert(base_offset >= 0, "offset in value type must be positive");

  PhaseGVN& gvn = kit->gvn();
  for (uint i = 0; i < field_count(); i++) {
    ciType* ft = field_type(i);
    int offset = base_offset + field_offset(i);
    if (field_is_flattened(i)) {
      // Flattened value type field: recurse into a fresh uninitialized node.
      // Subtract first_field_offset() because the field is embedded without
      // an object header.
      ValueTypeNode* vt = ValueTypeNode::make_uninitialized(gvn, ft->as_value_klass());
      vt->initialize(kit, multi, vk, offset - value_klass()->first_field_offset(), base_input, in);
      set_field_value(i, gvn.transform(vt));
    } else {
      // Locate index 'j' of this field in the outermost klass' non-static
      // field list; 'extra' counts the additional input slots consumed by
      // preceding two-word (long/double) fields.
      int j = 0; int extra = 0;
      for (; j < vk->nof_nonstatic_fields(); j++) {
        ciField* f = vk->nonstatic_field_at(j);
        if (offset == f->offset()) {
          assert(f->type() == ft, "inconsistent field type");
          break;
        }
        BasicType bt = f->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      assert(j != vk->nof_nonstatic_fields(), "must find");
      Node* parm = NULL;
      if (multi->is_Start()) {
        // Incoming method parameter: project it off the Start node.
        assert(in, "return from start?");
        parm = gvn.transform(new ParmNode(multi->as_Start(), base_input + j + extra));
      } else {
        if (in) {
          // Read the value directly from the call's argument inputs.
          parm = multi->as_Call()->in(base_input + j + extra);
        } else {
          // Returned value: project it off the call node.
          parm = gvn.transform(new ProjNode(multi->as_Call(), base_input + j + extra));
        }
      }
      if (ft->is_valuetype()) {
        // Non-flattened value type field: wrap the (non-null) oop.
        assert(!gvn.type(parm)->maybe_null(), "should never be null");
        parm = ValueTypeNode::make_from_oop(kit, parm, ft->as_value_klass());
      }
      set_field_value(i, parm);
      // Record all these guys for later GVN.
      gvn.record_for_igvn(parm);
    }
  }
}
 304 
 305 const TypePtr* ValueTypeBaseNode::field_adr_type(Node* base, int offset, ciInstanceKlass* holder, PhaseGVN& gvn) const {
 306   const TypeAryPtr* ary_type = gvn.type(base)->isa_aryptr();
 307   const TypePtr* adr_type = NULL;
 308   bool is_array = ary_type != NULL;
 309   if (is_array) {
 310     // In the case of a flattened value type array, each field has its own slice
 311     adr_type = ary_type->with_field_offset(offset)->add_offset(Type::OffsetBot);
 312   } else {
 313     ciField* field = holder->get_field_by_offset(offset, false);
 314     assert(field != NULL, "field not found");
 315     adr_type = gvn.C->alias_type(field)->adr_type();
 316   }
 317   return adr_type;
 318 }
 319 
 320 void ValueTypeBaseNode::load(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
 321   // Initialize the value type by loading its field values from
 322   // memory and adding the values as input edges to the node.


 585            vt->is_loaded(&gvn) == oop, "value type should be loaded");
 586   }
 587 
 588   assert(vt->is_allocated(&gvn), "value type should be allocated");
 589   return gvn.transform(vt)->as_ValueType();
 590 }
 591 
 592 // GraphKit wrapper for the 'make_from_flattened' method
 593 ValueTypeNode* ValueTypeNode::make_from_flattened(GraphKit* kit, ciValueKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
 594   // Create and initialize a ValueTypeNode by loading all field values from
 595   // a flattened value type field at 'holder_offset' or from a value type array.
 596   ValueTypeNode* vt = make_uninitialized(kit->gvn(), vk);
 597   // The value type is flattened into the object without an oop header. Subtract the
 598   // offset of the first field to account for the missing header when loading the values.
 599   holder_offset -= vk->first_field_offset();
 600   vt->load(kit, obj, ptr, holder, holder_offset);
 601   assert(vt->is_loaded(&kit->gvn()) != obj, "holder oop should not be used as flattened value type oop");
 602   return kit->gvn().transform(vt)->as_ValueType();
 603 }
 604 
 605 ValueTypeNode* ValueTypeNode::make_from_multi(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, int base_input, bool in) {
 606   ValueTypeNode* vt = ValueTypeNode::make_uninitialized(kit->gvn(), vk);
 607   vt->initialize(kit, multi, vk, 0, base_input, in);
 608   return kit->gvn().transform(vt)->as_ValueType();
 609 }
 610 
 611 Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) {
 612   if (vk == NULL) {
 613     vk = value_klass();
 614   }
 615   if (field_count() == 0) {
 616     assert(is_allocated(phase), "must be allocated");
 617     return get_oop();
 618   }
 619   for (uint i = 0; i < field_count(); ++i) {
 620     int offset = holder_offset + field_offset(i);
 621     Node* value = field_value(i);
 622     if (value->is_ValueType()) {
 623       ValueTypeNode* vt = value->as_ValueType();
 624       if (field_is_flattened(i)) {
 625         // Check value type field load recursively


 669        // Flattened value type field
 670        vt->set_field_value(i, value->allocate_fields(kit));
 671      } else if (value != NULL){
 672        // Non-flattened value type field
 673        vt->set_field_value(i, value->allocate(kit));
 674      }
 675   }
 676   vt = kit->gvn().transform(vt)->as_ValueType();
 677   kit->replace_in_map(this, vt);
 678   return vt;
 679 }
 680 
 681 Node* ValueTypeNode::tagged_klass(PhaseGVN& gvn) {
 682   ciValueKlass* vk = value_klass();
 683   const TypeKlassPtr* tk = TypeKlassPtr::make(vk);
 684   intptr_t bits = tk->get_con();
 685   set_nth_bit(bits, 0);
 686   return gvn.makecon(TypeRawPtr::make((address)bits));
 687 }
 688 
 689 void ValueTypeNode::pass_klass(Node* n, uint pos, const GraphKit& kit) {
 690   n->init_req(pos, tagged_klass(kit.gvn()));
 691 }
 692 
// Pass the field values of this value type to node 'n' starting at input
// 'base_input'. Flattened value type fields are passed recursively as their
// individual scalar fields; non-flattened value type fields are passed as a
// single oop (allocating first if needed). Returns the number of edges added.
// 'base_vk'/'base_offset' track the outermost klass and offset during
// recursion (NULL/0 at the top-level call).
uint ValueTypeNode::pass_fields(Node* n, int base_input, GraphKit& kit, bool assert_allocated, ciValueKlass* base_vk, int base_offset) {

  ciValueKlass* vk = value_klass();
  if (base_vk == NULL) {
    // Top-level call: field layout is relative to this klass itself.
    base_vk = vk;
  }
  uint edges = 0;
  for (uint i = 0; i < field_count(); i++) {
    // Offset relative to base_vk. In a nested call (base_offset > 0) the
    // flattened field is embedded without an object header, so subtract
    // first_field_offset() to compensate.
    int offset = base_offset + field_offset(i) - (base_offset > 0 ? vk->first_field_offset() : 0);
    Node* arg = field_value(i);
    if (field_is_flattened(i)) {
       // Flattened value type field
       edges += arg->as_ValueType()->pass_fields(n, base_input, kit, assert_allocated, base_vk, offset);
    } else {
      // Find index 'j' of this field in base_vk's non-static field list;
      // 'extra' counts extra slots used by preceding two-word fields.
      int j = 0; int extra = 0;
      for (; j < base_vk->nof_nonstatic_fields(); j++) {
        ciField* field = base_vk->nonstatic_field_at(j);
        if (offset == field->offset()) {
          assert(field->type() == field_type(i), "inconsistent field type");
          break;
        }
        BasicType bt = field->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      if (arg->is_ValueType()) {
        // non-flattened value type field
        ValueTypeNode* vt = arg->as_ValueType();
        assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
        arg = vt->allocate(&kit)->get_oop();
      }
      n->init_req(base_input + j + extra, arg);
      edges++;
      BasicType bt = field_type(i)->basic_type();
      if (bt == T_LONG || bt == T_DOUBLE) {
        // Two-word values occupy an extra (top) input slot.
        n->init_req(base_input + j + extra + 1, kit.top());
        edges++;
      }
    }
  }
  return edges;
}
 735 
 736 Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
 737   Node* oop = get_oop();
 738   if (is_default(*phase) && (!oop->is_Con() || phase->type(oop)->is_zero_type())) {
 739     // Use the pre-allocated oop for default value types
 740     set_oop(default_oop(*phase, value_klass()));
 741     return this;
 742   } else if (oop->isa_ValueTypePtr()) {
 743     // Can happen with late inlining
 744     ValueTypePtrNode* vtptr = oop->as_ValueTypePtr();
 745     set_oop(vtptr->get_oop());
 746     for (uint i = Oop+1; i < vtptr->req(); ++i) {
 747       set_req(i, vtptr->in(i));
 748     }
 749     return this;




 237   // Process all safepoint uses and scalarize value type
 238   Unique_Node_List worklist;
 239   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 240     Node* u = fast_out(i);
 241     if (u->is_SafePoint() && !u->is_CallLeaf() && (!u->is_Call() || u->as_Call()->has_debug_use(this))) {
 242       SafePointNode* sfpt = u->as_SafePoint();
 243       Node* in_oop = get_oop();
 244       const Type* oop_type = in_oop->bottom_type();
 245       assert(Opcode() == Op_ValueTypePtr || !isa_ValueType()->is_allocated(gvn), "already heap allocated value types should be linked directly");
 246       int nb = make_scalar_in_safepoint(worklist, sfpt, root, gvn);
 247       --i; imax -= nb;
 248     }
 249   }
 250   // Now scalarize non-flattened fields
 251   for (uint i = 0; i < worklist.size(); ++i) {
 252     Node* vt = worklist.at(i);
 253     vt->as_ValueType()->make_scalar_in_safepoints(root, gvn);
 254   }
 255 }
 256 
// Initialize this value type's field values from the inputs or outputs of
// 'multi' (a StartNode for incoming method parameters, or a call node for a
// returned value). 'base_offset' is the offset of this (possibly nested,
// flattened) value type within the outermost holder klass 'vk'; 'base_input'
// is the first input/output index of the outermost value type and is advanced
// past this value type's argument slots on return; 'in' selects reading from
// the node's inputs (call arguments) vs. output projections.
void ValueTypeBaseNode::initialize(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, int base_offset, uint& base_input, bool in) {
  assert(base_offset >= 0, "offset in value type must be positive");
  assert(base_input >= TypeFunc::Parms, "invalid base input");
  PhaseGVN& gvn = kit->gvn();
  for (uint i = 0; i < field_count(); i++) {
    ciType* ft = field_type(i);
    int offset = base_offset + field_offset(i);
    if (field_is_flattened(i)) {
      // Flattened value type field: recurse with a copy of base_input so the
      // nested call does not advance our position. Subtract
      // first_field_offset() for the missing object header.
      ValueTypeNode* vt = ValueTypeNode::make_uninitialized(gvn, ft->as_value_klass());
      uint base = base_input;
      vt->initialize(kit, multi, vk, offset - value_klass()->first_field_offset(), base, in);
      set_field_value(i, gvn.transform(vt));
    } else {
      // Locate index 'j' of this field in the outermost klass' non-static
      // field list; 'extra' counts extra slots used by preceding two-word
      // (long/double) fields.
      int j = 0; int extra = 0;
      for (; j < vk->nof_nonstatic_fields(); j++) {
        ciField* f = vk->nonstatic_field_at(j);
        if (offset == f->offset()) {
          assert(f->type() == ft, "inconsistent field type");
          break;
        }
        BasicType bt = f->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      assert(j != vk->nof_nonstatic_fields(), "must find");
      Node* parm = NULL;
      int index = base_input + j + extra;

      // NOTE(review): a non-Start 'multi' is assumed to be a CallStaticJava
      // here — confirm no other call kinds reach this path.
      ciMethod* method = multi->is_Start()? kit->C->method() : multi->as_CallStaticJava()->method();
      SigEntry res_entry = method->get_Method()->get_res_entry();
      if (res_entry._offset != -1 && (index - TypeFunc::Parms) >= res_entry._offset) {
        // Skip reserved entry
        index += type2size[res_entry._bt];
      }
      if (multi->is_Start()) {
        // Incoming method parameter: project it off the Start node.
        assert(in, "return from start?");
        parm = gvn.transform(new ParmNode(multi->as_Start(), index));
      } else {
        if (in) {
          // Read the value directly from the call's argument inputs.
          parm = multi->as_Call()->in(index);
        } else {
          // Returned value: project it off the call node.
          parm = gvn.transform(new ProjNode(multi->as_Call(), index));
        }
      }

      if (field_is_flattenable(i)) {
        // Non-flattened but flattenable value type
        if (ft->as_value_klass()->is_scalarizable()) {
          parm = ValueTypeNode::make_from_oop(kit, parm, ft->as_value_klass());
        } else {
          // Not scalarizable: replace a null oop by the default value.
          parm = kit->null2default(parm, ft->as_value_klass());
        }
      }

      set_field_value(i, parm);
      // Record all these guys for later GVN.
      gvn.record_for_igvn(parm);
    }
  }
  // Advance the caller's position past this value type's argument slots.
  base_input += vk->value_arg_slots();
}
 320 
// Compute the memory address type (alias slice) for the field at 'offset'.
const TypePtr* ValueTypeBaseNode::field_adr_type(Node* base, int offset, ciInstanceKlass* holder, PhaseGVN& gvn) const {
  const TypeAryPtr* ary_type = gvn.type(base)->isa_aryptr();
  const TypePtr* adr_type = NULL;
  bool is_array = ary_type != NULL;
  if (is_array) {
    // In the case of a flattened value type array, each field has its own slice
    adr_type = ary_type->with_field_offset(offset)->add_offset(Type::OffsetBot);
  } else {
    // Regular object field: resolve it in the holder klass and use the
    // compiler's alias type for that field.
    ciField* field = holder->get_field_by_offset(offset, false);
    assert(field != NULL, "field not found");
    adr_type = gvn.C->alias_type(field)->adr_type();
  }
  return adr_type;
}
 335 
 336 void ValueTypeBaseNode::load(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
 337   // Initialize the value type by loading its field values from
 338   // memory and adding the values as input edges to the node.


 601            vt->is_loaded(&gvn) == oop, "value type should be loaded");
 602   }
 603 
 604   assert(vt->is_allocated(&gvn), "value type should be allocated");
 605   return gvn.transform(vt)->as_ValueType();
 606 }
 607 
 608 // GraphKit wrapper for the 'make_from_flattened' method
 609 ValueTypeNode* ValueTypeNode::make_from_flattened(GraphKit* kit, ciValueKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
 610   // Create and initialize a ValueTypeNode by loading all field values from
 611   // a flattened value type field at 'holder_offset' or from a value type array.
 612   ValueTypeNode* vt = make_uninitialized(kit->gvn(), vk);
 613   // The value type is flattened into the object without an oop header. Subtract the
 614   // offset of the first field to account for the missing header when loading the values.
 615   holder_offset -= vk->first_field_offset();
 616   vt->load(kit, obj, ptr, holder, holder_offset);
 617   assert(vt->is_loaded(&kit->gvn()) != obj, "holder oop should not be used as flattened value type oop");
 618   return kit->gvn().transform(vt)->as_ValueType();
 619 }
 620 
 621 ValueTypeNode* ValueTypeNode::make_from_multi(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, uint& base_input, bool in) {
 622   ValueTypeNode* vt = ValueTypeNode::make_uninitialized(kit->gvn(), vk);
 623   vt->initialize(kit, multi, vk, 0, base_input, in);
 624   return kit->gvn().transform(vt)->as_ValueType();
 625 }
 626 
 627 Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) {
 628   if (vk == NULL) {
 629     vk = value_klass();
 630   }
 631   if (field_count() == 0) {
 632     assert(is_allocated(phase), "must be allocated");
 633     return get_oop();
 634   }
 635   for (uint i = 0; i < field_count(); ++i) {
 636     int offset = holder_offset + field_offset(i);
 637     Node* value = field_value(i);
 638     if (value->is_ValueType()) {
 639       ValueTypeNode* vt = value->as_ValueType();
 640       if (field_is_flattened(i)) {
 641         // Check value type field load recursively


 685        // Flattened value type field
 686        vt->set_field_value(i, value->allocate_fields(kit));
 687      } else if (value != NULL){
 688        // Non-flattened value type field
 689        vt->set_field_value(i, value->allocate(kit));
 690      }
 691   }
 692   vt = kit->gvn().transform(vt)->as_ValueType();
 693   kit->replace_in_map(this, vt);
 694   return vt;
 695 }
 696 
// Return a constant raw pointer holding this value type's klass pointer with
// its lowest bit set (the tag distinguishes it from a regular oop).
Node* ValueTypeNode::tagged_klass(PhaseGVN& gvn) {
  ciValueKlass* vk = value_klass();
  const TypeKlassPtr* tk = TypeKlassPtr::make(vk);
  intptr_t bits = tk->get_con();
  set_nth_bit(bits, 0);  // set the tag bit
  return gvn.makecon(TypeRawPtr::make((address)bits));
}
 704 




// Pass the field values of this value type to node 'n' starting at input
// 'base_input'. Flattened value type fields are passed recursively as their
// individual scalar fields; non-flattened value type fields are passed as a
// single oop (allocating first if needed). A reserved stack entry of the
// callee's signature (if any) is skipped by padding with top. Returns the
// number of edges added. 'base_vk'/'base_offset' track the outermost klass
// and offset during recursion (NULL/0 at the top-level call).
uint ValueTypeNode::pass_fields(Node* n, int base_input, GraphKit& kit, bool assert_allocated, ciValueKlass* base_vk, int base_offset) {
  assert(base_input >= TypeFunc::Parms, "invalid base input");
  ciValueKlass* vk = value_klass();
  if (base_vk == NULL) {
    // Top-level call: field layout is relative to this klass itself.
    base_vk = vk;
  }
  uint edges = 0;
  for (uint i = 0; i < field_count(); i++) {
    // Offset relative to base_vk. In a nested call (base_offset > 0) the
    // flattened field is embedded without an object header, so subtract
    // first_field_offset() to compensate.
    int offset = base_offset + field_offset(i) - (base_offset > 0 ? vk->first_field_offset() : 0);
    Node* arg = field_value(i);
    if (field_is_flattened(i)) {
       // Flattened value type field
       edges += arg->as_ValueType()->pass_fields(n, base_input, kit, assert_allocated, base_vk, offset);
    } else {
      // Find index 'j' of this field in base_vk's non-static field list;
      // 'extra' counts extra slots used by preceding two-word fields.
      int j = 0; int extra = 0;
      for (; j < base_vk->nof_nonstatic_fields(); j++) {
        ciField* field = base_vk->nonstatic_field_at(j);
        if (offset == field->offset()) {
          assert(field->type() == field_type(i), "inconsistent field type");
          break;
        }
        BasicType bt = field->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      if (arg->is_ValueType()) {
        // non-flattened value type field
        ValueTypeNode* vt = arg->as_ValueType();
        assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
        arg = vt->allocate(&kit)->get_oop();
      }

      int index = base_input + j + extra;
      n->init_req(index++, arg);
      edges++;
      BasicType bt = field_type(i)->basic_type();
      if (bt == T_LONG || bt == T_DOUBLE) {
        // Two-word values occupy an extra (top) input slot.
        n->init_req(index++, kit.top());
        edges++;
      }
      if (n->isa_CallJava()) {
        Method* m = n->as_CallJava()->method()->get_Method();
        SigEntry res_entry = m->get_res_entry();
        if ((index - TypeFunc::Parms) == res_entry._offset) {
          // Skip reserved entry
          int size = type2size[res_entry._bt];
          n->init_req(index++, kit.top());
          if (size == 2) {
            n->init_req(index++, kit.top());
          }
          // Shift all remaining fields of this loop past the reserved entry.
          base_input += size;
          edges += size;
        }
      }
    }
  }
  return edges;
}
 764 
 765 Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
 766   Node* oop = get_oop();
 767   if (is_default(*phase) && (!oop->is_Con() || phase->type(oop)->is_zero_type())) {
 768     // Use the pre-allocated oop for default value types
 769     set_oop(default_oop(*phase, value_klass()));
 770     return this;
 771   } else if (oop->isa_ValueTypePtr()) {
 772     // Can happen with late inlining
 773     ValueTypePtrNode* vtptr = oop->as_ValueTypePtr();
 774     set_oop(vtptr->get_oop());
 775     for (uint i = Oop+1; i < vtptr->req(); ++i) {
 776       set_req(i, vtptr->in(i));
 777     }
 778     return this;


< prev index next >