< prev index next >

src/hotspot/share/opto/valuetypenode.cpp

Print this page




 316   for (uint i = 0; i < field_count(); ++i) {
 317     int offset = holder_offset + field_offset(i);
 318     Node* value = NULL;
 319     ciType* ft = field_type(i);
 320     if (field_is_flattened(i)) {
 321       // Recursively load the flattened value type field
 322       value = ValueTypeNode::make_from_flattened(kit, ft->as_value_klass(), base, ptr, holder, offset);
 323     } else {
 324       const TypeOopPtr* oop_ptr = kit->gvn().type(base)->isa_oopptr();
 325       bool is_array = (oop_ptr->isa_aryptr() != NULL);
 326       if (base->is_Con() && !is_array) {
 327         // If the oop to the value type is constant (static final field), we can
 328         // also treat the fields as constants because the value type is immutable.
 329         ciObject* constant_oop = oop_ptr->const_oop();
 330         ciField* field = holder->get_field_by_offset(offset, false);
 331         assert(field != NULL, "field not found");
 332         ciConstant constant = constant_oop->as_instance()->field_value(field);
 333         const Type* con_type = Type::make_from_constant(constant, /*require_const=*/ true);
 334         assert(con_type != NULL, "type not found");
 335         value = kit->gvn().transform(kit->makecon(con_type));
 336         if (con_type->is_valuetypeptr()) {
 337           // Constant, non-flattened value type field
 338           value = ValueTypeNode::make_from_oop(kit, value, ft->as_value_klass());
 339         }
 340       } else {
 341         // Load field value from memory
 342         const TypePtr* adr_type = field_adr_type(base, offset, holder, kit->gvn());
 343         Node* adr = kit->basic_plus_adr(base, ptr, offset);
 344         BasicType bt = type2field[ft->basic_type()];
 345         assert(is_java_primitive(bt) || adr->bottom_type()->is_ptr_to_narrowoop() == UseCompressedOops, "inconsistent");
 346         const Type* val_type = Type::get_const_type(ft);
 347         DecoratorSet decorators = IN_HEAP | MO_UNORDERED;
 348         if (is_array) {
 349           decorators |= IS_ARRAY;
 350         }
 351         value = kit->access_load_at(base, adr, adr_type, val_type, bt, decorators);
 352         if (bt == T_VALUETYPE) {

 353           // Loading a non-flattened value type from memory
 354           value = ValueTypeNode::make_from_oop(kit, value, ft->as_value_klass(), /* buffer_check */ false, /* null2default */ field_is_flattenable(i), trap_bci);
 355         }
 356       }
 357     }
 358     set_field_value(i, value);
 359   }
 360 }
 361 
 362 void ValueTypeBaseNode::store_flattened(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) const { // Store this value type flattened (no header) into 'base' at 'ptr' plus per-field offsets
 363   // The value type is embedded into the object without an oop header. Subtract the
 364   // offset of the first field to account for the missing header when storing the values.
 365   if (holder == NULL) { // No explicit holder given: the value klass itself holds the fields
 366     holder = value_klass();
 367   }
 368   holder_offset -= value_klass()->first_field_offset(); // Compensate for the missing object header
 369   store(kit, base, ptr, holder, holder_offset); // Delegate to the generic field-by-field store
 370 }
 371 
 372 void ValueTypeBaseNode::store(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, bool deoptimize_on_exception) const {
 373   // Write field values to memory
 374   for (uint i = 0; i < field_count(); ++i) {
 375     int offset = holder_offset + field_offset(i);
 376     Node* value = field_value(i);
 377     ciType* ft = field_type(i);


 473       ciField* f = vk->nonstatic_field_at(field_nb - extra);
 474       Node* field = field_value_by_offset(f->offset(), true);
 475       if (field->is_ValueType()) {
 476         assert(f->is_flattened(), "should be flattened");
 477         field = field->as_ValueType()->allocate(kit)->get_oop();
 478       }
 479       C->gvn_replace_by(pn, field);
 480       C->initial_gvn()->hash_delete(pn);
 481       pn->set_req(0, C->top());
 482       --i; --imax;
 483     }
 484   }
 485 }
 486 
 487 ValueTypeNode* ValueTypeNode::make_uninitialized(PhaseGVN& gvn, ciValueKlass* klass) {
 488   // Create a new ValueTypeNode with uninitialized values and NULL oop
 489   const TypeValueType* type = TypeValueType::make(klass);
 490   return new ValueTypeNode(type, gvn.zerocon(T_VALUETYPE)); // zerocon(T_VALUETYPE) acts as the NULL oop; field inputs are left unset by the caller
 491 }
 492 
 493 Node* ValueTypeNode::load_default_oop(PhaseGVN& gvn, ciValueKlass* vk) {
 494   // Load the default oop from the java mirror of the given ValueKlass
 495   const TypeInstPtr* tip = TypeInstPtr::make(vk->java_mirror()); // constant type of the klass' java.lang.Class mirror
 496   Node* base = gvn.makecon(tip); // constant oop of the mirror itself
 497   Node* adr = gvn.transform(new AddPNode(base, base, gvn.MakeConX(vk->default_value_offset()))); // address of the default-value slot inside the mirror
 498   const Type* rt = Type::get_const_type(vk)->join_speculative(TypePtr::NOTNULL); // result is the value klass' type, known to be non-null
 499   return gvn.transform(LoadNode::make(gvn, NULL, gvn.C->immutable_memory(), adr, tip->add_offset(vk->default_value_offset()), rt, T_VALUETYPE, MemNode::unordered)); // immutable memory: the pre-allocated default never changes
 500 }
 501 
 502 ValueTypeNode* ValueTypeNode::make_default(PhaseGVN& gvn, ciValueKlass* vk) {
 503   // Create a new ValueTypeNode with default values
 504   Node* oop = load_default_oop(gvn, vk); // oop of the pre-allocated default instance, loaded from the java mirror
 505   const TypeValueType* type = TypeValueType::make(vk);
 506   ValueTypeNode* vt = new ValueTypeNode(type, oop);
 507   for (uint i = 0; i < vt->field_count(); ++i) { // initialize every field input to its default
 508     ciType* field_type = vt->field_type(i);
 509     Node* value = NULL;
 510     if (field_type->is_valuetype()) {
 511       value = ValueTypeNode::make_default(gvn, field_type->as_value_klass()); // recurse for value type fields
 512     } else {
 513       value = gvn.zerocon(field_type->basic_type()); // primitives/oops default to zero/null
 514     }
 515     vt->set_field_value(i, value);
 516   }
 517   vt = gvn.transform(vt)->as_ValueType();
 518   assert(vt->is_default(gvn), "must be the default value type");
 519   return vt;
 520 }
 521 
 522 
 523 bool ValueTypeNode::is_default(PhaseGVN& gvn) const {
 524   for (uint i = 0; i < field_count(); ++i) {


 754       if (arg->is_ValueType()) {
 755         // non-flattened value type field
 756         ValueTypeNode* vt = arg->as_ValueType();
 757         assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
 758         arg = vt->allocate(&kit)->get_oop();
 759       }
 760       n->init_req(base_input + j + extra, arg);
 761       edges++;
 762       BasicType bt = field_type(i)->basic_type();
 763       if (bt == T_LONG || bt == T_DOUBLE) {
 764         n->init_req(base_input + j + extra + 1, kit.top());
 765         edges++;
 766       }
 767     }
 768   }
 769   return edges;
 770 }
 771 
 772 Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
 773   Node* oop = get_oop();
 774 
 775   if (is_default(*phase) && !oop->is_Load() &&
 776       !(oop->is_DecodeN() && oop->in(1)->is_Load())) {
 777     // Use the pre-allocated oop for default value types
 778     Node* oop = load_default_oop(*phase, value_klass());
 779     set_oop(oop);
 780     return this;
 781   }
 782 
 783   if (!is_allocated(phase) && !value_klass()->is_bufferable()) {
 784     // Save base oop if fields are loaded from memory and the value
 785     // type is not buffered (in this case we should not use the oop).
 786     Node* base = is_loaded(phase);
 787     if (base != NULL) {
 788       set_oop(base);
 789       assert(is_allocated(phase), "should now be allocated");
 790       return this;
 791     }
 792   }
 793 
 794   if (can_reshape) {
 795     PhaseIterGVN* igvn = phase->is_IterGVN();
 796 
 797     if (is_default(*phase)) {
 798       // Search for users of the default value type
 799       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 800         Node* user = fast_out(i);
 801         AllocateNode* alloc = user->isa_Allocate();
 802         if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 803           // Found an allocation of the default value type.
 804           // If the code in StoreNode::Identity() that removes useless stores was not yet
 805           // executed or ReduceFieldZeroing is disabled, there can still be initializing
 806           // stores (only zero-type or default value stores, because value types are immutable).
 807           Node* res = alloc->result_cast();
 808           for (DUIterator_Fast jmax, j = res->fast_outs(jmax); j < jmax; j++) {
 809             AddPNode* addp = res->fast_out(j)->isa_AddP();
 810             if (addp != NULL) {
 811               for (DUIterator_Fast kmax, k = addp->fast_outs(kmax); k < kmax; k++) {
 812                 StoreNode* store = addp->fast_out(k)->isa_Store();
 813                 if (store != NULL && store->outcnt() != 0) {
 814                   // Remove the useless store
 815                   Node* mem = store->in(MemNode::Memory);
 816                   Node* val = store->in(MemNode::ValueIn);
 817                   const Type* val_type = igvn->type(val);
 818                   assert(val_type->is_zero_type() || (val->is_Load() && val_type->make_ptr()->is_valuetypeptr()),
 819                          "must be zero-type or default value store");
 820                   igvn->replace_in_uses(store, mem);
 821                 }
 822               }
 823             }
 824           }
 825           // Replace allocation by pre-allocated oop
 826           Node* oop = load_default_oop(*phase, value_klass());
 827           igvn->replace_node(res, oop);
 828         } else if (user->is_ValueType()) {
 829           // Add value type user to worklist to give it a chance to get optimized as well
 830           igvn->_worklist.push(user);
 831         }
 832       }
 833     }
 834 
 835     if (is_allocated(igvn)) {
 836       // Value type is heap allocated, search for safepoint uses
 837       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 838         Node* out = fast_out(i);
 839         if (out->is_SafePoint()) {
 840           // Let SafePointNode::Ideal() take care of re-wiring the
 841           // safepoint to the oop input instead of the value type node.
 842           igvn->rehash_node_delayed(out);
 843         }
 844       }
 845     }
 846   }
 847   return NULL;




 316   for (uint i = 0; i < field_count(); ++i) {
 317     int offset = holder_offset + field_offset(i);
 318     Node* value = NULL;
 319     ciType* ft = field_type(i);
 320     if (field_is_flattened(i)) {
 321       // Recursively load the flattened value type field
 322       value = ValueTypeNode::make_from_flattened(kit, ft->as_value_klass(), base, ptr, holder, offset);
 323     } else {
 324       const TypeOopPtr* oop_ptr = kit->gvn().type(base)->isa_oopptr();
 325       bool is_array = (oop_ptr->isa_aryptr() != NULL);
 326       if (base->is_Con() && !is_array) {
 327         // If the oop to the value type is constant (static final field), we can
 328         // also treat the fields as constants because the value type is immutable.
 329         ciObject* constant_oop = oop_ptr->const_oop();
 330         ciField* field = holder->get_field_by_offset(offset, false);
 331         assert(field != NULL, "field not found");
 332         ciConstant constant = constant_oop->as_instance()->field_value(field);
 333         const Type* con_type = Type::make_from_constant(constant, /*require_const=*/ true);
 334         assert(con_type != NULL, "type not found");
 335         value = kit->gvn().transform(kit->makecon(con_type));




 336       } else {
 337         // Load field value from memory
 338         const TypePtr* adr_type = field_adr_type(base, offset, holder, kit->gvn());
 339         Node* adr = kit->basic_plus_adr(base, ptr, offset);
 340         BasicType bt = type2field[ft->basic_type()];
 341         assert(is_java_primitive(bt) || adr->bottom_type()->is_ptr_to_narrowoop() == UseCompressedOops, "inconsistent");
 342         const Type* val_type = Type::get_const_type(ft);
 343         DecoratorSet decorators = IN_HEAP | MO_UNORDERED;
 344         if (is_array) {
 345           decorators |= IS_ARRAY;
 346         }
 347         value = kit->access_load_at(base, adr, adr_type, val_type, bt, decorators);
 348       }
 349       if (ft->is_valuetype()) {
 350         // Loading a non-flattened value type from memory
 351         value = ValueTypeNode::make_from_oop(kit, value, ft->as_value_klass(), /* buffer_check */ false, /* null2default */ field_is_flattenable(i), trap_bci);
 352       }
 353     }

 354     set_field_value(i, value);
 355   }
 356 }
 357 
 358 void ValueTypeBaseNode::store_flattened(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) const { // Store this value type flattened (no header) into 'base' at 'ptr' plus per-field offsets
 359   // The value type is embedded into the object without an oop header. Subtract the
 360   // offset of the first field to account for the missing header when storing the values.
 361   if (holder == NULL) { // No explicit holder given: the value klass itself holds the fields
 362     holder = value_klass();
 363   }
 364   holder_offset -= value_klass()->first_field_offset(); // Compensate for the missing object header
 365   store(kit, base, ptr, holder, holder_offset); // Delegate to the generic field-by-field store
 366 }
 367 
 368 void ValueTypeBaseNode::store(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, bool deoptimize_on_exception) const {
 369   // Write field values to memory
 370   for (uint i = 0; i < field_count(); ++i) {
 371     int offset = holder_offset + field_offset(i);
 372     Node* value = field_value(i);
 373     ciType* ft = field_type(i);


 469       ciField* f = vk->nonstatic_field_at(field_nb - extra);
 470       Node* field = field_value_by_offset(f->offset(), true);
 471       if (field->is_ValueType()) {
 472         assert(f->is_flattened(), "should be flattened");
 473         field = field->as_ValueType()->allocate(kit)->get_oop();
 474       }
 475       C->gvn_replace_by(pn, field);
 476       C->initial_gvn()->hash_delete(pn);
 477       pn->set_req(0, C->top());
 478       --i; --imax;
 479     }
 480   }
 481 }
 482 
 483 ValueTypeNode* ValueTypeNode::make_uninitialized(PhaseGVN& gvn, ciValueKlass* klass) {
 484   // Create a new ValueTypeNode with uninitialized values and NULL oop
 485   const TypeValueType* type = TypeValueType::make(klass);
 486   return new ValueTypeNode(type, gvn.zerocon(T_VALUETYPE)); // zerocon(T_VALUETYPE) acts as the NULL oop; field inputs are left unset by the caller
 487 }
 488 
 489 Node* ValueTypeNode::default_oop(PhaseGVN& gvn, ciValueKlass* vk) {
 490   // Returns the constant oop of the default value type allocation
 491   return gvn.makecon(TypeInstPtr::make(vk->default_value_instance())); // replaces the old load_default_oop(): the default instance is now exposed directly as a CI constant, no memory load needed




 492 }
 493 
 494 ValueTypeNode* ValueTypeNode::make_default(PhaseGVN& gvn, ciValueKlass* vk) {
 495   // Create a new ValueTypeNode with default values
 496   Node* oop = default_oop(gvn, vk); // constant oop of the pre-allocated default instance
 497   const TypeValueType* type = TypeValueType::make(vk);
 498   ValueTypeNode* vt = new ValueTypeNode(type, oop);
 499   for (uint i = 0; i < vt->field_count(); ++i) { // initialize every field input to its default
 500     ciType* field_type = vt->field_type(i);
 501     Node* value = NULL;
 502     if (field_type->is_valuetype()) {
 503       value = ValueTypeNode::make_default(gvn, field_type->as_value_klass()); // recurse for value type fields
 504     } else {
 505       value = gvn.zerocon(field_type->basic_type()); // primitives/oops default to zero/null
 506     }
 507     vt->set_field_value(i, value);
 508   }
 509   vt = gvn.transform(vt)->as_ValueType();
 510   assert(vt->is_default(gvn), "must be the default value type");
 511   return vt;
 512 }
 513 
 514 
 515 bool ValueTypeNode::is_default(PhaseGVN& gvn) const {
 516   for (uint i = 0; i < field_count(); ++i) {


 746       if (arg->is_ValueType()) {
 747         // non-flattened value type field
 748         ValueTypeNode* vt = arg->as_ValueType();
 749         assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
 750         arg = vt->allocate(&kit)->get_oop();
 751       }
 752       n->init_req(base_input + j + extra, arg);
 753       edges++;
 754       BasicType bt = field_type(i)->basic_type();
 755       if (bt == T_LONG || bt == T_DOUBLE) {
 756         n->init_req(base_input + j + extra + 1, kit.top());
 757         edges++;
 758       }
 759     }
 760   }
 761   return edges;
 762 }
 763 
 764 Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
 765   Node* oop = get_oop();
 766   if (is_default(*phase) && (!oop->is_Con() || phase->type(oop)->is_zero_type())) {


 767     // Use the pre-allocated oop for default value types
 768     set_oop(default_oop(*phase, value_klass()));

 769     return this;
 770   }
 771 
 772   if (!is_allocated(phase) && !value_klass()->is_bufferable()) {
 773     // Save base oop if fields are loaded from memory and the value
 774     // type is not buffered (in this case we should not use the oop).
 775     Node* base = is_loaded(phase);
 776     if (base != NULL) {
 777       set_oop(base);
 778       assert(is_allocated(phase), "should now be allocated");
 779       return this;
 780     }
 781   }
 782 
 783   if (can_reshape) {
 784     PhaseIterGVN* igvn = phase->is_IterGVN();
 785 
 786     if (is_default(*phase)) {
 787       // Search for users of the default value type
 788       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 789         Node* user = fast_out(i);
 790         AllocateNode* alloc = user->isa_Allocate();
 791         if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 792           // Found an allocation of the default value type.
 793           // If the code in StoreNode::Identity() that removes useless stores was not yet
 794           // executed or ReduceFieldZeroing is disabled, there can still be initializing
 795           // stores (only zero-type or default value stores, because value types are immutable).
 796           Node* res = alloc->result_cast();
 797           for (DUIterator_Fast jmax, j = res->fast_outs(jmax); j < jmax; j++) {
 798             AddPNode* addp = res->fast_out(j)->isa_AddP();
 799             if (addp != NULL) {
 800               for (DUIterator_Fast kmax, k = addp->fast_outs(kmax); k < kmax; k++) {
 801                 StoreNode* store = addp->fast_out(k)->isa_Store();
 802                 if (store != NULL && store->outcnt() != 0) {
 803                   // Remove the useless store
 804                   Node* mem = store->in(MemNode::Memory);
 805                   Node* val = store->in(MemNode::ValueIn);
 806                   const Type* val_type = igvn->type(val);
 807                   assert(val_type->is_zero_type() || (val->is_Con() && val_type->make_ptr()->is_valuetypeptr()),
 808                          "must be zero-type or default value store");
 809                   igvn->replace_in_uses(store, mem);
 810                 }
 811               }
 812             }
 813           }
 814           // Replace allocation by pre-allocated oop
 815           igvn->replace_node(res, default_oop(*phase, value_klass()));

 816         } else if (user->is_ValueType()) {
 817           // Add value type user to worklist to give it a chance to get optimized as well
 818           igvn->_worklist.push(user);
 819         }
 820       }
 821     }
 822 
 823     if (is_allocated(igvn)) {
 824       // Value type is heap allocated, search for safepoint uses
 825       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 826         Node* out = fast_out(i);
 827         if (out->is_SafePoint()) {
 828           // Let SafePointNode::Ideal() take care of re-wiring the
 829           // safepoint to the oop input instead of the value type node.
 830           igvn->rehash_node_delayed(out);
 831         }
 832       }
 833     }
 834   }
 835   return NULL;


< prev index next >