
src/hotspot/share/opto/valuetypenode.cpp

Old version:

 178 
 179 ciType* ValueTypeBaseNode::field_type(uint index) const {
 180   assert(index < field_count(), "index out of bounds");
 181   return value_klass()->declared_nonstatic_field_at(index)->type();
 182 }
 183 
 184 bool ValueTypeBaseNode::field_is_flattened(uint index) const {
 185   assert(index < field_count(), "index out of bounds");
 186   ciField* field = value_klass()->declared_nonstatic_field_at(index);
 187   assert(!field->is_flattened() || field->type()->is_valuetype(), "must be a value type");
 188   return field->is_flattened();
 189 }
 190 
 191 bool ValueTypeBaseNode::field_is_flattenable(uint index) const {
 192   assert(index < field_count(), "index out of bounds");
 193   ciField* field = value_klass()->declared_nonstatic_field_at(index);
 194   assert(!field->is_flattenable() || field->type()->is_valuetype(), "must be a value type");
 195   return field->is_flattenable();
 196 }
 197 
 198 int ValueTypeBaseNode::make_scalar_in_safepoint(Unique_Node_List& worklist, SafePointNode* sfpt, Node* root, PhaseGVN* gvn) {
 199   ciValueKlass* vk = value_klass();
 200   uint nfields = vk->nof_nonstatic_fields();
 201   JVMState* jvms = sfpt->jvms();
 202   int start = jvms->debug_start();
 203   int end   = jvms->debug_end();
 204   // Replace safepoint edge by SafePointScalarObjectNode and add field values
 205   assert(jvms != NULL, "missing JVMS");
 206   uint first_ind = (sfpt->req() - jvms->scloff());
 207   SafePointScalarObjectNode* sobj = new SafePointScalarObjectNode(value_ptr(),
 208 #ifdef ASSERT
 209                                                                   NULL,
 210 #endif
 211                                                                   first_ind, nfields);
 212   sobj->init_req(0, root);
 213   // Iterate over the value type fields in order of increasing
 214   // offset and add the field values to the safepoint.
 215   for (uint j = 0; j < nfields; ++j) {
 216     int offset = vk->nonstatic_field_at(j)->offset();
 217     Node* value = field_value_by_offset(offset, true /* include flattened value type fields */);
 218     if (value->is_ValueType()) {
 219       if (value->as_ValueType()->is_allocated(gvn)) {
 220         value = value->as_ValueType()->get_oop();
 221       } else {
 222         // Add non-flattened value type field to the worklist to process later
 223         worklist.push(value);
 224       }
 225     }
 226     sfpt->add_req(value);
 227   }
 228   jvms->set_endoff(sfpt->req());
 229   if (gvn != NULL) {
 230     sobj = gvn->transform(sobj)->as_SafePointScalarObject();
 231     gvn->igvn_rehash_node_delayed(sfpt);
 232   }
 233   return sfpt->replace_edges_in_range(this, sobj, start, end);
 234 }
 235 
 236 void ValueTypeBaseNode::make_scalar_in_safepoints(Node* root, PhaseGVN* gvn) {
 237   // Process all safepoint uses and scalarize value type
 238   Unique_Node_List worklist;
 239   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 240     Node* u = fast_out(i);
 241     if (u->is_SafePoint() && !u->is_CallLeaf() && (!u->is_Call() || u->as_Call()->has_debug_use(this))) {
 242       SafePointNode* sfpt = u->as_SafePoint();
 243       Node* in_oop = get_oop();
 244       const Type* oop_type = in_oop->bottom_type();
 245       assert(Opcode() == Op_ValueTypePtr || !isa_ValueType()->is_allocated(gvn), "already heap allocated value types should be linked directly");
 246       int nb = make_scalar_in_safepoint(worklist, sfpt, root, gvn);
 247       --i; imax -= nb;
 248     }
 249   }
 250   // Now scalarize non-flattened fields
 251   for (uint i = 0; i < worklist.size(); ++i) {
 252     Node* vt = worklist.at(i);
 253     vt->as_ValueType()->make_scalar_in_safepoints(root, gvn);
 254   }
 255 }
 256 
 257 void ValueTypeBaseNode::initialize(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, int base_offset, uint& base_input, bool in) {
 258   assert(base_offset >= 0, "offset in value type must be positive");
 259   assert(base_input >= TypeFunc::Parms, "invalid base input");
 260   PhaseGVN& gvn = kit->gvn();
 261   for (uint i = 0; i < field_count(); i++) {
 262     ciType* ft = field_type(i);
 263     int offset = base_offset + field_offset(i);
 264     if (field_is_flattened(i)) {
 265       // Flattened value type field
 266       ValueTypeNode* vt = ValueTypeNode::make_uninitialized(gvn, ft->as_value_klass());
 267       uint base = base_input;
 268       vt->initialize(kit, multi, vk, offset - value_klass()->first_field_offset(), base, in);
 269       set_field_value(i, gvn.transform(vt));
 270     } else {
 271       int j = 0; int extra = 0;
 272       for (; j < vk->nof_nonstatic_fields(); j++) {
 273         ciField* f = vk->nonstatic_field_at(j);


 793     PhaseIterGVN* igvn = phase->is_IterGVN();
 794 
 795     if (is_default(*phase)) {
 796       // Search for users of the default value type
 797       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 798         Node* user = fast_out(i);
 799         AllocateNode* alloc = user->isa_Allocate();
 800         if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 801           // Found an allocation of the default value type.
 802           // If the code in StoreNode::Identity() that removes useless stores was not yet
 803           // executed or ReduceFieldZeroing is disabled, there can still be initializing
 804           // stores (only zero-type or default value stores, because value types are immutable).
 805           Node* res = alloc->result_cast();
 806           for (DUIterator_Fast jmax, j = res->fast_outs(jmax); j < jmax; j++) {
 807             AddPNode* addp = res->fast_out(j)->isa_AddP();
 808             if (addp != NULL) {
 809               for (DUIterator_Fast kmax, k = addp->fast_outs(kmax); k < kmax; k++) {
 810                 StoreNode* store = addp->fast_out(k)->isa_Store();
 811                 if (store != NULL && store->outcnt() != 0) {
 812                   // Remove the useless store
 813                   Node* mem = store->in(MemNode::Memory);
 814                   Node* val = store->in(MemNode::ValueIn);
 815                   val = val->is_EncodeP() ? val->in(1) : val;
 816                   const Type* val_type = igvn->type(val);
 817                   assert(val_type->is_zero_type() || (val->is_Con() && val_type->make_ptr()->is_valuetypeptr()),
 818                          "must be zero-type or default value store");
 819                   igvn->replace_in_uses(store, mem);
 820                 }
 821               }
 822             }
 823           }
 824           // Replace allocation by pre-allocated oop
 825           igvn->replace_node(res, default_oop(*phase, value_klass()));
 826         } else if (user->is_ValueType()) {
 827           // Add value type user to worklist to give it a chance to get optimized as well
 828           igvn->_worklist.push(user);
 829         }
 830       }
 831     }
 832 
 833     if (is_allocated(igvn)) {
 834       // Value type is heap allocated, search for safepoint uses
 835       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 836         Node* out = fast_out(i);
 837         if (out->is_SafePoint()) {
 838           // Let SafePointNode::Ideal() take care of re-wiring the
 839           // safepoint to the oop input instead of the value type node.

New version:

 178 
 179 ciType* ValueTypeBaseNode::field_type(uint index) const {
 180   assert(index < field_count(), "index out of bounds");
 181   return value_klass()->declared_nonstatic_field_at(index)->type();
 182 }
 183 
 184 bool ValueTypeBaseNode::field_is_flattened(uint index) const {
 185   assert(index < field_count(), "index out of bounds");
 186   ciField* field = value_klass()->declared_nonstatic_field_at(index);
 187   assert(!field->is_flattened() || field->type()->is_valuetype(), "must be a value type");
 188   return field->is_flattened();
 189 }
 190 
 191 bool ValueTypeBaseNode::field_is_flattenable(uint index) const {
 192   assert(index < field_count(), "index out of bounds");
 193   ciField* field = value_klass()->declared_nonstatic_field_at(index);
 194   assert(!field->is_flattenable() || field->type()->is_valuetype(), "must be a value type");
 195   return field->is_flattenable();
 196 }
 197 
 198 int ValueTypeBaseNode::make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_List& worklist, SafePointNode* sfpt) {
 199   ciValueKlass* vk = value_klass();
 200   uint nfields = vk->nof_nonstatic_fields();
 201   JVMState* jvms = sfpt->jvms();
 202   int start = jvms->debug_start();
 203   int end   = jvms->debug_end();
 204   // Replace safepoint edge by SafePointScalarObjectNode and add field values
 205   assert(jvms != NULL, "missing JVMS");
 206   uint first_ind = (sfpt->req() - jvms->scloff());
 207   SafePointScalarObjectNode* sobj = new SafePointScalarObjectNode(value_ptr(),
 208 #ifdef ASSERT
 209                                                                   NULL,
 210 #endif
 211                                                                   first_ind, nfields);
 212   sobj->init_req(0, igvn->C->root());
 213   // Iterate over the value type fields in order of increasing
 214   // offset and add the field values to the safepoint.
 215   for (uint j = 0; j < nfields; ++j) {
 216     int offset = vk->nonstatic_field_at(j)->offset();
 217     Node* value = field_value_by_offset(offset, true /* include flattened value type fields */);
 218     if (value->is_ValueType()) {
 219       // Add value type field to the worklist to process later
 220       worklist.push(value);
 221     }
 222     sfpt->add_req(value);
 223   }
 224   jvms->set_endoff(sfpt->req());
 225   sobj = igvn->transform(sobj)->as_SafePointScalarObject();
 226   igvn->rehash_node_delayed(sfpt);
 227   return sfpt->replace_edges_in_range(this, sobj, start, end);
 228 }
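A note on what the bookkeeping in make_scalar_in_safepoint() above achieves: the value type's debug edges in the safepoint's JVMS range [debug_start, debug_end) are replaced by a single SafePointScalarObjectNode that records where the field values start (first_ind, relative to scloff) and how many there are, while the field values themselves are appended to the end of the safepoint's inputs. The following standalone sketch models that bookkeeping with invented toy types (DebugValue, scalarize); it illustrates the pattern only and is not HotSpot code.

#include <cstdio>
#include <vector>

// Toy stand-ins for illustration only, not HotSpot types.
struct DebugValue {
  int id;          // which node this debug edge refers to
  int first_index; // scalar-object descriptor only: where its field values start
  int field_count; // scalar-object descriptor only: how many field values follow
};

// Replace every edge to 'obj_id' in [start, end) by one descriptor and append the
// field values at the end; return how many edges were replaced (compare with the
// count returned by replace_edges_in_range() above).
static int scalarize(std::vector<DebugValue>& edges, int start, int end,
                     int obj_id, const std::vector<DebugValue>& fields) {
  // Negative id marks the descriptor (arbitrary convention for this toy model).
  DebugValue sobj = { -obj_id, (int)edges.size(), (int)fields.size() };
  for (const DebugValue& f : fields) edges.push_back(f);
  int replaced = 0;
  for (int i = start; i < end; i++) {
    if (edges[i].id == obj_id) { edges[i] = sobj; replaced++; }
  }
  return replaced;
}

int main() {
  std::vector<DebugValue> edges = { {1, -1, 0}, {42, -1, 0}, {2, -1, 0} };  // node 42 is the value type
  int n = scalarize(edges, 0, (int)edges.size(), 42, { {7, -1, 0}, {8, -1, 0} });
  std::printf("replaced %d edge(s); %zu debug values now\n", n, edges.size());
  return 0;
}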
 229 
 230 void ValueTypeBaseNode::make_scalar_in_safepoints(PhaseIterGVN* igvn) {
 231   // Process all safepoint uses and scalarize value type
 232   Unique_Node_List worklist;
 233   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 234     SafePointNode* sfpt = fast_out(i)->isa_SafePoint();
 235     if (sfpt != NULL && !sfpt->is_CallLeaf() && (!sfpt->is_Call() || sfpt->as_Call()->has_debug_use(this))) {
 236       int nb = 0;
 237       if (is_allocated(igvn) && get_oop()->is_Con()) {
 238         // Value type is allocated with a constant oop, link it directly
 239         nb = sfpt->replace_edges_in_range(this, get_oop(), sfpt->jvms()->debug_start(), sfpt->jvms()->debug_end());
 240         igvn->rehash_node_delayed(sfpt);
 241       } else {
 242         nb = make_scalar_in_safepoint(igvn, worklist, sfpt);
 243       }
 244       --i; imax -= nb;
 245     }
 246   }
 247   // Now scalarize non-flattened fields
 248   for (uint i = 0; i < worklist.size(); ++i) {
 249     Node* vt = worklist.at(i);
 250     vt->as_ValueType()->make_scalar_in_safepoints(igvn);
 251   }
 252 }
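Two details of make_scalar_in_safepoints() above may be worth spelling out. First, when the value type is already allocated with a constant oop, the safepoint's debug edges are simply rewired to that oop instead of being scalarized; the "--i; imax -= nb;" adjustment then compensates the fast def-use iterator for the nb output edges that were just replaced, so no safepoint use is skipped. Second, non-flattened value type fields are collected on a worklist during the first pass and scalarized afterwards by recursive calls. A rough standalone sketch of that deferred recursion, with a hypothetical Value type standing in for the node classes:

#include <cstdio>
#include <vector>

// Hypothetical toy type for illustration; not a HotSpot class.
struct Value {
  const char* name;
  std::vector<Value*> nested;   // models non-flattened value type fields
  void scalarize_uses() { std::printf("scalarizing safepoint uses of %s\n", name); }
};

// Scalarize v's own safepoint uses first, collecting nested values on a worklist,
// then recurse into each collected value (mirrors the structure of
// make_scalar_in_safepoints() above).
static void scalarize_all(Value* v) {
  std::vector<Value*> worklist;
  v->scalarize_uses();
  for (Value* n : v->nested) worklist.push_back(n);
  for (size_t i = 0; i < worklist.size(); i++) {
    scalarize_all(worklist[i]);
  }
}

int main() {
  Value inner = { "inner", {} };
  Value outer = { "outer", { &inner } };
  scalarize_all(&outer);
  return 0;
}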
 253 
 254 void ValueTypeBaseNode::initialize(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, int base_offset, uint& base_input, bool in) {
 255   assert(base_offset >= 0, "offset in value type must be positive");
 256   assert(base_input >= TypeFunc::Parms, "invalid base input");
 257   PhaseGVN& gvn = kit->gvn();
 258   for (uint i = 0; i < field_count(); i++) {
 259     ciType* ft = field_type(i);
 260     int offset = base_offset + field_offset(i);
 261     if (field_is_flattened(i)) {
 262       // Flattened value type field
 263       ValueTypeNode* vt = ValueTypeNode::make_uninitialized(gvn, ft->as_value_klass());
 264       uint base = base_input;
 265       vt->initialize(kit, multi, vk, offset - value_klass()->first_field_offset(), base, in);
 266       set_field_value(i, gvn.transform(vt));
 267     } else {
 268       int j = 0; int extra = 0;
 269       for (; j < vk->nof_nonstatic_fields(); j++) {
 270         ciField* f = vk->nonstatic_field_at(j);


 790     PhaseIterGVN* igvn = phase->is_IterGVN();
 791 
 792     if (is_default(*phase)) {
 793       // Search for users of the default value type
 794       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 795         Node* user = fast_out(i);
 796         AllocateNode* alloc = user->isa_Allocate();
 797         if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
 798           // Found an allocation of the default value type.
 799           // If the code in StoreNode::Identity() that removes useless stores was not yet
 800           // executed or ReduceFieldZeroing is disabled, there can still be initializing
 801           // stores (only zero-type or default value stores, because value types are immutable).
 802           Node* res = alloc->result_cast();
 803           for (DUIterator_Fast jmax, j = res->fast_outs(jmax); j < jmax; j++) {
 804             AddPNode* addp = res->fast_out(j)->isa_AddP();
 805             if (addp != NULL) {
 806               for (DUIterator_Fast kmax, k = addp->fast_outs(kmax); k < kmax; k++) {
 807                 StoreNode* store = addp->fast_out(k)->isa_Store();
 808                 if (store != NULL && store->outcnt() != 0) {
 809                   // Remove the useless store
 810                   igvn->replace_in_uses(store, store->in(MemNode::Memory));
 811                 }
 812               }
 813             }
 814           }
 815           // Replace allocation by pre-allocated oop
 816           igvn->replace_node(res, default_oop(*phase, value_klass()));
 817         } else if (user->is_ValueType()) {
 818           // Add value type user to worklist to give it a chance to get optimized as well
 819           igvn->_worklist.push(user);
 820         }
 821       }
 822     }
 823 
 824     if (is_allocated(igvn)) {
 825       // Value type is heap allocated, search for safepoint uses
 826       for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
 827         Node* out = fast_out(i);
 828         if (out->is_SafePoint()) {
 829           // Let SafePointNode::Ideal() take care of re-wiring the
 830           // safepoint to the oop input instead of the value type node.
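The store-elimination loop in the hunk above rewires every user of a useless initializing store to the store's memory input, which removes the store from the memory graph before the allocation itself is replaced by the pre-allocated default oop. A minimal standalone sketch of that rewiring step, using invented toy node types rather than HotSpot's Node and IGVN classes:

#include <algorithm>
#include <cstdio>
#include <vector>

// Toy graph node for illustration only; not HotSpot's Node class.
struct ToyNode {
  const char* name;
  std::vector<ToyNode*> inputs;
  std::vector<ToyNode*> outputs;
};

// Make every user of 'dead' consume 'replacement' instead; this models the effect
// of igvn->replace_in_uses(store, store->in(MemNode::Memory)) in the code above.
static void replace_in_uses(ToyNode* dead, ToyNode* replacement) {
  for (ToyNode* use : dead->outputs) {
    std::replace(use->inputs.begin(), use->inputs.end(), dead, replacement);
    replacement->outputs.push_back(use);
  }
  dead->outputs.clear();
}

int main() {
  ToyNode mem   = { "mem",   {}, {} };
  ToyNode store = { "store", { &mem }, {} };
  ToyNode user  = { "user",  { &store }, {} };
  mem.outputs.push_back(&store);
  store.outputs.push_back(&user);
  replace_in_uses(&store, &mem);
  std::printf("user now reads memory from: %s\n", user.inputs[0]->name);  // prints "mem"
  return 0;
}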

