/*
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciValueKlass.hpp"
#include "opto/addnode.hpp"
#include "opto/castnode.hpp"
#include "opto/graphKit.hpp"
#include "opto/rootnode.hpp"
#include "opto/valuetypenode.hpp"
#include "opto/phaseX.hpp"

// Clones the value type to handle control flow merges involving multiple value types.
// The inputs are replaced by PhiNodes to represent the merged values for the given region.
ValueTypeBaseNode* ValueTypeBaseNode::clone_with_phis(PhaseGVN* gvn, Node* region) {
  assert(!has_phi_inputs(region), "already cloned with phis");
  ValueTypeBaseNode* vt = clone()->as_ValueTypeBase();

  // Create a PhiNode for merging the oop values
  const TypeValueTypePtr* vtptr = value_type_ptr();
  vtptr = vtptr->cast_to_ptr_type(TypePtr::BotPTR)->is_valuetypeptr();
  PhiNode* oop = PhiNode::make(region, vt->get_oop(), vtptr);
  gvn->set_type(oop, vtptr);
  vt->set_oop(oop);

  // Create a PhiNode for each field to merge the field values
  for (uint i = 0; i < vt->field_count(); ++i) {
    ciType* type = vt->field_type(i);
    Node*  value = vt->field_value(i);
    if (type->is_valuetype()) {
      // Handle flattened value type fields recursively
      value = value->as_ValueType()->clone_with_phis(gvn, region);
    } else {
      const Type* phi_type = Type::get_const_type(type);
      value = PhiNode::make(region, value, phi_type);
      gvn->set_type(value, phi_type);
    }
    vt->set_field_value(i, value);
  }
  gvn->set_type(vt, vt->bottom_type());
  return vt;
}

// Checks if the inputs of the ValueTypeBaseNode were replaced by PhiNodes
// for the given region (see ValueTypeBaseNode::clone_with_phis).
bool ValueTypeBaseNode::has_phi_inputs(Node* region) {
  // Check oop input
  bool result = get_oop()->is_Phi() && get_oop()->as_Phi()->region() == region;
#ifdef ASSERT
  if (result) {
    // Check all field value inputs for consistency
    for (uint i = Oop; i < field_count(); ++i) {
      Node* n = in(i);
      if (n->is_ValueTypeBase()) {
        assert(n->as_ValueTypeBase()->has_phi_inputs(region), "inconsistent phi inputs");
      } else {
        assert(n->is_Phi() && n->as_Phi()->region() == region, "inconsistent phi inputs");
      }
    }
  }
#endif
  return result;
}

// Merges 'this' with 'other' by updating the input PhiNodes added by 'clone_with_phis'
ValueTypeBaseNode* ValueTypeBaseNode::merge_with(PhaseGVN* gvn, const ValueTypeBaseNode* other, int pnum, bool transform) {
  // Merge oop inputs
  PhiNode* phi = get_oop()->as_Phi();
  phi->set_req(pnum, other->get_oop());
  if (transform) {
    set_oop(gvn->transform(phi));
    gvn->record_for_igvn(phi);
  }
  // Merge field values
  for (uint i = 0; i < field_count(); ++i) {
    Node* val1 =        field_value(i);
    Node* val2 = other->field_value(i);
    if (val1->isa_ValueType()) {
      val1->as_ValueType()->merge_with(gvn, val2->as_ValueType(), pnum, transform);
    } else {
      assert(val1->is_Phi(), "must be a phi node");
      assert(!val2->is_ValueType(), "inconsistent merge values");
      val1->set_req(pnum, val2);
    }
    if (transform) {
      set_field_value(i, gvn->transform(val1));
      gvn->record_for_igvn(val1);
    }
  }
  return this;
}

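// Get the value of the field at the given index.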
Node* ValueTypeBaseNode::field_value(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return in(Values + index);
}

// Get the value of the field at the given offset.
// If 'recursive' is true, flattened value type fields will be resolved recursively.
Node* ValueTypeBaseNode::field_value_by_offset(int offset, bool recursive) const {
  // If the field at 'offset' belongs to a flattened value type field, 'index' refers to the
  // corresponding ValueTypeNode input and 'sub_offset' is the offset in the flattened value type.
  int index = value_klass()->field_index_by_offset(offset);
  int sub_offset = offset - field_offset(index);
  Node* value = field_value(index);
  assert(value != NULL, "field value not found");
  if (recursive && value->is_ValueType()) {
    ValueTypeNode* vt = value->as_ValueType();
    if (field_is_flattened(index)) {
      // Flattened value type field
      sub_offset += vt->value_klass()->first_field_offset(); // Add header size
      return vt->field_value_by_offset(sub_offset, recursive);
    } else {
      assert(sub_offset == 0, "should not have a sub offset");
      return vt;
    }
  }
  assert(!(recursive && value->is_ValueType()), "should not be a value type");
  assert(sub_offset == 0, "offset mismatch");
  return value;
}

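// Set the value of the field at the given index.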
void ValueTypeBaseNode::set_field_value(uint index, Node* value) {
  assert(index < field_count(), "index out of bounds");
  set_req(Values + index, value);
}

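// Get the offset of the field at the given index.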
int ValueTypeBaseNode::field_offset(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return value_klass()->declared_nonstatic_field_at(index)->offset();
}

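// Get the type of the field at the given index.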
ciType* ValueTypeBaseNode::field_type(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return value_klass()->declared_nonstatic_field_at(index)->type();
}

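// Check if the field at the given index is flattened.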
bool ValueTypeBaseNode::field_is_flattened(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return value_klass()->declared_nonstatic_field_at(index)->is_flattened();
}

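// Replace the debug info edges of the given safepoint that refer to this value type by a
// SafePointScalarObjectNode and append the field values as debug edges. Non-flattened value
// type fields that are not yet allocated are pushed to the worklist for later scalarization.
// Returns the number of replaced edges.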
int ValueTypeBaseNode::make_scalar_in_safepoint(Unique_Node_List& worklist, SafePointNode* sfpt, Node* root, PhaseGVN* gvn) {
  ciValueKlass* vk = value_klass();
  uint nfields = vk->nof_nonstatic_fields();
  JVMState* jvms = sfpt->jvms();
  assert(jvms != NULL, "missing JVMS");
  int start = jvms->debug_start();
  int end   = jvms->debug_end();
  // Replace safepoint edge by SafePointScalarObjectNode and add field values
  uint first_ind = (sfpt->req() - jvms->scloff());
  const TypeValueTypePtr* res_type = value_type_ptr();
  SafePointScalarObjectNode* sobj = new SafePointScalarObjectNode(res_type,
#ifdef ASSERT
                                                                  NULL,
#endif
                                                                  first_ind, nfields);
  sobj->init_req(0, root);
  // Iterate over the value type fields in order of increasing
  // offset and add the field values to the safepoint.
  for (uint j = 0; j < nfields; ++j) {
    int offset = vk->nonstatic_field_at(j)->offset();
    Node* value = field_value_by_offset(offset, true /* include flattened value type fields */);
    if (value->is_ValueType()) {
      if (value->as_ValueType()->is_allocated(gvn)) {
        value = value->as_ValueType()->get_oop();
      } else {
        // Add non-flattened value type field to the worklist to process later
        worklist.push(value);
      }
    }
    sfpt->add_req(value);
  }
  jvms->set_endoff(sfpt->req());
  if (gvn != NULL) {
    sobj = gvn->transform(sobj)->as_SafePointScalarObject();
    gvn->igvn_rehash_node_delayed(sfpt);
  }
  return sfpt->replace_edges_in_range(this, sobj, start, end);
}

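// Scalarize this value type in all safepoints that have a (debug) use of it. Value type
// fields collected on the worklist are scalarized recursively.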
void ValueTypeBaseNode::make_scalar_in_safepoints(Node* root, PhaseGVN* gvn) {
  Unique_Node_List worklist;
  for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
    Node* u = fast_out(i);
    if (u->is_SafePoint() && (!u->is_Call() || u->as_Call()->has_debug_use(this))) {
      SafePointNode* sfpt = u->as_SafePoint();
      Node* in_oop = get_oop();
      const Type* oop_type = in_oop->bottom_type();
      assert(Opcode() == Op_ValueTypePtr || !isa_ValueType()->is_allocated(gvn), "already heap allocated value types should be linked directly");
      int nb = make_scalar_in_safepoint(worklist, sfpt, root, gvn);
      --i; imax -= nb;
    }
  }

  for (uint next = 0; next < worklist.size(); ++next) {
    Node* vt = worklist.at(next);
    vt->as_ValueType()->make_scalar_in_safepoints(root, gvn);
  }
}

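// Initialize the field values of 'vt' from the arguments or return values of node 'n':
// ParmNodes if 'n' is a Start node, the call's inputs if 'in' is true, result projections
// otherwise. Flattened value type fields are initialized recursively.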
void ValueTypeBaseNode::make(PhaseGVN* gvn, Node*& ctl, Node* mem, Node* n, ValueTypeBaseNode* vt, ciValueKlass* base_vk, int base_offset, int base_input, bool in) {
  assert(base_offset >= 0, "offset in value type always positive");
  for (uint i = 0; i < vt->field_count(); i++) {
    ciType* field_type = vt->field_type(i);
    int offset = base_offset + vt->field_offset(i);
    if (field_type->is_valuetype() && vt->field_is_flattened(i)) {
      ciValueKlass* embedded_vk = field_type->as_value_klass();
      ValueTypeNode* embedded_vt = ValueTypeNode::make(*gvn, embedded_vk);
      ValueTypeBaseNode::make(gvn, ctl, mem, n, embedded_vt, base_vk, offset - vt->value_klass()->first_field_offset(), base_input, in);
      vt->set_field_value(i, gvn->transform(embedded_vt));
    } else {
      int j = 0; int extra = 0;
      for (; j < base_vk->nof_nonstatic_fields(); j++) {
        ciField* f = base_vk->nonstatic_field_at(j);
        if (offset == f->offset()) {
          assert(f->type() == field_type, "inconsistent field type");
          break;
        }
        BasicType bt = f->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      assert(j != base_vk->nof_nonstatic_fields(), "must find");
      Node* parm = NULL;
      if (n->is_Start()) {
        assert(in, "return from start?");
        parm = gvn->transform(new ParmNode(n->as_Start(), base_input + j + extra));
      } else {
        if (in) {
          assert(n->is_Call(), "nothing else here");
          parm = n->in(base_input + j + extra);
        } else {
          parm = gvn->transform(new ProjNode(n->as_Call(), base_input + j + extra));
        }
      }
      if (field_type->is_valuetype()) {
        // Non-flattened value type field, check for null
        parm = ValueTypeNode::make(*gvn, ctl, mem, parm, /* null_check */ true);
      }
      vt->set_field_value(i, parm);
      // Record all these guys for later GVN.
      gvn->record_for_igvn(parm);
    }
  }
}

void ValueTypeBaseNode::load(PhaseGVN& gvn, Node*& ctl, Node* mem, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
  // Initialize the value type by loading its field values from
  // memory and adding the values as input edges to the node.
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    ciType* ftype = field_type(i);
    Node* value = NULL;
    if (ftype->is_valuetype() && field_is_flattened(i)) {
      // Recursively load the flattened value type field
      value = ValueTypeNode::make(gvn, ftype->as_value_klass(), ctl, mem, base, ptr, holder, offset);
    } else {
      const Type* con_type = NULL;
      if (base->is_Con()) {
        // If the oop to the value type is constant (static final field), we can
        // also treat the fields as constants because the value type is immutable.
        const TypeOopPtr* oop_ptr = base->bottom_type()->isa_oopptr();
        ciObject* constant_oop = oop_ptr->const_oop();
        ciField* field = holder->get_field_by_offset(offset, false);
        ciConstant constant = constant_oop->as_instance()->field_value(field);
        con_type = Type::make_from_constant(constant, /*require_const=*/ true);
      }
      if (con_type != NULL) {
        // Found a constant field value
        value = gvn.transform(gvn.makecon(con_type));
        if (con_type->isa_valuetypeptr()) {
          // Constant, non-flattened value type field
          value = ValueTypeNode::make(gvn, ctl, mem, value);
        }
      } else {
        // Load field value from memory
        const Type* base_type = gvn.type(base);
        const TypePtr* adr_type = NULL;
        if (base_type->isa_aryptr()) {
          // In the case of a flattened value type array, each field has its own slice
          adr_type = base_type->is_aryptr()->with_field_offset(offset)->add_offset(Type::OffsetBot);
        } else {
          ciField* field = holder->get_field_by_offset(offset, false);
          adr_type = gvn.C->alias_type(field)->adr_type();
        }
        Node* adr = gvn.transform(new AddPNode(base, ptr, gvn.MakeConX(offset)));
        BasicType bt = type2field[ftype->basic_type()];
        const Type* ft = Type::get_const_type(ftype);
        if (bt == T_VALUETYPE) {
          ft = ft->is_valuetypeptr()->cast_to_ptr_type(TypePtr::BotPTR);
        }
        assert(is_java_primitive(bt) || adr->bottom_type()->is_ptr_to_narrowoop() == UseCompressedOops, "inconsistent");
        value = gvn.transform(LoadNode::make(gvn, NULL, mem, adr, adr_type, ft, bt, MemNode::unordered));
        if (bt == T_VALUETYPE) {
          // Non-flattened value type field, check for null
          value = ValueTypeNode::make(gvn, ctl, mem, value, /* null_check */ true);
        }
      }
    }
    set_field_value(i, value);
  }
}

void ValueTypeBaseNode::store_flattened(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) const {
  // The value type is embedded into the object without an oop header. Subtract the
  // offset of the first field to account for the missing header when storing the values.
  holder_offset -= value_klass()->first_field_offset();
  store(kit, base, ptr, holder, holder_offset);
}

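// Write the field values of this value type to memory, using 'holder' (or the value klass
// if 'holder' is NULL) to resolve the field addresses. Flattened fields are stored recursively.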
void ValueTypeBaseNode::store(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset) const {
  if (holder == NULL) {
    holder = value_klass();
  }
  // Write field values to memory
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    Node* value = field_value(i);
    if (value->is_ValueType() && field_is_flattened(i)) {
      // Recursively store the flattened value type field
      value->isa_ValueType()->store_flattened(kit, base, ptr, holder, offset);
    } else {
      const Type* base_type = kit->gvn().type(base);
      const TypePtr* adr_type = NULL;
      if (base_type->isa_aryptr()) {
        // In the case of a flattened value type array, each field has its own slice
        adr_type = base_type->is_aryptr()->with_field_offset(offset)->add_offset(Type::OffsetBot);
      } else {
        ciField* field = holder->get_field_by_offset(offset, false);
        adr_type = kit->C->alias_type(field)->adr_type();
      }
      Node* adr = kit->basic_plus_adr(base, ptr, offset);
      BasicType bt = type2field[field_type(i)->basic_type()];
      if (is_java_primitive(bt)) {
        kit->store_to_memory(kit->control(), adr, value, bt, adr_type, MemNode::unordered);
      } else {
        const TypeOopPtr* ft = TypeOopPtr::make_from_klass(field_type(i)->as_klass());
        // Field may be NULL
        ft = ft->cast_to_ptr_type(TypePtr::BotPTR)->is_oopptr();
        assert(adr->bottom_type()->is_ptr_to_narrowoop() == UseCompressedOops, "inconsistent");
        bool is_array = base_type->isa_aryptr() != NULL;
        kit->store_oop(kit->control(), base, adr, adr_type, value, ft, bt, is_array, MemNode::unordered);
      }
    }
  }
}

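// Make sure this value type is heap allocated: if the oop cannot be proven non-NULL, emit a
// runtime null check and allocate and initialize a new instance on the NULL path. Returns an
// updated value type node that carries the resulting oop.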
ValueTypeBaseNode* ValueTypeBaseNode::allocate(GraphKit* kit) {
  Node* in_oop = get_oop();
  Node* null_ctl = kit->top();
  // Check if value type is already allocated
  Node* not_null_oop = kit->null_check_oop(in_oop, &null_ctl);
  if (null_ctl->is_top()) {
    // Value type is allocated
    return this;
  }
  // Not able to prove that value type is allocated.
  // Emit runtime check that may be folded later.
  assert(!is_allocated(&kit->gvn()), "should not be allocated");
  const TypeValueTypePtr* vtptr_type = bottom_type()->isa_valuetypeptr();
  if (vtptr_type == NULL) {
    vtptr_type = TypeValueTypePtr::make(bottom_type()->isa_valuetype(), TypePtr::NotNull);
  }
  RegionNode* region = new RegionNode(3);
  PhiNode* oop = new PhiNode(region, vtptr_type);
  PhiNode* io  = new PhiNode(region, Type::ABIO);
  PhiNode* mem = new PhiNode(region, Type::MEMORY, TypePtr::BOTTOM);

  // Oop is non-NULL, use it
  region->init_req(1, kit->control());
  oop   ->init_req(1, not_null_oop);
  io    ->init_req(1, kit->i_o());
  mem   ->init_req(1, kit->merged_memory());

  // Oop is NULL, allocate value type
  kit->set_control(null_ctl);
  kit->kill_dead_locals();
  ciValueKlass* vk = value_klass();
  Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
  Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, false, this);
  // Write field values to memory
  store(kit, alloc_oop, alloc_oop, vk);
  region->init_req(2, kit->control());
  oop   ->init_req(2, alloc_oop);
  io    ->init_req(2, kit->i_o());
  mem   ->init_req(2, kit->merged_memory());

  // Update GraphKit
  kit->set_control(kit->gvn().transform(region));
  kit->set_i_o(kit->gvn().transform(io));
  kit->set_all_memory(kit->gvn().transform(mem));
  kit->record_for_igvn(region);
  kit->record_for_igvn(oop);
  kit->record_for_igvn(io);
  kit->record_for_igvn(mem);

  // Use cloned ValueTypeNode to propagate oop from now on
  Node* res_oop = kit->gvn().transform(oop);
  ValueTypeBaseNode* vt = clone()->as_ValueTypeBase();
  vt->set_oop(res_oop);
  vt = kit->gvn().transform(vt)->as_ValueTypeBase();
  kit->replace_in_map(this, vt);
  return vt;
}

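// Check if this value type is known to be heap allocated, i.e. its oop is provably non-NULL.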
bool ValueTypeBaseNode::is_allocated(PhaseGVN* phase) const {
  Node* oop = get_oop();
  const Type* oop_type = (phase != NULL) ? phase->type(oop) : oop->bottom_type();
  return oop_type->meet(TypePtr::NULL_PTR) != oop_type;
}

// When a call returns multiple values, it has several result
// projections, one per field. Replacing the result of the call by a
// value type node (after late inlining) requires that for each result
// projection, we find the corresponding value type field.
void ValueTypeBaseNode::replace_call_results(GraphKit* kit, Node* call, Compile* C) {
  ciValueKlass* vk = value_klass();
  for (DUIterator_Fast imax, i = call->fast_outs(imax); i < imax; i++) {
    ProjNode* pn = call->fast_out(i)->as_Proj();
    uint con = pn->_con;
    if (con >= TypeFunc::Parms+1) {
      uint field_nb = con - (TypeFunc::Parms+1);
      int extra = 0;
      for (uint j = 0; j < field_nb - extra; j++) {
        ciField* f = vk->nonstatic_field_at(j);
        BasicType bt = f->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      ciField* f = vk->nonstatic_field_at(field_nb - extra);
      Node* field = field_value_by_offset(f->offset(), true);
      if (field->is_ValueType()) {
        assert(f->is_flattened(), "should be flattened");
        field = field->as_ValueType()->allocate(kit)->get_oop();
      }
      C->gvn_replace_by(pn, field);
      C->initial_gvn()->hash_delete(pn);
      pn->set_req(0, C->top());
      --i; --imax;
    }
  }
}

ValueTypeNode* ValueTypeNode::make(PhaseGVN& gvn, ciValueKlass* klass) {
  // Create a new ValueTypeNode with uninitialized values and NULL oop
  const TypeValueType* type = TypeValueType::make(klass);
  return new ValueTypeNode(type, gvn.zerocon(T_VALUETYPE));
}

Node* ValueTypeNode::make_default(PhaseGVN& gvn, ciValueKlass* vk) {
  // TODO re-use constant oop of pre-allocated default value type here?
  // Create a new ValueTypeNode with default values
  ValueTypeNode* vt = ValueTypeNode::make(gvn, vk);
  for (uint i = 0; i < vt->field_count(); ++i) {
    ciType* field_type = vt->field_type(i);
    Node* value = NULL;
    if (field_type->is_valuetype()) {
      value = ValueTypeNode::make_default(gvn, field_type->as_value_klass());
    } else {
      value = gvn.zerocon(field_type->basic_type());
    }
    vt->set_field_value(i, value);
  }
  return gvn.transform(vt);
}

Node* ValueTypeNode::make(PhaseGVN& gvn, Node*& ctl, Node* mem, Node* oop, bool null_check) {
  // Create and initialize a ValueTypeNode by loading all field
  // values from a heap-allocated version and also save the oop.
  const TypeValueType* type = gvn.type(oop)->is_valuetypeptr()->value_type();
  ValueTypeNode* vt = new ValueTypeNode(type, oop);

  if (null_check && !vt->is_allocated(&gvn)) {
    // Add oop null check
    Node* chk = gvn.transform(new CmpPNode(oop, gvn.zerocon(T_VALUETYPE)));
    Node* tst = gvn.transform(new BoolNode(chk, BoolTest::ne));
    IfNode* iff = gvn.transform(new IfNode(ctl, tst, PROB_MAX, COUNT_UNKNOWN))->as_If();
    Node* not_null = gvn.transform(new IfTrueNode(iff));
    Node* null = gvn.transform(new IfFalseNode(iff));
    Node* region = new RegionNode(3);

    // Load value type from memory if oop is non-null
    oop = new CastPPNode(oop, TypePtr::NOTNULL);
    oop->set_req(0, not_null);
    oop = gvn.transform(oop);
    vt->load(gvn, not_null, mem, oop, oop, type->value_klass());
    region->init_req(1, not_null);

    // Use default value type if oop is null
    Node* def = make_default(gvn, type->value_klass());
    region->init_req(2, null);

    // Merge the two value types and update control
    vt = vt->clone_with_phis(&gvn, region)->as_ValueType();
    vt->merge_with(&gvn, def->as_ValueType(), 2, true);
    ctl = gvn.transform(region);
  } else {
    Node* init_ctl = ctl;
    vt->load(gvn, ctl, mem, oop, oop, type->value_klass());
    vt = gvn.transform(vt)->as_ValueType();
    assert(vt->is_allocated(&gvn), "value type should be allocated");
    assert(init_ctl != ctl || oop->is_Con() || oop->is_CheckCastPP() || oop->Opcode() == Op_ValueTypePtr ||
           vt->is_loaded(&gvn, type) == oop, "value type should be loaded");
  }
  return vt;
}

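// GraphKit variant of the above that keeps the kit's control flow up to date.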
Node* ValueTypeNode::make(GraphKit* kit, Node* oop, bool null_check) {
  Node* ctl = kit->control();
  Node* vt = make(kit->gvn(), ctl, kit->merged_memory(), oop, null_check);
  kit->set_control(ctl);
  return vt;
}

Node* ValueTypeNode::make(PhaseGVN& gvn, ciValueKlass* vk, Node*& ctl, Node* mem, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
  // Create and initialize a ValueTypeNode by loading all field values from
  // a flattened value type field at 'holder_offset' or from a value type array.
  ValueTypeNode* vt = make(gvn, vk);
  // The value type is flattened into the object without an oop header. Subtract the
  // offset of the first field to account for the missing header when loading the values.
  holder_offset -= vk->first_field_offset();
  vt->load(gvn, ctl, mem, obj, ptr, holder, holder_offset);
  assert(vt->is_loaded(&gvn, vt->type()->isa_valuetype()) != obj, "holder oop should not be used as flattened value type oop");
  return gvn.transform(vt)->as_ValueType();
}

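// GraphKit variant of the above for loading a flattened value type field or array element.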
Node* ValueTypeNode::make(GraphKit* kit, ciValueKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset) {
  Node* ctl = kit->control();
  Node* vt = make(kit->gvn(), vk, ctl, kit->merged_memory(), obj, ptr, holder, holder_offset);
  kit->set_control(ctl);
  return vt;
}

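// Create a new value type and initialize its fields from the inputs or result projections
// of 'n' (see ValueTypeBaseNode::make).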
Node* ValueTypeNode::make(PhaseGVN& gvn, Node*& ctl, Node* mem, Node* n, ciValueKlass* vk, int base_input, bool in) {
  ValueTypeNode* vt = ValueTypeNode::make(gvn, vk);
  ValueTypeBaseNode::make(&gvn, ctl, mem, n, vt, vk, 0, base_input, in);
  return gvn.transform(vt);
}

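// Check if all field values of this value type are loads from the same base oop with offsets
// matching the value type layout. Returns the base oop if so, NULL otherwise.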
Node* ValueTypeNode::is_loaded(PhaseGVN* phase, const TypeValueType* t, Node* base, int holder_offset) {
  if (field_count() == 0) {
    assert(t->value_klass()->is__Value(), "unexpected value type klass");
    assert(is_allocated(phase), "must be allocated");
    return get_oop();
  }
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    Node* value = field_value(i);
    if (value->isa_DecodeN()) {
      // Skip DecodeN
      value = value->in(1);
    }
    if (value->isa_Load()) {
      // Check if the base and offset of the field load match the value type layout
      intptr_t loffset = 0;
      Node* lbase = AddPNode::Ideal_base_and_offset(value->in(MemNode::Address), phase, loffset);
      if (lbase == NULL || (lbase != base && base != NULL) || loffset != offset) {
        return NULL;
      } else if (base == NULL) {
        // Set base and check if pointer type matches
        base = lbase;
        const TypeValueTypePtr* vtptr = phase->type(base)->isa_valuetypeptr();
        if (vtptr == NULL || !vtptr->value_type()->eq(t)) {
          return NULL;
        }
      }
    } else if (value->isa_ValueType()) {
      // Check value type field load recursively
      ValueTypeNode* vt = value->as_ValueType();
      base = vt->is_loaded(phase, t, base, offset - vt->value_klass()->first_field_offset());
      if (base == NULL) {
        return NULL;
      }
    } else {
      return NULL;
    }
  }
  return base;
}

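// Allocate all non-flattened value type fields and return an updated value type node.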
Node* ValueTypeNode::allocate_fields(GraphKit* kit) {
  ValueTypeNode* vt = clone()->as_ValueType();
  for (uint i = 0; i < field_count(); i++) {
    Node* value = field_value(i);
    if (value->is_ValueType()) {
      if (field_is_flattened(i)) {
        value = value->as_ValueType()->allocate_fields(kit);
      } else {
        // Non-flattened value type field
        value = value->as_ValueType()->allocate(kit);
      }
      vt->set_field_value(i, value);
    }
  }
  vt = kit->gvn().transform(vt)->as_ValueType();
  kit->replace_in_map(this, vt);
  return vt;
}

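// Return a constant raw pointer to the ValueKlass with its lowest bit set (a tagged klass pointer).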
Node* ValueTypeNode::tagged_klass(PhaseGVN& gvn) {
  ciValueKlass* vk = value_klass();
  const TypeKlassPtr* tk = TypeKlassPtr::make(vk);
  intptr_t bits = tk->get_con();
  set_nth_bit(bits, 0);
  return gvn.makecon(TypeRawPtr::make((address)bits));
}

void ValueTypeNode::pass_klass(Node* n, uint pos, const GraphKit& kit) {
  n->init_req(pos, tagged_klass(kit.gvn()));
}

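// Add the field values of this value type as inputs to 'n', starting at 'base_input'.
// Flattened fields are passed field by field, non-flattened value type fields are passed
// as oops (allocating them if necessary). Returns the number of edges added.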
uint ValueTypeNode::pass_fields(Node* n, int base_input, GraphKit& kit, bool assert_allocated, ciValueKlass* base_vk, int base_offset) {
  ciValueKlass* vk = value_klass();
  if (base_vk == NULL) {
    base_vk = vk;
  }
  uint edges = 0;
  for (uint i = 0; i < field_count(); i++) {
    ciType* f_type = field_type(i);
    int offset = base_offset + field_offset(i) - (base_offset > 0 ? vk->first_field_offset() : 0);
    Node* arg = field_value(i);
    if (f_type->is_valuetype() && field_is_flattened(i)) {
      ciValueKlass* embedded_vk = f_type->as_value_klass();
      edges += arg->as_ValueType()->pass_fields(n, base_input, kit, assert_allocated, base_vk, offset);
    } else {
      int j = 0; int extra = 0;
      for (; j < base_vk->nof_nonstatic_fields(); j++) {
        ciField* f = base_vk->nonstatic_field_at(j);
        if (offset == f->offset()) {
          assert(f->type() == f_type, "inconsistent field type");
          break;
        }
        BasicType bt = f->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      if (arg->is_ValueType()) {
        // Non-flattened value type field
        ValueTypeNode* vt = arg->as_ValueType();
        assert(!assert_allocated || vt->is_allocated(&kit.gvn()), "value type field should be allocated");
        arg = vt->allocate(&kit)->get_oop();
      }
      n->init_req(base_input + j + extra, arg);
      edges++;
      BasicType bt = f_type->basic_type();
      if (bt == T_LONG || bt == T_DOUBLE) {
        n->init_req(base_input + j + extra + 1, kit.top());
        edges++;
      }
    }
  }
  return edges;
}

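// Idealize this value type node: if it is not allocated yet, try to find the oop it was
// loaded from and save it. During IGVN, enqueue safepoint users of allocated value types so
// that SafePointNode::Ideal() can re-wire them to use the oop directly.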
Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
  if (!is_allocated(phase)) {
    // Check if this value type is loaded from memory
    Node* base = is_loaded(phase, type()->is_valuetype());
    if (base != NULL) {
      // Save the oop
      set_oop(base);
      assert(is_allocated(phase), "should now be allocated");
      return this;
    }
  }

  if (can_reshape) {
    PhaseIterGVN* igvn = phase->is_IterGVN();
    if (is_allocated(igvn)) {
      // Value type is heap allocated, search for safepoint uses
      for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
        Node* out = fast_out(i);
        if (out->is_SafePoint()) {
          // Let SafePointNode::Ideal() take care of re-wiring the
          // safepoint to the oop input instead of the value type node.
          igvn->rehash_node_delayed(out);
        }
      }
    }
  }
  return NULL;
}

// Search for multiple allocations of this value type
// and try to replace them by dominating allocations.
void ValueTypeNode::remove_redundant_allocations(PhaseIterGVN* igvn, PhaseIdealLoop* phase) {
  assert(EliminateAllocations, "allocation elimination should be enabled");
  Node_List dead_allocations;
  // Search for allocations of this value type
  for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
    AllocateNode* alloc = fast_out(i)->isa_Allocate();
    if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
      Node* res_dom = NULL;
      if (is_allocated(igvn)) {
        // The value type is already allocated but still connected to an AllocateNode.
        // This can happen with late inlining when we first allocate a value type argument
        // but later decide to inline the call with the callee code also allocating.
        res_dom = get_oop();
      } else {
        // Search for a dominating allocation of the same value type
        for (DUIterator_Fast jmax, j = fast_outs(jmax); j < jmax; j++) {
          Node* out2 = fast_out(j);
          if (alloc != out2 && out2->is_Allocate() && out2->in(AllocateNode::ValueNode) == this &&
              phase->is_dominator(out2, alloc)) {
            AllocateNode* alloc_dom = out2->as_Allocate();
            assert(alloc->in(AllocateNode::KlassNode) == alloc_dom->in(AllocateNode::KlassNode), "klasses should match");
            res_dom = alloc_dom->result_cast();
            break;
          }
        }
      }
      if (res_dom != NULL) {
        // Move users to dominating allocation
        Node* res = alloc->result_cast();
        igvn->replace_node(res, res_dom);
        // The dominated allocation is now dead, remove the
        // value type node connection and adjust the iterator.
        dead_allocations.push(alloc);
        igvn->replace_input_of(alloc, AllocateNode::ValueNode, NULL);
        --i; --imax;
#ifdef ASSERT
        if (PrintEliminateAllocations) {
          tty->print("++++ Eliminated: %d Allocate ", alloc->_idx);
          dump_spec(tty);
          tty->cr();
        }
#endif
      }
    }
  }

  // Remove dead value type allocations by replacing the projection nodes
  for (uint i = 0; i < dead_allocations.size(); ++i) {
    CallProjections projs;
    AllocateNode* alloc = dead_allocations.at(i)->as_Allocate();
    alloc->extract_projections(&projs, true);
    // Use lazy_replace to avoid corrupting the dominator tree of PhaseIdealLoop
    phase->lazy_replace(projs.fallthrough_catchproj, alloc->in(TypeFunc::Control));
    phase->lazy_replace(projs.fallthrough_memproj, alloc->in(TypeFunc::Memory));
    phase->lazy_replace(projs.catchall_memproj, phase->C->top());
    phase->lazy_replace(projs.fallthrough_ioproj, alloc->in(TypeFunc::I_O));
    phase->lazy_replace(projs.catchall_ioproj, phase->C->top());
    phase->lazy_replace(projs.catchall_catchproj, phase->C->top());
    phase->lazy_replace(projs.resproj, phase->C->top());
  }
}

#ifndef PRODUCT

void ValueTypeNode::dump_spec(outputStream* st) const {
  TypeNode::dump_spec(st);
}

#endif

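// Create a ValueTypePtrNode with its field values taken from the result projections of 'call'.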
ValueTypePtrNode* ValueTypePtrNode::make(GraphKit* kit, ciValueKlass* vk, CallNode* call) {
  ValueTypePtrNode* vt = new ValueTypePtrNode(vk, kit->zerocon(T_VALUETYPE), kit->C);
  Node* ctl = kit->control();
  ValueTypeBaseNode::make(&kit->gvn(), ctl, kit->merged_memory(), call, vt, vk, 0, TypeFunc::Parms+1, false);
  kit->set_control(ctl);
  return vt;
}

ValueTypePtrNode* ValueTypePtrNode::make(PhaseGVN& gvn, Node*& ctl, Node* mem, Node* oop) {
  // Create and initialize a ValueTypePtrNode by loading all field
  // values from a heap-allocated version and also save the oop.
  ciValueKlass* vk = gvn.type(oop)->is_valuetypeptr()->value_type()->value_klass();
  ValueTypePtrNode* vtptr = new ValueTypePtrNode(vk, oop, gvn.C);
  vtptr->load(gvn, ctl, mem, oop, oop, vk);
  return vtptr;
}