src/hotspot/share/opto/parse2.cpp

old/src/hotspot/share/opto/parse2.cpp

  64   // Handle value type arrays
  65   const TypeOopPtr* elemptr = elemtype->make_oopptr();
  66   const TypeAryPtr* ary_t = _gvn.type(ary)->is_aryptr();
  67   if (elemtype->isa_valuetype() != NULL) {
  68     C->set_flattened_accesses();
  69     // Load from flattened value type array
  70     Node* vt = ValueTypeNode::make_from_flattened(this, elemtype->value_klass(), ary, adr);
  71     push(vt);
  72     return;
  73   } else if (elemptr != NULL && elemptr->is_valuetypeptr() && !elemptr->maybe_null()) {
  74     // Load from non-flattened but flattenable value type array (elements can never be null)
  75     bt = T_VALUETYPE;
  76   } else if (!ary_t->is_not_flat()) {
  77     // Cannot statically determine if array is flattened, emit runtime check
  78     assert(ValueArrayFlatten && elemptr->can_be_value_type() && !ary_t->klass_is_exact() && !ary_t->is_not_null_free() &&
  79            (!elemptr->is_valuetypeptr() || elemptr->value_klass()->flatten_array()), "array can't be flattened");
  80     Node* ctl = control();
  81     IdealKit ideal(this);
  82     IdealVariable res(ideal);
  83     ideal.declarations_done();
  84     Node* kls = load_object_klass(ary);
  85     Node* tag = load_lh_array_tag(kls);
  86     ideal.if_then(tag, BoolTest::ne, intcon(Klass::_lh_array_tag_vt_value)); {
  87       // non-flattened
  88       sync_kit(ideal);
  89       const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
  90       Node* ld = access_load_at(ary, adr, adr_type, elemptr, bt,
  91                                 IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD, ctl);
  92       ideal.sync_kit(this);
  93       ideal.set(res, ld);
  94     } ideal.else_(); {
  95       // flattened
  96       sync_kit(ideal);
  97       if (elemptr->is_valuetypeptr()) {
  98         // Element type is known, cast and load from flattened representation
  99         ciValueKlass* vk = elemptr->value_klass();
 100         assert(vk->flatten_array() && elemptr->maybe_null(), "must be a flattenable and nullable array");
 101         ciArrayKlass* array_klass = ciArrayKlass::make(vk, /* never_null */ true);
 102         const TypeAryPtr* arytype = TypeOopPtr::make_from_klass(array_klass)->isa_aryptr();
 103         Node* cast = _gvn.transform(new CheckCastPPNode(control(), ary, arytype));
 104         adr = array_element_address(cast, idx, T_VALUETYPE, ary_t->size(), control());
 105         Node* vt = ValueTypeNode::make_from_flattened(this, vk, cast, adr)->allocate(this, false, false)->get_oop();
 106         ideal.set(res, vt);
 107         ideal.sync_kit(this);
 108       } else {

 109         // Element type is unknown, emit runtime call
 110         Node* k_adr = basic_plus_adr(kls, in_bytes(ArrayKlass::element_klass_offset()));
 111         Node* elem_klass = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), k_adr, TypeInstPtr::KLASS));
 112         Node* obj_size  = NULL;
 113         kill_dead_locals();
 114         inc_sp(2);
 115         Node* alloc_obj = new_instance(elem_klass, NULL, &obj_size, /*deoptimize_on_exception=*/true);
 116         dec_sp(2);
 117 
 118         AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
 119         assert(alloc->maybe_set_complete(&_gvn), "");
 120         alloc->initialization()->set_complete_with_arraycopy();
 121 
 122         // This membar keeps this access to an unknown flattened array
 123         // correctly ordered with other unknown and known flattened
 124         // array accesses.
 125         insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 126 
 127         BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
 128         // Unknown value type might contain reference fields


 149                                CAST_FROM_FN_PTR(address, OptoRuntime::load_unknown_value),
 150                                "load_unknown_value",
 151                                ary, idx, alloc_obj);
 152           sync_kit(ideal);
 153         }
 154 
 155         // This makes sure no other thread sees a partially initialized buffered value
 156         insert_mem_bar_volatile(Op_MemBarStoreStore, Compile::AliasIdxRaw, alloc->proj_out_or_null(AllocateNode::RawAddress));
 157 
 158         // Same as MemBarCPUOrder above: keep this unknown flattened
 159         // array access correctly ordered with other flattened array
 160         // accesses.
 161         insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 162 
 163         // Prevent any use of the newly allocated value before it is
 164         // fully initialized
 165         alloc_obj = new CastPPNode(alloc_obj, _gvn.type(alloc_obj), true);
 166         alloc_obj->set_req(0, control());
 167         alloc_obj = _gvn.transform(alloc_obj);
 168 




 169         ideal.sync_kit(this);
 170 
 171         ideal.set(res, alloc_obj);
 172       }








 173     } ideal.end_if();
 174     sync_kit(ideal);
 175     push_node(bt, _gvn.transform(ideal.value(res)));
 176     return;
 177   }
 178 
 179   if (elemtype == TypeInt::BOOL) {
 180     bt = T_BOOLEAN;
 181   } else if (bt == T_OBJECT) {
 182     elemtype = ary_t->elem()->make_oopptr();
 183   }
 184 
 185   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 186   Node* ld = access_load_at(ary, adr, adr_type, elemtype, bt,
 187                             IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
 188   if (bt == T_VALUETYPE) {
 189     // Loading a non-flattened (but flattenable) value type from an array
 190     assert(!gvn().type(ld)->maybe_null(), "value type array elements should never be null");
 191     if (elemptr->value_klass()->is_scalarizable()) {
 192       ld = ValueTypeNode::make_from_oop(this, ld, elemptr->value_klass());


 252         cast_val = null_check(cast_val);
 253         if (stopped()) return;
 254         dec_sp(3);
 255         cast_val = ValueTypeNode::make_from_oop(this, cast_val, ary_t->elem()->value_klass());
 256       }
 257       cast_val->as_ValueType()->store_flattened(this, ary, adr);
 258       return;
 259     } else if (elemtype->is_valuetypeptr() && !elemtype->maybe_null()) {
 260       // Store to non-flattened but flattenable value type array (elements can never be null)
 261       if (!cast_val->is_ValueType() && tval->maybe_null()) {
 262         inc_sp(3);
 263         cast_val = null_check(cast_val);
 264         if (stopped()) return;
 265         dec_sp(3);
 266       }
 267     } else if (!ary_t->is_not_flat()) {
 268       // Array might be flattened, emit runtime checks
 269       assert(ValueArrayFlatten && !not_flattenable && elemtype->is_oopptr()->can_be_value_type() &&
 270              !ary_t->klass_is_exact() && !ary_t->is_not_null_free(), "array can't be flattened");
 271       IdealKit ideal(this);
 272       Node* kls = load_object_klass(ary);
 273       Node* layout_val = load_lh_array_tag(kls);
 274       ideal.if_then(layout_val, BoolTest::ne, intcon(Klass::_lh_array_tag_vt_value));
 275       {
 276         // non-flattened
 277         sync_kit(ideal);
 278         gen_value_array_null_guard(ary, cast_val, 3);
 279         access_store_at(ary, adr, adr_type, cast_val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY, false, false);
 280         ideal.sync_kit(this);
 281       }
 282       ideal.else_();
 283       {
 284         // flattened
 285         if (!cast_val->is_ValueType() && tval->maybe_null()) {
 286           // Add null check
 287           sync_kit(ideal);
 288           Node* null_ctl = top();
 289           cast_val = null_check_oop(cast_val, &null_ctl);
 290           if (null_ctl != top()) {
 291             PreserveJVMState pjvms(this);
 292             inc_sp(3);
 293             set_control(null_ctl);
 294             uncommon_trap(Deoptimization::Reason_null_check, Deoptimization::Action_none);
 295             dec_sp(3);
 296           }
 297           ideal.sync_kit(this);
 298         }
 299         // Try to determine the value klass
 300         ciValueKlass* vk = NULL;
 301         if (tval->isa_valuetype() || tval->is_valuetypeptr()) {
 302           vk = tval->value_klass();
 303         } else if (tval_init->isa_valuetype() || tval_init->is_valuetypeptr()) {
 304           vk = tval_init->value_klass();
 305         } else if (elemtype->is_valuetypeptr()) {
 306           vk = elemtype->value_klass();
 307         }

 308         if (vk != NULL && !stopped()) {
 309           // Element type is known, cast and store to flattened representation
 310           sync_kit(ideal);
 311           assert(vk->flatten_array() && elemtype->maybe_null(), "must be a flattenable and nullable array");
 312           ciArrayKlass* array_klass = ciArrayKlass::make(vk, /* never_null */ true);
 313           const TypeAryPtr* arytype = TypeOopPtr::make_from_klass(array_klass)->isa_aryptr();
 314           ary = _gvn.transform(new CheckCastPPNode(control(), ary, arytype));
 315           adr = array_element_address(ary, idx, T_OBJECT, arytype->size(), control());
 316           if (!cast_val->is_ValueType()) {
 317             assert(!gvn().type(cast_val)->maybe_null(), "value type array elements should never be null");
 318             cast_val = ValueTypeNode::make_from_oop(this, cast_val, vk);
 319           }
 320           cast_val->as_ValueType()->store_flattened(this, ary, adr);
 321           ideal.sync_kit(this);
 322         } else if (!ideal.ctrl()->is_top()) {
 323           // Element type is unknown, emit runtime call
 324           sync_kit(ideal);
 325 
 326           // This membar keeps this access to an unknown flattened
 327           // array correctly ordered with other unknown and known
 328           // flattened array accesses.
 329           insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 330           ideal.sync_kit(this);
 331 
 332           ideal.make_leaf_call(OptoRuntime::store_unknown_value_Type(),
 333                                CAST_FROM_FN_PTR(address, OptoRuntime::store_unknown_value),
 334                                "store_unknown_value",
 335                                cast_val, ary, idx);
 336 
 337           sync_kit(ideal);
 338           // Same as MemBarCPUOrder above: keep this unknown
 339           // flattened array access correctly ordered with other
 340           // flattened array accesses.
 341           insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 342           ideal.sync_kit(this);
 343         }
 344       }








 345       ideal.end_if();
 346       sync_kit(ideal);
 347       return;
 348     } else if (!ary_t->is_not_null_free()) {
 349       // Array is not flattened but may be null free
 350       assert(elemtype->is_oopptr()->can_be_value_type() && !ary_t->klass_is_exact(), "array can't be null free");
 351       ary = gen_value_array_null_guard(ary, cast_val, 3, true);
 352     }
 353   }
 354 
 355   access_store_at(ary, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 356 }
 357 
 358 
 359 //------------------------------array_addressing-------------------------------
 360 // Pull array and index from the stack.  Compute pointer-to-element.
 361 Node* Parse::array_addressing(BasicType type, int vals, const Type* *result2) {
 362   Node *idx   = peek(0+vals);   // Get from stack without popping
 363   Node *ary   = peek(1+vals);   // in case of exception
 364 


new/src/hotspot/share/opto/parse2.cpp

  64   // Handle value type arrays
  65   const TypeOopPtr* elemptr = elemtype->make_oopptr();
  66   const TypeAryPtr* ary_t = _gvn.type(ary)->is_aryptr();
  67   if (elemtype->isa_valuetype() != NULL) {
  68     C->set_flattened_accesses();
  69     // Load from flattened value type array
  70     Node* vt = ValueTypeNode::make_from_flattened(this, elemtype->value_klass(), ary, adr);
  71     push(vt);
  72     return;
  73   } else if (elemptr != NULL && elemptr->is_valuetypeptr() && !elemptr->maybe_null()) {
  74     // Load from non-flattened but flattenable value type array (elements can never be null)
  75     bt = T_VALUETYPE;
  76   } else if (!ary_t->is_not_flat()) {
  77     // Cannot statically determine if array is flattened, emit runtime check
  78     assert(ValueArrayFlatten && elemptr->can_be_value_type() && !ary_t->klass_is_exact() && !ary_t->is_not_null_free() &&
  79            (!elemptr->is_valuetypeptr() || elemptr->value_klass()->flatten_array()), "array can't be flattened");
  80     Node* ctl = control();
  81     IdealKit ideal(this);
  82     IdealVariable res(ideal);
  83     ideal.declarations_done();
  84     Node* flattened = gen_flattened_array_test(ary);
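         // gen_flattened_array_test() is assumed to return a non-zero value iff the array
         // turns out to be flattened at runtime, so the then-branch below handles the
         // flattened case and the else-branch the non-flattened case.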
  85     ideal.if_then(flattened, BoolTest::ne, zerocon(flattened->bottom_type()->basic_type())); {









  86       // flattened
  87       sync_kit(ideal);
  88       if (elemptr->is_valuetypeptr()) {
  89         // Element type is known, cast and load from flattened representation
  90         ciValueKlass* vk = elemptr->value_klass();
  91         assert(vk->flatten_array() && elemptr->maybe_null(), "must be a flattenable and nullable array");
  92         ciArrayKlass* array_klass = ciArrayKlass::make(vk, /* never_null */ true);
  93         const TypeAryPtr* arytype = TypeOopPtr::make_from_klass(array_klass)->isa_aryptr();
  94         Node* cast = _gvn.transform(new CheckCastPPNode(control(), ary, arytype));
  95         Node* casted_adr = array_element_address(cast, idx, T_VALUETYPE, ary_t->size(), control());
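             // Load the flattened element and buffer it on the heap so that this branch,
             // like the non-flattened branch, contributes an oop to the 'res' merge.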
  96         Node* vt = ValueTypeNode::make_from_flattened(this, vk, cast, casted_adr)->allocate(this, false, false)->get_oop();
  97         ideal.set(res, vt);
  98         ideal.sync_kit(this);
  99       } else {
 100         Node* kls = load_object_klass(ary);
 101         // Element type is unknown, emit runtime call
 102         Node* k_adr = basic_plus_adr(kls, in_bytes(ArrayKlass::element_klass_offset()));
 103         Node* elem_klass = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), k_adr, TypeInstPtr::KLASS));
 104         Node* obj_size  = NULL;
 105         kill_dead_locals();
 106         inc_sp(2);
 107         Node* alloc_obj = new_instance(elem_klass, NULL, &obj_size, /*deoptimize_on_exception=*/true);
 108         dec_sp(2);
 109 
 110         AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
 111         assert(alloc->maybe_set_complete(&_gvn), "");
 112         alloc->initialization()->set_complete_with_arraycopy();
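             // Assumption: marking the initialization complete-with-arraycopy tells later
             // phases that the buffer is filled by the copy/runtime call emitted below,
             // not by individual captured field stores.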
 113 
 114         // This membar keeps this access to an unknown flattened array
 115         // correctly ordered with other unknown and known flattened
 116         // array accesses.
 117         insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 118 
 119         BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
 120         // Unknown value type might contain reference fields


 141                                CAST_FROM_FN_PTR(address, OptoRuntime::load_unknown_value),
 142                                "load_unknown_value",
 143                                ary, idx, alloc_obj);
 144           sync_kit(ideal);
 145         }
 146 
 147         // This makes sure no other thread sees a partially initialized buffered value
 148         insert_mem_bar_volatile(Op_MemBarStoreStore, Compile::AliasIdxRaw, alloc->proj_out_or_null(AllocateNode::RawAddress));
 149 
 150         // Same as MemBarCPUOrder above: keep this unknown flattened
 151         // array access correctly ordered with other flattened array
 152         // accesses.
 153         insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 154 
 155         // Prevent any use of the newly allocated value before it is
 156         // fully initialized
 157         alloc_obj = new CastPPNode(alloc_obj, _gvn.type(alloc_obj), true);
 158         alloc_obj->set_req(0, control());
 159         alloc_obj = _gvn.transform(alloc_obj);
 160 
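             // Assumption: cast_to_flatten_array() tags the buffered oop's type as coming
             // from a flat array, so that property is not lost even though the element
             // klass is unknown at compile time.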
 161         const Type* unknown_value = TypeInstPtr::BOTTOM->cast_to_flatten_array();
 162 
 163         alloc_obj = _gvn.transform(new CheckCastPPNode(control(), alloc_obj, unknown_value));
 164 
 165         ideal.sync_kit(this);
 166 
 167         ideal.set(res, alloc_obj);
 168       }
 169     } ideal.else_(); {
 170       // non-flattened
 171       sync_kit(ideal);
 172       const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 173       Node* ld = access_load_at(ary, adr, adr_type, elemptr, bt,
 174                                 IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD, ctl);
 175       ideal.sync_kit(this);
 176       ideal.set(res, ld);
 177     } ideal.end_if();
 178     sync_kit(ideal);
 179     push_node(bt, _gvn.transform(ideal.value(res)));
 180     return;
 181   }
 182 
 183   if (elemtype == TypeInt::BOOL) {
 184     bt = T_BOOLEAN;
 185   } else if (bt == T_OBJECT) {
 186     elemtype = ary_t->elem()->make_oopptr();
 187   }
 188 
 189   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 190   Node* ld = access_load_at(ary, adr, adr_type, elemtype, bt,
 191                             IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
 192   if (bt == T_VALUETYPE) {
 193     // Loading a non-flattened (but flattenable) value type from an array
 194     assert(!gvn().type(ld)->maybe_null(), "value type array elements should never be null");
 195     if (elemptr->value_klass()->is_scalarizable()) {
 196       ld = ValueTypeNode::make_from_oop(this, ld, elemptr->value_klass());


 256         cast_val = null_check(cast_val);
 257         if (stopped()) return;
 258         dec_sp(3);
 259         cast_val = ValueTypeNode::make_from_oop(this, cast_val, ary_t->elem()->value_klass());
 260       }
 261       cast_val->as_ValueType()->store_flattened(this, ary, adr);
 262       return;
 263     } else if (elemtype->is_valuetypeptr() && !elemtype->maybe_null()) {
 264       // Store to non-flattened but flattenable value type array (elements can never be null)
 265       if (!cast_val->is_ValueType() && tval->maybe_null()) {
 266         inc_sp(3);
 267         cast_val = null_check(cast_val);
 268         if (stopped()) return;
 269         dec_sp(3);
 270       }
 271     } else if (!ary_t->is_not_flat()) {
 272       // Array might be flattened, emit runtime checks
 273       assert(ValueArrayFlatten && !not_flattenable && elemtype->is_oopptr()->can_be_value_type() &&
 274              !ary_t->klass_is_exact() && !ary_t->is_not_null_free(), "array can't be flattened");
 275       IdealKit ideal(this);
 276       Node* flattened = gen_flattened_array_test(ary);
 277       ideal.if_then(flattened, BoolTest::ne, zerocon(flattened->bottom_type()->basic_type())); {
 278         Node* val = cast_val;
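             // 'val' (and 'casted_ary' further down) are branch-local copies, presumably so
             // the casts and null checks emitted for the flattened case do not leak into
             // the non-flattened branch below, which still uses the original 'ary' and 'cast_val'.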









 279         // flattened
 280         if (!val->is_ValueType() && tval->maybe_null()) {
 281           // Add null check
 282           sync_kit(ideal);
 283           Node* null_ctl = top();
 284           val = null_check_oop(val, &null_ctl);
 285           if (null_ctl != top()) {
 286             PreserveJVMState pjvms(this);
 287             inc_sp(3);
 288             set_control(null_ctl);
 289             uncommon_trap(Deoptimization::Reason_null_check, Deoptimization::Action_none);
 290             dec_sp(3);
 291           }
 292           ideal.sync_kit(this);
 293         }
 294         // Try to determine the value klass
 295         ciValueKlass* vk = NULL;
 296         if (tval->isa_valuetype() || tval->is_valuetypeptr()) {
 297           vk = tval->value_klass();
 298         } else if (tval_init->isa_valuetype() || tval_init->is_valuetypeptr()) {
 299           vk = tval_init->value_klass();
 300         } else if (elemtype->is_valuetypeptr()) {
 301           vk = elemtype->value_klass();
 302         }
 303         Node* casted_ary = ary;
 304         if (vk != NULL && !stopped()) {
 305           // Element type is known, cast and store to flattened representation
 306           sync_kit(ideal);
 307           assert(vk->flatten_array() && elemtype->maybe_null(), "must be a flattenable and nullable array");
 308           ciArrayKlass* array_klass = ciArrayKlass::make(vk, /* never_null */ true);
 309           const TypeAryPtr* arytype = TypeOopPtr::make_from_klass(array_klass)->isa_aryptr();
 310           casted_ary = _gvn.transform(new CheckCastPPNode(control(), casted_ary, arytype));
 311           Node* casted_adr = array_element_address(casted_ary, idx, T_OBJECT, arytype->size(), control());
 312           if (!val->is_ValueType()) {
 313             assert(!gvn().type(val)->maybe_null(), "value type array elements should never be null");
 314             val = ValueTypeNode::make_from_oop(this, val, vk);
 315           }
 316           val->as_ValueType()->store_flattened(this, casted_ary, casted_adr);
 317           ideal.sync_kit(this);
 318         } else if (!ideal.ctrl()->is_top()) {
 319           // Element type is unknown, emit runtime call
 320           sync_kit(ideal);
 321 
 322           // This membar keeps this access to an unknown flattened
 323           // array correctly ordered with other unknown and known
 324           // flattened array accesses.
 325           insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 326           ideal.sync_kit(this);
 327 
 328           ideal.make_leaf_call(OptoRuntime::store_unknown_value_Type(),
 329                                CAST_FROM_FN_PTR(address, OptoRuntime::store_unknown_value),
 330                                "store_unknown_value",
 331                                val, casted_ary, idx);
 332 
 333           sync_kit(ideal);
 334           // Same as MemBarCPUOrder above: keep this unknown
 335           // flattened array access correctly ordered with other
 336           // flattened array accesses.
 337           insert_mem_bar_volatile(Op_MemBarCPUOrder, C->get_alias_index(TypeAryPtr::VALUES));
 338           ideal.sync_kit(this);
 339         }
 340       }
 341       ideal.else_();
 342       {
 343         // non-flattened
 344         sync_kit(ideal);
 345         gen_value_array_null_guard(ary, cast_val, 3);
 346         access_store_at(ary, adr, adr_type, cast_val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY, false, false);
 347         ideal.sync_kit(this);
 348       }
 349       ideal.end_if();
 350       sync_kit(ideal);
 351       return;
 352     } else if (!ary_t->is_not_null_free()) {
 353       // Array is not flattened but may be null free
 354       assert(elemtype->is_oopptr()->can_be_value_type() && !ary_t->klass_is_exact(), "array can't be null free");
 355       ary = gen_value_array_null_guard(ary, cast_val, 3, true);
 356     }
 357   }
 358 
 359   access_store_at(ary, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 360 }
 361 
 362 
 363 //------------------------------array_addressing-------------------------------
 364 // Pull array and index from the stack.  Compute pointer-to-element.
 365 Node* Parse::array_addressing(BasicType type, int vals, const Type* *result2) {
 366   Node *idx   = peek(0+vals);   // Get from stack without popping
 367   Node *ary   = peek(1+vals);   // in case of exception
 368 

