src/hotspot/share/opto/library_call.cpp

2440     }
2441     value_klass = klass->as_value_klass();
2442   }
2443 
2444   receiver = null_check(receiver);
2445   if (stopped()) {
2446     return true;
2447   }
2448 
2449   if (base->is_ValueType()) {
2450     ValueTypeNode* vt = base->as_ValueType();
2451 
2452     if (is_store) {
2453       if (!vt->is_allocated(&_gvn) || !_gvn.type(vt)->is_valuetype()->larval()) {
2454         return false;
2455       }
2456       base = vt->get_oop();
2457     } else {
2458       if (offset->is_Con()) {
2459         long off = find_long_con(offset, 0);
2460         ciValueKlass* vk = _gvn.type(vt)->is_valuetype()->value_klass();
2461         if ((long)(int)off != off || !vk->contains_field_offset(off)) {
2462           return false;
2463         }
2464 
2465         ciField* f = vk->get_non_flattened_field_by_offset((int)off);
2466 
2467         if (f != NULL) {
2468           BasicType bt = f->layout_type();
2469           if (bt == T_ARRAY || bt == T_NARROWOOP) {
2470             bt = T_OBJECT;
2471           }
2472           if (bt == type) {
2473             if (bt != T_VALUETYPE || f->type() == value_klass) {
2474               set_result(vt->field_value_by_offset((int)off, false));
2475               return true;
2476             }
2477           }
2478         }
2479       }
2480       vt = vt->allocate(this)->as_ValueType();


2550       bt = T_OBJECT;
2551     }
2552     if ((bt == T_OBJECT) != (type == T_OBJECT)) {
2553       // Don't intrinsify mismatched object accesses
2554       return false;
2555     }
2556     mismatched = (bt != type);
2557   } else if (alias_type->adr_type()->isa_oopptr()) {
2558     mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
2559   }
2560 
2561   if (type == T_VALUETYPE) {
2562     if (adr_type->isa_instptr()) {
2563       if (field == NULL || field->type() != value_klass) {
2564         mismatched = true;
2565       }
2566     } else if (adr_type->isa_aryptr()) {
2567       const Type* elem = adr_type->is_aryptr()->elem();
2568       if (!elem->isa_valuetype()) {
2569         mismatched = true;
2570       } else if (elem->is_valuetype()->value_klass() != value_klass) {
2571         mismatched = true;
2572       }
2573     }
2574     if (is_store) {
2575       const Type* val_t = _gvn.type(val);
2576       if (!val_t->isa_valuetype() ||
2577           val_t->is_valuetype()->value_klass() != value_klass) {
2578         return false;
2579       }
2580     }
2581   }
2582 
2583   assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2584 
2585   if (mismatched) {
2586     decorators |= C2_MISMATCHED;
2587   }
2588 
2589   // First guess at the value type.
2590   const Type *value_type = Type::get_const_basic_type(type);
2591 
2592   // Figure out the memory ordering.
2593   decorators |= mo_decorator_for_access_kind(kind);
2594 
2595   if (!is_store) {
2596     if (type == T_OBJECT) {
2597       const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
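
For context on mismatched accesses: they arise when the declared field type and the Unsafe accessor disagree. As long as neither side is an object type, the intrinsic does not bail out; it just records C2_MISMATCHED (bt != type) so the access is compiled conservatively. A small sketch, again with illustrative names:

    import java.lang.reflect.Field;
    import sun.misc.Unsafe;

    public class MismatchedSketch {
        static class Holder { int value = 0x01020304; }

        public static void main(String[] args) throws Exception {
            Field f = Unsafe.class.getDeclaredField("theUnsafe");
            f.setAccessible(true);
            Unsafe u = (Unsafe) f.get(null);

            long off = u.objectFieldOffset(Holder.class.getDeclaredField("value"));
            // Field is declared T_INT but read as T_BYTE: a mismatched access.
            // Which byte comes back depends on the platform's endianness.
            System.out.println(u.getByte(new Holder(), off));
        }
    }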


3514     set_result(result);
3515     return true;
3516   }
3517   return false;
3518 }
3519 
3520 //-------------------------inline_Class_cast-------------------
3521 bool LibraryCallKit::inline_Class_cast() {
3522   Node* mirror = argument(0); // Class
3523   Node* obj    = argument(1);
3524   const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3525   if (mirror_con == NULL) {
3526     return false;  // dead path (mirror->is_top()).
3527   }
3528   if (obj == NULL || obj->is_top()) {
3529     return false;  // dead path
3530   }
3531 
3532   ciKlass* obj_klass = NULL;
3533   if (obj->is_ValueType()) {
3534     const TypeValueType* tvt = _gvn.type(obj)->is_valuetype();
3535     obj_klass = tvt->value_klass();
3536   } else {
3537     const TypeOopPtr* tp = _gvn.type(obj)->isa_oopptr();
3538     if (tp != NULL) {
3539       obj_klass = tp->klass();
3540     }
3541   }
3542 
3543   // First, see if Class.cast() can be folded statically.
3544   // java_mirror_type() returns non-null for compile-time Class constants.
3545   bool is_val_type = false;
3546   ciType* tm = mirror_con->java_mirror_type(&is_val_type);
3547   if (!obj->is_ValueType() && is_val_type) {
3548     obj = null_check(obj);
3549   }
3550   if (tm != NULL && tm->is_klass() && obj_klass != NULL) {
3551     if (!obj_klass->is_loaded()) {
3552       // Don't use intrinsic when class is not loaded.
3553       return false;
3554     } else {
3555       int static_res = C->static_subtype_check(tm->as_klass(), obj_klass);
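
When the mirror is a compile-time Class constant and the receiver's klass is known and loaded, static_subtype_check() can decide the cast at compile time and the entire Class.cast() call folds away. A trivial sketch of a cast that folds (the class name is illustrative):

    public class CastFoldSketch {
        public static void main(String[] args) {
            Object o = "hello";
            // Constant mirror + statically known receiver type: the subtype
            // check is resolved at compile time and the cast is a no-op.
            CharSequence cs = CharSequence.class.cast(o);
            System.out.println(cs); // hello
        }
    }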


3581     return true;
3582   }
3583 
3584   // Not-subtype or the mirror's klass ptr is NULL (in case it is a primitive).
3585   enum { _bad_type_path = 1, _prim_path = 2, _npe_path = 3, PATH_LIMIT };
3586   RegionNode* region = new RegionNode(PATH_LIMIT);
3587   record_for_igvn(region);
3588 
3589   // Now load the mirror's klass metaobject, and null-check it.
3590   // If kls is null, we have a primitive mirror and
3591   // nothing is an instance of a primitive type.
3592   Node* kls = load_klass_from_mirror(mirror, false, region, _prim_path);
3593 
3594   Node* res = top();
3595   if (!stopped()) {
3596     // TODO move this into do_checkcast?
3597     if (EnableValhalla && !obj->is_ValueType() && !is_val_type) {
3598       // Check if (mirror == value_mirror && obj == null)
3599       RegionNode* r = new RegionNode(3);
3600       Node* p = basic_plus_adr(mirror, java_lang_Class::value_mirror_offset_in_bytes());
3601       Node* value_mirror = access_load_at(mirror, p, _gvn.type(p)->is_ptr(), TypeInstPtr::MIRROR, T_OBJECT, IN_HEAP);
3602       Node* cmp = _gvn.transform(new CmpPNode(mirror, value_mirror));
3603       Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
3604       Node* if_ne = generate_fair_guard(bol, NULL);
3605       r->init_req(1, if_ne);
3606 
3607       // Casting to .val, check for null
3608       Node* null_ctr = top();
3609       null_check_oop(obj, &null_ctr);
3610       region->init_req(_npe_path, null_ctr);
3611       r->init_req(2, control());
3612 
3613       set_control(_gvn.transform(r));
3614     }
3615 
3616     Node* bad_type_ctrl = top();
3617     // Do checkcast optimizations.
3618     res = gen_checkcast(obj, kls, &bad_type_ctrl);
3619     region->init_req(_bad_type_path, bad_type_ctrl);
3620   }
3621   if (region->in(_prim_path) != top() ||
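
The region paths cover what cannot be folded: _prim_path handles primitive mirrors, whose klass metaobject is null (nothing is an instance of a primitive type), and _npe_path implements the Valhalla rule that casting null to a value ('.val') mirror throws NullPointerException instead of passing the null through. A sketch of the two behaviors observable without Valhalla:

    public class CastPathsSketch {
        public static void main(String[] args) {
            // Primitive mirror: load_klass_from_mirror() yields null, so any
            // non-null argument ends up throwing ClassCastException.
            try {
                int.class.cast("not an int");
            } catch (ClassCastException expected) {
                System.out.println("ClassCastException as expected");
            }
            // null passes through an ordinary Class.cast() unchanged; only a
            // Valhalla value mirror turns this case into a NullPointerException.
            System.out.println(int.class.cast(null)); // prints null
        }
    }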


4309     result_val->init_req(_slow_path, slow_result);
4310     result_io  ->set_req(_slow_path, i_o());
4311     result_mem ->set_req(_slow_path, reset_memory());
4312   }
4313 
4314   // Return the combined state.
4315   set_i_o(        _gvn.transform(result_io)  );
4316   set_all_memory( _gvn.transform(result_mem));
4317 
4318   set_result(result_reg, result_val);
4319   return true;
4320 }
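
The Region/Phi merge above joins a fast path with the runtime call on the slow path; assuming this hunk is the tail of inline_native_hashcode (the function preceding inline_native_getClass), the user-visible contract being compiled is just:

    public class HashSketch {
        public static void main(String[] args) {
            Object o = new Object();
            // Fast path: the identity hash is read out of the mark word once
            // installed; slow path: a runtime call computes and installs it.
            // Either way the two calls below agree.
            System.out.println(o.hashCode() == System.identityHashCode(o)); // true
        }
    }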
4321 
4322 //---------------------------inline_native_getClass----------------------------
4323 // public final native Class<?> java.lang.Object.getClass();
4324 //
4325 // Build special case code for calls to getClass on an object.
4326 bool LibraryCallKit::inline_native_getClass() {
4327   Node* obj = argument(0);
4328   if (obj->is_ValueType()) {
4329     ciKlass* vk = _gvn.type(obj)->is_valuetype()->value_klass();
4330     set_result(makecon(TypeInstPtr::make(vk->java_mirror())));
4331     return true;
4332   }
4333   obj = null_check_receiver();
4334   if (stopped())  return true;
4335   set_result(load_mirror_from_klass(load_object_klass(obj)));
4336   return true;
4337 }
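
For a value type receiver the klass is statically known, so the intrinsic folds getClass() to a constant mirror; for ordinary objects it compiles to two dependent loads (klass from the object header, then mirror from the klass) with no runtime call. A trivial sketch:

    public class GetClassSketch {
        public static void main(String[] args) {
            Object o = Integer.valueOf(42);
            // Two loads on the compiled fast path: header klass, then mirror.
            System.out.println(o.getClass()); // class java.lang.Integer
        }
    }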
4338 
4339 //-----------------inline_native_Reflection_getCallerClass---------------------
4340 // public static native Class<?> sun.reflect.Reflection.getCallerClass();
4341 //
4342 // In the presence of deep enough inlining, getCallerClass() becomes a no-op.
4343 //
4344 // NOTE: This code must perform the same logic as JVM_GetCallerClass
4345 // in that it must skip particular security frames and checks for
4346 // caller sensitive methods.
4347 bool LibraryCallKit::inline_native_Reflection_getCallerClass() {
4348 #ifndef PRODUCT
4349   if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {


5190     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
5191 
5192     if (not_subtype_ctrl != top()) {
5193       PreserveJVMState pjvms(this);
5194       set_control(not_subtype_ctrl);
5195       uncommon_trap(Deoptimization::Reason_intrinsic,
5196                     Deoptimization::Action_make_not_entrant);
5197       assert(stopped(), "Should be stopped");
5198     }
5199 
5200     const TypeKlassPtr* dest_klass_t = _gvn.type(dest_klass)->is_klassptr();
5201     const Type *toop = TypeOopPtr::make_from_klass(dest_klass_t->klass());
5202     src = _gvn.transform(new CheckCastPPNode(control(), src, toop));
5203 
5204     src_type = _gvn.type(src);
5205     top_src  = src_type->isa_aryptr();
5206 
5207     if (top_dest != NULL &&
5208         top_dest->elem()->make_oopptr() != NULL &&
5209         top_dest->elem()->make_oopptr()->can_be_value_type()) {
5210       generate_valueArray_guard(load_object_klass(dest), slow_region);
5211     }
5212 
5213     if (top_src != NULL &&
5214         top_src->elem()->make_oopptr() != NULL &&
5215         top_src->elem()->make_oopptr()->can_be_value_type()) {
5216       generate_valueArray_guard(load_object_klass(src), slow_region);
5217     }
5218 
5219     {
5220       PreserveJVMState pjvms(this);
5221       set_control(_gvn.transform(slow_region));
5222       uncommon_trap(Deoptimization::Reason_intrinsic,
5223                     Deoptimization::Action_make_not_entrant);
5224       assert(stopped(), "Should be stopped");
5225     }
5226   }
5227 
5228   arraycopy_move_allocation_here(alloc, dest, saved_jvms, saved_reexecute_sp, new_idx);
5229 
5230   if (stopped()) {
5231     return true;
5232   }
5233 
5234   Node* new_src = access_resolve(src, ACCESS_READ);
5235   Node* new_dest = access_resolve(dest, ACCESS_WRITE);
5236 
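
The guards above protect the fast arraycopy: a subtype check on the destination element klass, plus (under Valhalla) valueArray guards that route possibly-flattened value type arrays to the slow path. The plain-Java contract being guarded, as a sketch:

    public class CopySketch {
        public static void main(String[] args) {
            Object[] src  = new String[] { "a", "b", "c" };
            Object[] dest = new Object[3];
            // Statically both sides are Object[]; the element subtype check is
            // left to runtime, which is what gen_subtype_check covers above.
            System.arraycopy(src, 0, dest, 0, 3);

            Object[] ints = new Integer[3];
            try {
                System.arraycopy(src, 0, ints, 0, 3); // String is not an Integer
            } catch (ArrayStoreException expected) {
                System.out.println("ArrayStoreException as expected");
            }
        }
    }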




2440     }
2441     value_klass = klass->as_value_klass();
2442   }
2443 
2444   receiver = null_check(receiver);
2445   if (stopped()) {
2446     return true;
2447   }
2448 
2449   if (base->is_ValueType()) {
2450     ValueTypeNode* vt = base->as_ValueType();
2451 
2452     if (is_store) {
2453       if (!vt->is_allocated(&_gvn) || !_gvn.type(vt)->is_valuetype()->larval()) {
2454         return false;
2455       }
2456       base = vt->get_oop();
2457     } else {
2458       if (offset->is_Con()) {
2459         long off = find_long_con(offset, 0);
2460         ciValueKlass* vk = vt->type()->value_klass();
2461         if ((long)(int)off != off || !vk->contains_field_offset(off)) {
2462           return false;
2463         }
2464 
2465         ciField* f = vk->get_non_flattened_field_by_offset((int)off);
2466 
2467         if (f != NULL) {
2468           BasicType bt = f->layout_type();
2469           if (bt == T_ARRAY || bt == T_NARROWOOP) {
2470             bt = T_OBJECT;
2471           }
2472           if (bt == type) {
2473             if (bt != T_VALUETYPE || f->type() == value_klass) {
2474               set_result(vt->field_value_by_offset((int)off, false));
2475               return true;
2476             }
2477           }
2478         }
2479       }
2480       vt = vt->allocate(this)->as_ValueType();


2550       bt = T_OBJECT;
2551     }
2552     if ((bt == T_OBJECT) != (type == T_OBJECT)) {
2553       // Don't intrinsify mismatched object accesses
2554       return false;
2555     }
2556     mismatched = (bt != type);
2557   } else if (alias_type->adr_type()->isa_oopptr()) {
2558     mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
2559   }
2560 
2561   if (type == T_VALUETYPE) {
2562     if (adr_type->isa_instptr()) {
2563       if (field == NULL || field->type() != value_klass) {
2564         mismatched = true;
2565       }
2566     } else if (adr_type->isa_aryptr()) {
2567       const Type* elem = adr_type->is_aryptr()->elem();
2568       if (!elem->isa_valuetype()) {
2569         mismatched = true;
2570       } else if (elem->value_klass() != value_klass) {
2571         mismatched = true;
2572       }
2573     }
2574     if (is_store) {
2575       const Type* val_t = _gvn.type(val);
2576       if (!val_t->isa_valuetype() || val_t->value_klass() != value_klass) {
2577         return false;
2578       }
2579     }
2580   }
2581 
2582   assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2583 
2584   if (mismatched) {
2585     decorators |= C2_MISMATCHED;
2586   }
2587 
2588   // First guess at the value type.
2589   const Type *value_type = Type::get_const_basic_type(type);
2590 
2591   // Figure out the memory ordering.
2592   decorators |= mo_decorator_for_access_kind(kind);
2593 
2594   if (!is_store) {
2595     if (type == T_OBJECT) {
2596       const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);


3513     set_result(result);
3514     return true;
3515   }
3516   return false;
3517 }
3518 
3519 //-------------------------inline_Class_cast-------------------
3520 bool LibraryCallKit::inline_Class_cast() {
3521   Node* mirror = argument(0); // Class
3522   Node* obj    = argument(1);
3523   const TypeInstPtr* mirror_con = _gvn.type(mirror)->isa_instptr();
3524   if (mirror_con == NULL) {
3525     return false;  // dead path (mirror->is_top()).
3526   }
3527   if (obj == NULL || obj->is_top()) {
3528     return false;  // dead path
3529   }
3530 
3531   ciKlass* obj_klass = NULL;
3532   if (obj->is_ValueType()) {
3533     obj_klass = _gvn.type(obj)->value_klass();
3534   } else {
3535     const TypeOopPtr* tp = _gvn.type(obj)->isa_oopptr();
3536     if (tp != NULL) {
3537       obj_klass = tp->klass();
3538     }
3539   }
3540 
3541   // First, see if Class.cast() can be folded statically.
3542   // java_mirror_type() returns non-null for compile-time Class constants.
3543   bool is_val_type = false;
3544   ciType* tm = mirror_con->java_mirror_type(&is_val_type);
3545   if (!obj->is_ValueType() && is_val_type) {
3546     obj = null_check(obj);
3547   }
3548   if (tm != NULL && tm->is_klass() && obj_klass != NULL) {
3549     if (!obj_klass->is_loaded()) {
3550       // Don't use intrinsic when class is not loaded.
3551       return false;
3552     } else {
3553       int static_res = C->static_subtype_check(tm->as_klass(), obj_klass);


3579     return true;
3580   }
3581 
3582   // Not-subtype or the mirror's klass ptr is NULL (in case it is a primitive).
3583   enum { _bad_type_path = 1, _prim_path = 2, _npe_path = 3, PATH_LIMIT };
3584   RegionNode* region = new RegionNode(PATH_LIMIT);
3585   record_for_igvn(region);
3586 
3587   // Now load the mirror's klass metaobject, and null-check it.
3588   // If kls is null, we have a primitive mirror and
3589   // nothing is an instance of a primitive type.
3590   Node* kls = load_klass_from_mirror(mirror, false, region, _prim_path);
3591 
3592   Node* res = top();
3593   if (!stopped()) {
3594     // TODO move this into do_checkcast?
3595     if (EnableValhalla && !obj->is_ValueType() && !is_val_type) {
3596       // Check if (mirror == value_mirror && obj == null)
3597       RegionNode* r = new RegionNode(3);
3598       Node* p = basic_plus_adr(mirror, java_lang_Class::value_mirror_offset_in_bytes());
3599       Node* value_mirror = access_load_at(mirror, p, _gvn.type(p)->is_ptr(), TypeInstPtr::MIRROR->cast_to_ptr_type(TypePtr::BotPTR), T_OBJECT, IN_HEAP);
3600       Node* cmp = _gvn.transform(new CmpPNode(mirror, value_mirror));
3601       Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
3602       Node* if_ne = generate_fair_guard(bol, NULL);
3603       r->init_req(1, if_ne);
3604 
3605       // Casting to .val, check for null
3606       Node* null_ctr = top();
3607       null_check_oop(obj, &null_ctr);
3608       region->init_req(_npe_path, null_ctr);
3609       r->init_req(2, control());
3610 
3611       set_control(_gvn.transform(r));
3612     }
3613 
3614     Node* bad_type_ctrl = top();
3615     // Do checkcast optimizations.
3616     res = gen_checkcast(obj, kls, &bad_type_ctrl);
3617     region->init_req(_bad_type_path, bad_type_ctrl);
3618   }
3619   if (region->in(_prim_path) != top() ||


4307     result_val->init_req(_slow_path, slow_result);
4308     result_io  ->set_req(_slow_path, i_o());
4309     result_mem ->set_req(_slow_path, reset_memory());
4310   }
4311 
4312   // Return the combined state.
4313   set_i_o(        _gvn.transform(result_io)  );
4314   set_all_memory( _gvn.transform(result_mem));
4315 
4316   set_result(result_reg, result_val);
4317   return true;
4318 }
4319 
4320 //---------------------------inline_native_getClass----------------------------
4321 // public final native Class<?> java.lang.Object.getClass();
4322 //
4323 // Build special case code for calls to getClass on an object.
4324 bool LibraryCallKit::inline_native_getClass() {
4325   Node* obj = argument(0);
4326   if (obj->is_ValueType()) {
4327     ciKlass* vk = _gvn.type(obj)->value_klass();
4328     set_result(makecon(TypeInstPtr::make(vk->java_mirror())));
4329     return true;
4330   }
4331   obj = null_check_receiver();
4332   if (stopped())  return true;
4333   set_result(load_mirror_from_klass(load_object_klass(obj)));
4334   return true;
4335 }
4336 
4337 //-----------------inline_native_Reflection_getCallerClass---------------------
4338 // public static native Class<?> sun.reflect.Reflection.getCallerClass();
4339 //
4340 // In the presence of deep enough inlining, getCallerClass() becomes a no-op.
4341 //
4342 // NOTE: This code must perform the same logic as JVM_GetCallerClass
4343 // in that it must skip particular security frames and checks for
4344 // caller sensitive methods.
4345 bool LibraryCallKit::inline_native_Reflection_getCallerClass() {
4346 #ifndef PRODUCT
4347   if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {


5188     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
5189 
5190     if (not_subtype_ctrl != top()) {
5191       PreserveJVMState pjvms(this);
5192       set_control(not_subtype_ctrl);
5193       uncommon_trap(Deoptimization::Reason_intrinsic,
5194                     Deoptimization::Action_make_not_entrant);
5195       assert(stopped(), "Should be stopped");
5196     }
5197 
5198     const TypeKlassPtr* dest_klass_t = _gvn.type(dest_klass)->is_klassptr();
5199     const Type *toop = TypeOopPtr::make_from_klass(dest_klass_t->klass());
5200     src = _gvn.transform(new CheckCastPPNode(control(), src, toop));
5201 
5202     src_type = _gvn.type(src);
5203     top_src  = src_type->isa_aryptr();
5204 
5205     if (top_dest != NULL &&
5206         top_dest->elem()->make_oopptr() != NULL &&
5207         top_dest->elem()->make_oopptr()->can_be_value_type()) {
5208       generate_valueArray_guard(dest_klass, slow_region);
5209     }
5210 
5211     if (top_src != NULL &&
5212         top_src->elem()->make_oopptr() != NULL &&
5213         top_src->elem()->make_oopptr()->can_be_value_type()) {
5214       generate_valueArray_guard(src_klass, slow_region);
5215     }
5216 
5217     {
5218       PreserveJVMState pjvms(this);
5219       set_control(_gvn.transform(slow_region));
5220       uncommon_trap(Deoptimization::Reason_intrinsic,
5221                     Deoptimization::Action_make_not_entrant);
5222       assert(stopped(), "Should be stopped");
5223     }
5224   }
5225 
5226   arraycopy_move_allocation_here(alloc, dest, saved_jvms, saved_reexecute_sp, new_idx);
5227 
5228   if (stopped()) {
5229     return true;
5230   }
5231 
5232   Node* new_src = access_resolve(src, ACCESS_READ);
5233   Node* new_dest = access_resolve(dest, ACCESS_WRITE);
5234 

