< prev index next >

src/hotspot/share/opto/library_call.cpp

Print this page




 195     TypeArray,
 196     ValueArray
 197   };
 198 
       // Guard: returns the control path taken when 'kls' is any array klass;
       // non-array control continues (or is added to 'region' by the common helper).
 199   Node* generate_array_guard(Node* kls, RegionNode* region) {
 200     return generate_array_guard_common(kls, region, AnyArray);
 201   }
       // Guard: returns the control path taken when 'kls' is NOT an array klass.
 202   Node* generate_non_array_guard(Node* kls, RegionNode* region) {
 203     return generate_array_guard_common(kls, region, NonArray);
 204   }
       // Guard: returns the control path taken when 'kls' is an object-array klass.
 205   Node* generate_objArray_guard(Node* kls, RegionNode* region) {
 206     return generate_array_guard_common(kls, region, ObjectArray);
 207   }
       // Guard: returns the control path taken when 'kls' is NOT an object-array klass.
 208   Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
 209     return generate_array_guard_common(kls, region, NonObjectArray);
 210   }
       // Guard: returns the control path taken when 'kls' is a primitive (type) array klass.
 211   Node* generate_typeArray_guard(Node* kls, RegionNode* region) {
 212     return generate_array_guard_common(kls, region, TypeArray);
 213   }
       // Guard: returns the control path taken when 'kls' is a flattened
       // value-array klass (kind ValueArray).
 214   Node* generate_valueArray_guard(Node* kls, RegionNode* region) {

 215     return generate_array_guard_common(kls, region, ValueArray);
 216   }
 217   Node* generate_array_guard_common(Node* kls, RegionNode* region, ArrayKind kind);
 218   Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
 219   CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
 220                                      bool is_virtual = false, bool is_static = false);
       // Convenience wrapper: emit a call to the intrinsic's method with
       // is_virtual=false, is_static=true (see generate_method_call declaration).
 221   CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
 222     return generate_method_call(method_id, false, true);
 223   }
       // Convenience wrapper: emit a call to the intrinsic's method with
       // is_virtual=true, is_static=false (see generate_method_call declaration).
 224   CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
 225     return generate_method_call(method_id, true, false);
 226   }
 227   Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
 228   Node * field_address_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
 229 
 230   Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
 231   bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
 232   bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
 233   bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);
 234   Node* make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,


4018     // loads/stores but it is legal only if we're sure the
4019     // Arrays.copyOf would succeed. So we need all input arguments
4020     // to the copyOf to be validated, including that the copy to the
4021     // new array won't trigger an ArrayStoreException. That subtype
4022     // check can be optimized if we know something on the type of
4023     // the input array from type speculation.
4024     if (_gvn.type(klass_node)->singleton() && !stopped()) {
4025       ciKlass* subk   = _gvn.type(original_kls)->is_klassptr()->klass();
4026       ciKlass* superk = _gvn.type(klass_node)->is_klassptr()->klass();
4027 
4028       int test = C->static_subtype_check(superk, subk);
4029       if (test != Compile::SSC_always_true && test != Compile::SSC_always_false) {
4030         const TypeOopPtr* t_original = _gvn.type(original)->is_oopptr();
4031         if (t_original->speculative_type() != NULL) {
4032           original = maybe_cast_profiled_obj(original, t_original->speculative_type(), true);
4033           original_kls = load_object_klass(original);
4034         }
4035       }
4036     }
4037 
4038     if (EnableValhalla) {
4039       // Either both or neither new array klass and original array
4040       // klass must be flattened
4041       Node* flattened_klass = generate_valueArray_guard(klass_node, NULL);

4042       generate_valueArray_guard(original_kls, bailout);
4043       if (flattened_klass != NULL) {

4044         RegionNode* r = new RegionNode(2);
4045         record_for_igvn(r);
4046         r->init_req(1, control());
4047         set_control(flattened_klass);

4048         generate_valueArray_guard(original_kls, r);

4049         bailout->add_req(control());
4050         set_control(_gvn.transform(r));
4051       }
4052     }
4053 
4054     // Bail out if either start or end is negative.
4055     generate_negative_guard(start, bailout, &start);
4056     generate_negative_guard(end,   bailout, &end);
4057 
4058     Node* length = end;
4059     if (_gvn.type(start) != TypeInt::ZERO) {
4060       length = _gvn.transform(new SubINode(end, start));
4061     }
4062 
4063     // Bail out if length is negative.
4064     // Without this the new_array would throw
4065     // NegativeArraySizeException but IllegalArgumentException is what
4066     // should be thrown
4067     generate_negative_guard(length, bailout, &length);
4068 


4740       PATH_LIMIT
4741     };
4742     RegionNode* result_reg = new RegionNode(PATH_LIMIT);
4743     result_val             = new PhiNode(result_reg, TypeInstPtr::NOTNULL);
4744     PhiNode*    result_i_o = new PhiNode(result_reg, Type::ABIO);
4745     PhiNode*    result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
4746     record_for_igvn(result_reg);
4747 
4748     // We only go to the fast case code if we pass a number of guards.
4749     // The paths which do not pass are accumulated in the slow_region.
4750     RegionNode* slow_region = new RegionNode(1);
4751     record_for_igvn(slow_region);
4752 
4753     Node* array_ctl = generate_array_guard(obj_klass, (RegionNode*)NULL);
4754     if (array_ctl != NULL) {
4755       // It's an array.
4756       PreserveJVMState pjvms(this);
4757       set_control(array_ctl);
4758 
4759       BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
4760       if (bs->array_copy_requires_gc_barriers(true, T_OBJECT, true, BarrierSetC2::Parsing)) {

4761         // Flattened value type array may have object field that would require a
4762         // write barrier. Conservatively, go to slow path.
4763         generate_valueArray_guard(obj_klass, slow_region);
4764       }
4765 
4766       if (!stopped()) {
4767         Node* obj_length = load_array_length(obj);
4768         Node* obj_size  = NULL;
4769         // Load element mirror
4770         Node* array_type_mirror = load_mirror_from_klass(obj_klass);
4771         Node* p = basic_plus_adr(array_type_mirror, java_lang_Class::component_mirror_offset_in_bytes());
4772         Node* elem_mirror = access_load_at(array_type_mirror, p, _gvn.type(p)->is_ptr(), TypeInstPtr::MIRROR, T_OBJECT, IN_HEAP);
4773 
4774         Node* alloc_obj = new_array(obj_klass, obj_length, 0, &obj_size, false, elem_mirror);
4775 
4776         BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
4777         if (bs->array_copy_requires_gc_barriers(true, T_OBJECT, true, BarrierSetC2::Parsing)) {
4778           // If it is an oop array, it requires very special treatment,
4779           // because gc barriers are required when accessing the array.
4780           Node* is_obja = generate_objArray_guard(obj_klass, (RegionNode*)NULL);


5225 
5226     // (9) each element of an oop array must be assignable
5227     Node* src_klass  = load_object_klass(src);
5228     Node* dest_klass = load_object_klass(dest);
5229     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
5230 
5231     if (not_subtype_ctrl != top()) {
5232       PreserveJVMState pjvms(this);
5233       set_control(not_subtype_ctrl);
5234       uncommon_trap(Deoptimization::Reason_intrinsic,
5235                     Deoptimization::Action_make_not_entrant);
5236       assert(stopped(), "Should be stopped");
5237     }
5238 
5239     const TypeKlassPtr* dest_klass_t = _gvn.type(dest_klass)->is_klassptr();
5240     const Type* toop = TypeOopPtr::make_from_klass(dest_klass_t->klass());
5241     src = _gvn.transform(new CheckCastPPNode(control(), src, toop));
5242     src_type = _gvn.type(src);
5243     top_src  = src_type->isa_aryptr();
5244 
5245     if (top_dest != NULL &&
5246         top_dest->elem()->make_oopptr() != NULL &&
5247         top_dest->elem()->make_oopptr()->can_be_value_type()) {
5248       generate_valueArray_guard(dest_klass, slow_region);
5249     }
5250 
5251     if (top_src != NULL &&
5252         top_src->elem()->make_oopptr() != NULL &&
5253         top_src->elem()->make_oopptr()->can_be_value_type()) {
5254       generate_valueArray_guard(src_klass, slow_region);
5255     }
5256 
5257     {
5258       PreserveJVMState pjvms(this);
5259       set_control(_gvn.transform(slow_region));
5260       uncommon_trap(Deoptimization::Reason_intrinsic,
5261                     Deoptimization::Action_make_not_entrant);
5262       assert(stopped(), "Should be stopped");
5263     }
5264   }
5265 
5266   arraycopy_move_allocation_here(alloc, dest, saved_jvms, saved_reexecute_sp, new_idx);
5267 
5268   if (stopped()) {
5269     return true;
5270   }
5271 
5272   Node* new_src = access_resolve(src, ACCESS_READ);
5273   Node* new_dest = access_resolve(dest, ACCESS_WRITE);




 195     TypeArray,
 196     ValueArray
 197   };
 198 
       // Guard: returns the control path taken when 'kls' is any array klass;
       // non-array control continues (or is added to 'region' by the common helper).
 199   Node* generate_array_guard(Node* kls, RegionNode* region) {
 200     return generate_array_guard_common(kls, region, AnyArray);
 201   }
       // Guard: returns the control path taken when 'kls' is NOT an array klass.
 202   Node* generate_non_array_guard(Node* kls, RegionNode* region) {
 203     return generate_array_guard_common(kls, region, NonArray);
 204   }
       // Guard: returns the control path taken when 'kls' is an object-array klass.
 205   Node* generate_objArray_guard(Node* kls, RegionNode* region) {
 206     return generate_array_guard_common(kls, region, ObjectArray);
 207   }
       // Guard: returns the control path taken when 'kls' is NOT an object-array klass.
 208   Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
 209     return generate_array_guard_common(kls, region, NonObjectArray);
 210   }
       // Guard: returns the control path taken when 'kls' is a primitive (type) array klass.
 211   Node* generate_typeArray_guard(Node* kls, RegionNode* region) {
 212     return generate_array_guard_common(kls, region, TypeArray);
 213   }
       // Guard: returns the control path taken when 'kls' is a flattened
       // value-array klass (kind ValueArray). Callers must only emit this
       // guard when ValueArrayFlatten is on — with flattening disabled no
       // array can ever be flattened, so the guard would be dead code.
 214   Node* generate_valueArray_guard(Node* kls, RegionNode* region) {
 215     assert(ValueArrayFlatten, "can never be flattened");
 216     return generate_array_guard_common(kls, region, ValueArray);
 217   }
 218   Node* generate_array_guard_common(Node* kls, RegionNode* region, ArrayKind kind);
 219   Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
 220   CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
 221                                      bool is_virtual = false, bool is_static = false);
       // Convenience wrapper: emit a call to the intrinsic's method with
       // is_virtual=false, is_static=true (see generate_method_call declaration).
 222   CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
 223     return generate_method_call(method_id, false, true);
 224   }
       // Convenience wrapper: emit a call to the intrinsic's method with
       // is_virtual=true, is_static=false (see generate_method_call declaration).
 225   CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
 226     return generate_method_call(method_id, true, false);
 227   }
 228   Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
 229   Node * field_address_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
 230 
 231   Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
 232   bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
 233   bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
 234   bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);
 235   Node* make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,


4019     // loads/stores but it is legal only if we're sure the
4020     // Arrays.copyOf would succeed. So we need all input arguments
4021     // to the copyOf to be validated, including that the copy to the
4022     // new array won't trigger an ArrayStoreException. That subtype
4023     // check can be optimized if we know something on the type of
4024     // the input array from type speculation.
4025     if (_gvn.type(klass_node)->singleton() && !stopped()) {
4026       ciKlass* subk   = _gvn.type(original_kls)->is_klassptr()->klass();
4027       ciKlass* superk = _gvn.type(klass_node)->is_klassptr()->klass();
4028 
4029       int test = C->static_subtype_check(superk, subk);
4030       if (test != Compile::SSC_always_true && test != Compile::SSC_always_false) {
4031         const TypeOopPtr* t_original = _gvn.type(original)->is_oopptr();
4032         if (t_original->speculative_type() != NULL) {
4033           original = maybe_cast_profiled_obj(original, t_original->speculative_type(), true);
4034           original_kls = load_object_klass(original);
4035         }
4036       }
4037     }
4038 
4039     if (ValueArrayFlatten) {
4040       // Either both or neither new array klass and original array
4041       // klass must be flattened
4042       Node* is_flat = generate_valueArray_guard(klass_node, NULL);
4043       if (!original_t->is_not_flat()) {
4044         generate_valueArray_guard(original_kls, bailout);
4045       }
4046       if (is_flat != NULL) {
4047         RegionNode* r = new RegionNode(2);
4048         record_for_igvn(r);
4049         r->init_req(1, control());
4050         set_control(is_flat);
4051         if (!original_t->is_not_flat()) {
4052           generate_valueArray_guard(original_kls, r);
4053         }
4054         bailout->add_req(control());
4055         set_control(_gvn.transform(r));
4056       }
4057     }
4058 
4059     // Bail out if either start or end is negative.
4060     generate_negative_guard(start, bailout, &start);
4061     generate_negative_guard(end,   bailout, &end);
4062 
4063     Node* length = end;
4064     if (_gvn.type(start) != TypeInt::ZERO) {
4065       length = _gvn.transform(new SubINode(end, start));
4066     }
4067 
4068     // Bail out if length is negative.
4069     // Without this the new_array would throw
4070     // NegativeArraySizeException but IllegalArgumentException is what
4071     // should be thrown
4072     generate_negative_guard(length, bailout, &length);
4073 


4745       PATH_LIMIT
4746     };
4747     RegionNode* result_reg = new RegionNode(PATH_LIMIT);
4748     result_val             = new PhiNode(result_reg, TypeInstPtr::NOTNULL);
4749     PhiNode*    result_i_o = new PhiNode(result_reg, Type::ABIO);
4750     PhiNode*    result_mem = new PhiNode(result_reg, Type::MEMORY, TypePtr::BOTTOM);
4751     record_for_igvn(result_reg);
4752 
4753     // We only go to the fast case code if we pass a number of guards.
4754     // The paths which do not pass are accumulated in the slow_region.
4755     RegionNode* slow_region = new RegionNode(1);
4756     record_for_igvn(slow_region);
4757 
4758     Node* array_ctl = generate_array_guard(obj_klass, (RegionNode*)NULL);
4759     if (array_ctl != NULL) {
4760       // It's an array.
4761       PreserveJVMState pjvms(this);
4762       set_control(array_ctl);
4763 
4764       BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
4765       if (bs->array_copy_requires_gc_barriers(true, T_OBJECT, true, BarrierSetC2::Parsing) &&
4766           (!obj_type->isa_aryptr() || !obj_type->is_aryptr()->is_not_flat())) {
4767         // Flattened value type array may have object field that would require a
4768         // write barrier. Conservatively, go to slow path.
4769         generate_valueArray_guard(obj_klass, slow_region);
4770       }
4771 
4772       if (!stopped()) {
4773         Node* obj_length = load_array_length(obj);
4774         Node* obj_size  = NULL;
4775         // Load element mirror
4776         Node* array_type_mirror = load_mirror_from_klass(obj_klass);
4777         Node* p = basic_plus_adr(array_type_mirror, java_lang_Class::component_mirror_offset_in_bytes());
4778         Node* elem_mirror = access_load_at(array_type_mirror, p, _gvn.type(p)->is_ptr(), TypeInstPtr::MIRROR, T_OBJECT, IN_HEAP);
4779 
4780         Node* alloc_obj = new_array(obj_klass, obj_length, 0, &obj_size, false, elem_mirror);
4781 
4782         BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
4783         if (bs->array_copy_requires_gc_barriers(true, T_OBJECT, true, BarrierSetC2::Parsing)) {
4784           // If it is an oop array, it requires very special treatment,
4785           // because gc barriers are required when accessing the array.
4786           Node* is_obja = generate_objArray_guard(obj_klass, (RegionNode*)NULL);


5231 
5232     // (9) each element of an oop array must be assignable
5233     Node* src_klass  = load_object_klass(src);
5234     Node* dest_klass = load_object_klass(dest);
5235     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
5236 
5237     if (not_subtype_ctrl != top()) {
5238       PreserveJVMState pjvms(this);
5239       set_control(not_subtype_ctrl);
5240       uncommon_trap(Deoptimization::Reason_intrinsic,
5241                     Deoptimization::Action_make_not_entrant);
5242       assert(stopped(), "Should be stopped");
5243     }
5244 
5245     const TypeKlassPtr* dest_klass_t = _gvn.type(dest_klass)->is_klassptr();
5246     const Type* toop = TypeOopPtr::make_from_klass(dest_klass_t->klass());
5247     src = _gvn.transform(new CheckCastPPNode(control(), src, toop));
5248     src_type = _gvn.type(src);
5249     top_src  = src_type->isa_aryptr();
5250 
5251     if (top_dest != NULL && !top_dest->elem()->isa_valuetype() && !top_dest->is_not_flat()) {


5252       generate_valueArray_guard(dest_klass, slow_region);
5253     }
5254 
5255     if (top_src != NULL && !top_src->elem()->isa_valuetype() && !top_src->is_not_flat()) {


5256       generate_valueArray_guard(src_klass, slow_region);
5257     }
5258 
5259     {
5260       PreserveJVMState pjvms(this);
5261       set_control(_gvn.transform(slow_region));
5262       uncommon_trap(Deoptimization::Reason_intrinsic,
5263                     Deoptimization::Action_make_not_entrant);
5264       assert(stopped(), "Should be stopped");
5265     }
5266   }
5267 
5268   arraycopy_move_allocation_here(alloc, dest, saved_jvms, saved_reexecute_sp, new_idx);
5269 
5270   if (stopped()) {
5271     return true;
5272   }
5273 
5274   Node* new_src = access_resolve(src, ACCESS_READ);
5275   Node* new_dest = access_resolve(dest, ACCESS_WRITE);


< prev index next >