< prev index next >

hotspot/src/share/vm/opto/library_call.cpp

Print this page
rev 6883 : 8057622: java/util/stream/test/org/openjdk/tests/java/util/stream/InfiniteStreamWithLimitOpTest: SEGV inside compiled code (sparc)
Summary: In Parse::array_store_check(), add a control edge FROM the IfTrue branch of the runtime type check of the destination array TO the load of _element_klass from the destination array.
Reviewed-by: kvn, roland, anoll
Contributed-by: Zoltan Majo <zoltan.majo@oracle.com>


//---------------------------load_mirror_from_klass----------------------------
// Given a Klass* node, load the java.lang.Class mirror oop stored at
// Klass::java_mirror_offset().
3381 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
3382   Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
     // NOTE(review): NULL control — the load is unpinned; presumably the
     // mirror field is stable after class initialization (confirm).
3383   return make_load(NULL, p, TypeInstPtr::MIRROR, T_OBJECT, MemNode::unordered);
3384 }
3385 
3386 //-----------------------load_klass_from_mirror_common-------------------------
3387 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
3388 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
3389 // and branch to the given path on the region.
3390 // If never_see_null, take an uncommon trap on null, so we can optimistically
3391 // compile for the non-null case.
3392 // If the region is NULL, force never_see_null = true.
3393 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,
3394                                                     bool never_see_null,
3395                                                     RegionNode* region,
3396                                                     int null_path,
3397                                                     int offset) {
     // With no region there is nowhere to route a null result, so a null must
     // become an uncommon trap instead.
3398   if (region == NULL)  never_see_null = true;
3399   Node* p = basic_plus_adr(mirror, offset);
     // OBJECT_OR_NULL: the klass slot is legitimately NULL for primitive
     // mirrors (see header comment above).
3400   const TypeKlassPtr*  kls_type = TypeKlassPtr::OBJECT_OR_NULL;
     // Load against immutable memory — the slice is treated as never-written.
3401   Node* kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, kls_type));
3402   Node* null_ctl = top();
3403   kls = null_check_oop(kls, &null_ctl, never_see_null);
3404   if (region != NULL) {
3405     // Set region->in(null_path) if the mirror is a primitive (e.g, int.class).
3406     region->init_req(null_path, null_ctl);
3407   } else {
     // never_see_null was forced true above, so the null path must be dead here.
3408     assert(null_ctl == top(), "no loose ends");
3409   }
3410   return kls;
3411 }
3412 
3413 //--------------------(inline_native_Class_query helpers)---------------------
3414 // Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE, JVM_ACC_HAS_FINALIZER.
3415 // Fall through if (mods & mask) == bits, take the guard otherwise.
3416 Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) {
3417   // Branch around if the given klass has the given modifier bit set.
3418   // Like generate_guard, adds a new path onto the region.
3419   Node* modp = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3420   Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT, MemNode::unordered);
3421   Node* mask = intcon(modifier_mask);


3557 
3558   case vmIntrinsics::_isPrimitive:
3559     query_value = intcon(0); // "normal" path produces false
3560     break;
3561 
3562   case vmIntrinsics::_getSuperclass:
3563     // The rules here are somewhat unfortunate, but we can still do better
3564     // with random logic than with a JNI call.
3565     // Interfaces store null or Object as _super, but must report null.
3566     // Arrays store an intermediate super as _super, but must report Object.
3567     // Other types can report the actual _super.
3568     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3569     if (generate_interface_guard(kls, region) != NULL)
3570       // A guard was added.  If the guard is taken, it was an interface.
3571       phi->add_req(null());
3572     if (generate_array_guard(kls, region) != NULL)
3573       // A guard was added.  If the guard is taken, it was an array.
3574       phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror())));
3575     // If we fall through, it's a plain class.  Get its _super.
3576     p = basic_plus_adr(kls, in_bytes(Klass::super_offset()));
3577     kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL));
3578     null_ctl = top();
3579     kls = null_check_oop(kls, &null_ctl);
3580     if (null_ctl != top()) {
3581       // If the guard is taken, Object.superClass is null (both klass and mirror).
3582       region->add_req(null_ctl);
3583       phi   ->add_req(null());
3584     }
3585     if (!stopped()) {
3586       query_value = load_mirror_from_klass(kls);
3587     }
3588     break;
3589 
3590   case vmIntrinsics::_getComponentType:
3591     if (generate_array_guard(kls, region) != NULL) {
3592       // Be sure to pin the oop load to the guard edge just created:
3593       Node* is_array_ctrl = region->in(region->req()-1);
3594       Node* cma = basic_plus_adr(kls, in_bytes(ArrayKlass::component_mirror_offset()));
3595       Node* cmo = make_load(is_array_ctrl, cma, TypeInstPtr::MIRROR, T_OBJECT, MemNode::unordered);
3596       phi->add_req(cmo);
3597     }


3639     PATH_LIMIT
3640   };
3641 
3642   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
3643   Node*       phi    = new (C) PhiNode(region, TypeInt::BOOL);
3644   record_for_igvn(region);
3645 
3646   const TypePtr* adr_type = TypeRawPtr::BOTTOM;   // memory type of loads
3647   const TypeKlassPtr* kls_type = TypeKlassPtr::OBJECT_OR_NULL;
3648   int class_klass_offset = java_lang_Class::klass_offset_in_bytes();
3649 
3650   // First null-check both mirrors and load each mirror's klass metaobject.
3651   int which_arg;
3652   for (which_arg = 0; which_arg <= 1; which_arg++) {
3653     Node* arg = args[which_arg];
3654     arg = null_check(arg);
3655     if (stopped())  break;
3656     args[which_arg] = arg;
3657 
3658     Node* p = basic_plus_adr(arg, class_klass_offset);
3659     Node* kls = LoadKlassNode::make(_gvn, immutable_memory(), p, adr_type, kls_type);
3660     klasses[which_arg] = _gvn.transform(kls);
3661   }
3662 
3663   // Having loaded both klasses, test each for null.
3664   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3665   for (which_arg = 0; which_arg <= 1; which_arg++) {
3666     Node* kls = klasses[which_arg];
3667     Node* null_ctl = top();
3668     kls = null_check_oop(kls, &null_ctl, never_see_null);
3669     int prim_path = (which_arg == 0 ? _prim_0_path : _prim_1_path);
3670     region->init_req(prim_path, null_ctl);
3671     if (stopped())  break;
3672     klasses[which_arg] = kls;
3673   }
3674 
3675   if (!stopped()) {
3676     // now we have two reference types, in klasses[0..1]
3677     Node* subk   = klasses[1];  // the argument to isAssignableFrom
3678     Node* superk = klasses[0];  // the receiver
3679     region->set_req(_both_ref_path, gen_subtype_check(subk, superk));


5155 
5156     // Generate the subtype check.
5157     // This might fold up statically, or then again it might not.
5158     //
5159     // Non-static example:  Copying List<String>.elements to a new String[].
5160     // The backing store for a List<String> is always an Object[],
5161     // but its elements are always type String, if the generic types
5162     // are correct at the source level.
5163     //
5164     // Test S[] against D[], not S against D, because (probably)
5165     // the secondary supertype cache is less busy for S[] than S.
5166     // This usually only matters when D is an interface.
5167     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
5168     // Plug failing path into checked_oop_disjoint_arraycopy
5169     if (not_subtype_ctrl != top()) {
5170       PreserveJVMState pjvms(this);
5171       set_control(not_subtype_ctrl);
5172       // (At this point we can assume disjoint_bases, since types differ.)
5173       int ek_offset = in_bytes(ObjArrayKlass::element_klass_offset());
5174       Node* p1 = basic_plus_adr(dest_klass, ek_offset);
5175       Node* n1 = LoadKlassNode::make(_gvn, immutable_memory(), p1, TypeRawPtr::BOTTOM);
5176       Node* dest_elem_klass = _gvn.transform(n1);
5177       Node* cv = generate_checkcast_arraycopy(adr_type,
5178                                               dest_elem_klass,
5179                                               src, src_offset, dest, dest_offset,
5180                                               ConvI2X(copy_length), dest_uninitialized);
5181       if (cv == NULL)  cv = intcon(-1);  // failure (no stub available)
5182       checked_control = control();
5183       checked_i_o     = i_o();
5184       checked_mem     = memory(adr_type);
5185       checked_value   = cv;
5186     }
5187     // At this point we know we do not need type checks on oop stores.
5188 
5189     // Let's see if we need card marks:
5190     if (alloc != NULL && use_ReduceInitialCardMarks()) {
5191       // If we do not need card marks, copy using the jint or jlong stub.
5192       copy_type = LP64_ONLY(UseCompressedOops ? T_INT : T_LONG) NOT_LP64(T_INT);
5193       assert(type2aelembytes(basic_elem_type) == type2aelembytes(copy_type),
5194              "sizes agree");
5195     }




//---------------------------load_mirror_from_klass----------------------------
// Given a Klass* node, load the java.lang.Class mirror oop stored at
// Klass::java_mirror_offset().
3381 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
3382   Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
     // NOTE(review): NULL control — the load is unpinned; presumably the
     // mirror field is stable after class initialization (confirm).
3383   return make_load(NULL, p, TypeInstPtr::MIRROR, T_OBJECT, MemNode::unordered);
3384 }
3385 
3386 //-----------------------load_klass_from_mirror_common-------------------------
3387 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
3388 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
3389 // and branch to the given path on the region.
3390 // If never_see_null, take an uncommon trap on null, so we can optimistically
3391 // compile for the non-null case.
3392 // If the region is NULL, force never_see_null = true.
3393 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,
3394                                                     bool never_see_null,
3395                                                     RegionNode* region,
3396                                                     int null_path,
3397                                                     int offset) {
     // With no region there is nowhere to route a null result, so a null must
     // become an uncommon trap instead.
3398   if (region == NULL)  never_see_null = true;
3399   Node* p = basic_plus_adr(mirror, offset);
     // OBJECT_OR_NULL: the klass slot is legitimately NULL for primitive
     // mirrors (see header comment above).
3400   const TypeKlassPtr*  kls_type = TypeKlassPtr::OBJECT_OR_NULL;
     // NULL control input: this load is intentionally unpinned; memory comes
     // from the immutable slice, treated as never-written.
3401   Node* kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, TypeRawPtr::BOTTOM, kls_type));
3402   Node* null_ctl = top();
3403   kls = null_check_oop(kls, &null_ctl, never_see_null);
3404   if (region != NULL) {
3405     // Set region->in(null_path) if the mirror is a primitive (e.g, int.class).
3406     region->init_req(null_path, null_ctl);
3407   } else {
     // never_see_null was forced true above, so the null path must be dead here.
3408     assert(null_ctl == top(), "no loose ends");
3409   }
3410   return kls;
3411 }
3412 
3413 //--------------------(inline_native_Class_query helpers)---------------------
3414 // Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE, JVM_ACC_HAS_FINALIZER.
3415 // Fall through if (mods & mask) == bits, take the guard otherwise.
3416 Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) {
3417   // Branch around if the given klass has the given modifier bit set.
3418   // Like generate_guard, adds a new path onto the region.
3419   Node* modp = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3420   Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT, MemNode::unordered);
3421   Node* mask = intcon(modifier_mask);


3557 
3558   case vmIntrinsics::_isPrimitive:
3559     query_value = intcon(0); // "normal" path produces false
3560     break;
3561 
3562   case vmIntrinsics::_getSuperclass:
3563     // The rules here are somewhat unfortunate, but we can still do better
3564     // with random logic than with a JNI call.
3565     // Interfaces store null or Object as _super, but must report null.
3566     // Arrays store an intermediate super as _super, but must report Object.
3567     // Other types can report the actual _super.
3568     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3569     if (generate_interface_guard(kls, region) != NULL)
3570       // A guard was added.  If the guard is taken, it was an interface.
3571       phi->add_req(null());
3572     if (generate_array_guard(kls, region) != NULL)
3573       // A guard was added.  If the guard is taken, it was an array.
3574       phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror())));
3575     // If we fall through, it's a plain class.  Get its _super.
3576     p = basic_plus_adr(kls, in_bytes(Klass::super_offset()));
3577     kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL));
3578     null_ctl = top();
3579     kls = null_check_oop(kls, &null_ctl);
3580     if (null_ctl != top()) {
3581       // If the guard is taken, Object.superClass is null (both klass and mirror).
3582       region->add_req(null_ctl);
3583       phi   ->add_req(null());
3584     }
3585     if (!stopped()) {
3586       query_value = load_mirror_from_klass(kls);
3587     }
3588     break;
3589 
3590   case vmIntrinsics::_getComponentType:
3591     if (generate_array_guard(kls, region) != NULL) {
3592       // Be sure to pin the oop load to the guard edge just created:
3593       Node* is_array_ctrl = region->in(region->req()-1);
3594       Node* cma = basic_plus_adr(kls, in_bytes(ArrayKlass::component_mirror_offset()));
3595       Node* cmo = make_load(is_array_ctrl, cma, TypeInstPtr::MIRROR, T_OBJECT, MemNode::unordered);
3596       phi->add_req(cmo);
3597     }


3639     PATH_LIMIT
3640   };
3641 
3642   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
3643   Node*       phi    = new (C) PhiNode(region, TypeInt::BOOL);
3644   record_for_igvn(region);
3645 
3646   const TypePtr* adr_type = TypeRawPtr::BOTTOM;   // memory type of loads
3647   const TypeKlassPtr* kls_type = TypeKlassPtr::OBJECT_OR_NULL;
3648   int class_klass_offset = java_lang_Class::klass_offset_in_bytes();
3649 
3650   // First null-check both mirrors and load each mirror's klass metaobject.
3651   int which_arg;
3652   for (which_arg = 0; which_arg <= 1; which_arg++) {
3653     Node* arg = args[which_arg];
3654     arg = null_check(arg);
3655     if (stopped())  break;
3656     args[which_arg] = arg;
3657 
3658     Node* p = basic_plus_adr(arg, class_klass_offset);
3659     Node* kls = LoadKlassNode::make(_gvn, NULL, immutable_memory(), p, adr_type, kls_type);
3660     klasses[which_arg] = _gvn.transform(kls);
3661   }
3662 
3663   // Having loaded both klasses, test each for null.
3664   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3665   for (which_arg = 0; which_arg <= 1; which_arg++) {
3666     Node* kls = klasses[which_arg];
3667     Node* null_ctl = top();
3668     kls = null_check_oop(kls, &null_ctl, never_see_null);
3669     int prim_path = (which_arg == 0 ? _prim_0_path : _prim_1_path);
3670     region->init_req(prim_path, null_ctl);
3671     if (stopped())  break;
3672     klasses[which_arg] = kls;
3673   }
3674 
3675   if (!stopped()) {
3676     // now we have two reference types, in klasses[0..1]
3677     Node* subk   = klasses[1];  // the argument to isAssignableFrom
3678     Node* superk = klasses[0];  // the receiver
3679     region->set_req(_both_ref_path, gen_subtype_check(subk, superk));


5155 
5156     // Generate the subtype check.
5157     // This might fold up statically, or then again it might not.
5158     //
5159     // Non-static example:  Copying List<String>.elements to a new String[].
5160     // The backing store for a List<String> is always an Object[],
5161     // but its elements are always type String, if the generic types
5162     // are correct at the source level.
5163     //
5164     // Test S[] against D[], not S against D, because (probably)
5165     // the secondary supertype cache is less busy for S[] than S.
5166     // This usually only matters when D is an interface.
5167     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
5168     // Plug failing path into checked_oop_disjoint_arraycopy
5169     if (not_subtype_ctrl != top()) {
5170       PreserveJVMState pjvms(this);
5171       set_control(not_subtype_ctrl);
5172       // (At this point we can assume disjoint_bases, since types differ.)
5173       int ek_offset = in_bytes(ObjArrayKlass::element_klass_offset());
5174       Node* p1 = basic_plus_adr(dest_klass, ek_offset);
5175       Node* n1 = LoadKlassNode::make(_gvn, NULL, immutable_memory(), p1, TypeRawPtr::BOTTOM);
5176       Node* dest_elem_klass = _gvn.transform(n1);
5177       Node* cv = generate_checkcast_arraycopy(adr_type,
5178                                               dest_elem_klass,
5179                                               src, src_offset, dest, dest_offset,
5180                                               ConvI2X(copy_length), dest_uninitialized);
5181       if (cv == NULL)  cv = intcon(-1);  // failure (no stub available)
5182       checked_control = control();
5183       checked_i_o     = i_o();
5184       checked_mem     = memory(adr_type);
5185       checked_value   = cv;
5186     }
5187     // At this point we know we do not need type checks on oop stores.
5188 
5189     // Let's see if we need card marks:
5190     if (alloc != NULL && use_ReduceInitialCardMarks()) {
5191       // If we do not need card marks, copy using the jint or jlong stub.
5192       copy_type = LP64_ONLY(UseCompressedOops ? T_INT : T_LONG) NOT_LP64(T_INT);
5193       assert(type2aelembytes(basic_elem_type) == type2aelembytes(copy_type),
5194              "sizes agree");
5195     }


< prev index next >