src/share/vm/opto/library_call.cpp (sdiff for bug 7118863)





2148     }
2149   }
2150 
2151   // The compile-time filters did not reject base_oop/offset, so
2152   // we need to generate the following runtime filters:
2153   //
2154   // if (offset == java_lang_ref_Reference::referent_offset) {
2155   //   if (base != null) {
2156   //     if (klass(base)->reference_type() != REF_NONE) {
2157   //       pre_barrier(_, pre_val, ...);
2158   //     }
2159   //   }
2160   // }
2161 
2162   float likely  = PROB_LIKELY(0.999);
2163   float unlikely  = PROB_UNLIKELY(0.999);
2164 
2165   IdealKit ideal(this);
2166 #define __ ideal.
2167 
2168   const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() +
2169                                         sizeof(oopDesc);
2170 
2171   Node* referent_off = __ ConX(java_lang_ref_Reference::referent_offset);
2172 
2173   __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
2174     __ if_then(base_oop, BoolTest::ne, null(), likely); {
2175 
2176       // Update graphKit memory and control from IdealKit.
2177       sync_kit(ideal);
2178 
2179       Node* ref_klass_con = makecon(TypeKlassPtr::make(env()->Reference_klass()));
2180       Node* is_instof = gen_instanceof(base_oop, ref_klass_con);
2181 
2182       // Update IdealKit memory and control from graphKit.
2183       __ sync_kit(this);
2184 
2185       Node* one = __ ConI(1);
2186 
2187       __ if_then(is_instof, BoolTest::eq, one, unlikely); {
2188 
2189         // Update graphKit from IdealKit.
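
Every hunk in this sdiff makes the same mechanical change: the `+ sizeof(oopDesc)` addend disappears from klass-field address arithmetic (see old lines 2168-2169 above). A before/after sketch of the pattern; the reading that the *_offset_in_bytes() accessors now fold in the object header themselves is an inference from the hunks, not from the changeset description:

  // Old side: callers add the oopDesc header to the raw field offset.
  Node* p_old = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes()
                                    + sizeof(oopDesc));
  // New side: the accessor is assumed to include the header already,
  // so callers use its result directly.
  Node* p_new = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes());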


2789 
2790 bool LibraryCallKit::inline_unsafe_allocate() {
2791   if (callee()->is_static())  return false;  // caller must have the capability!
2792   int nargs = 1 + 1;
2793   assert(signature()->size() == nargs-1, "alloc has 1 argument");
2794   null_check_receiver(callee());  // check then ignore argument(0)
2795   _sp += nargs;  // set original stack for use by uncommon_trap
2796   Node* cls = do_null_check(argument(1), T_OBJECT);
2797   _sp -= nargs;
2798   if (stopped())  return true;
2799 
2800   Node* kls = load_klass_from_mirror(cls, false, nargs, NULL, 0);
2801   _sp += nargs;  // set original stack for use by uncommon_trap
2802   kls = do_null_check(kls, T_OBJECT);
2803   _sp -= nargs;
2804   if (stopped())  return true;  // argument was like int.class
2805 
2806   // Note:  The argument might still be an illegal value like
2807   // Serializable.class or Object[].class.   The runtime will handle it.
2808   // But we must make an explicit check for initialization.
2809   Node* insp = basic_plus_adr(kls, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc));
2810   Node* inst = make_load(NULL, insp, TypeInt::INT, T_INT);
2811   Node* bits = intcon(instanceKlass::fully_initialized);
2812   Node* test = _gvn.transform( new (C, 3) SubINode(inst, bits) );
2813   // The 'test' is non-zero if we need to take a slow path.
2814 
2815   Node* obj = new_instance(kls, test);
2816   push(obj);
2817 
2818   return true;
2819 }
2820 
2821 //------------------------inline_native_time_funcs--------------
2822 // inline code for System.currentTimeMillis() and System.nanoTime()
2823 // these have the same type and signature
2824 bool LibraryCallKit::inline_native_time_funcs(bool isNano) {
2825   address funcAddr = isNano ? CAST_FROM_FN_PTR(address, os::javaTimeNanos) :
2826                               CAST_FROM_FN_PTR(address, os::javaTimeMillis);
2827   const char * funcName = isNano ? "nanoTime" : "currentTimeMillis";
2828   const TypeFunc *tf = OptoRuntime::current_time_millis_Type();
2829   const TypePtr* no_memory_effects = NULL;


2937     Node* mem_phi  = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
2938 
2939     result_rgn->init_req(slow_result_path, control());
2940     io_phi    ->init_req(slow_result_path, i_o());
2941     mem_phi   ->init_req(slow_result_path, reset_memory());
2942     result_val->init_req(slow_result_path, slow_val);
2943 
2944     set_all_memory( _gvn.transform(mem_phi) );
2945     set_i_o(        _gvn.transform(io_phi) );
2946   }
2947 
2948   push_result(result_rgn, result_val);
2949   C->set_has_split_ifs(true); // Has chance for split-if optimization
2950 
2951   return true;
2952 }
2953 
2954 //---------------------------load_mirror_from_klass----------------------------
2955 // Given a klass oop, load its java mirror (a java.lang.Class oop).
2956 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
2957   Node* p = basic_plus_adr(klass, Klass::java_mirror_offset_in_bytes() + sizeof(oopDesc));
2958   return make_load(NULL, p, TypeInstPtr::MIRROR, T_OBJECT);
2959 }
2960 
2961 //-----------------------load_klass_from_mirror_common-------------------------
2962 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
2963 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
2964 // and branch to the given path on the region.
2965 // If never_see_null, take an uncommon trap on null, so we can optimistically
2966 // compile for the non-null case.
2967 // If the region is NULL, force never_see_null = true.
2968 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,
2969                                                     bool never_see_null,
2970                                                     int nargs,
2971                                                     RegionNode* region,
2972                                                     int null_path,
2973                                                     int offset) {
2974   if (region == NULL)  never_see_null = true;
2975   Node* p = basic_plus_adr(mirror, offset);
2976   const TypeKlassPtr*  kls_type = TypeKlassPtr::OBJECT_OR_NULL;
2977   Node* kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, kls_type) );
2978   _sp += nargs; // any deopt will start just before call to enclosing method
2979   Node* null_ctl = top();
2980   kls = null_check_oop(kls, &null_ctl, never_see_null);
2981   if (region != NULL) {
2982     // Set region->in(null_path) if the mirror is a primitive (e.g., int.class).
2983     region->init_req(null_path, null_ctl);
2984   } else {
2985     assert(null_ctl == top(), "no loose ends");
2986   }
2987   _sp -= nargs;
2988   return kls;
2989 }
2990 
2991 //--------------------(inline_native_Class_query helpers)---------------------
2992 // Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE, JVM_ACC_HAS_FINALIZER.
2993 // Fall through if (mods & mask) == bits, take the guard otherwise.
2994 Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) {
2995   // Branch around if the given klass has the given modifier bit set.
2996   // Like generate_guard, adds a new path onto the region.
2997   Node* modp = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc));
2998   Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT);
2999   Node* mask = intcon(modifier_mask);
3000   Node* bits = intcon(modifier_bits);
3001   Node* mbit = _gvn.transform( new (C, 3) AndINode(mods, mask) );
3002   Node* cmp  = _gvn.transform( new (C, 3) CmpINode(mbit, bits) );
3003   Node* bol  = _gvn.transform( new (C, 2) BoolNode(cmp, BoolTest::ne) );
3004   return generate_fair_guard(bol, region);
3005 }
3006 Node* LibraryCallKit::generate_interface_guard(Node* kls, RegionNode* region) {
3007   return generate_access_flags_guard(kls, JVM_ACC_INTERFACE, 0, region);
3008 }
3009 
3010 //-------------------------inline_native_Class_query-------------------
3011 bool LibraryCallKit::inline_native_Class_query(vmIntrinsics::ID id) {
3012   int nargs = 1+0;  // just the Class mirror, in most cases
3013   const Type* return_type = TypeInt::BOOL;
3014   Node* prim_return_value = top();  // what happens if it's a primitive class?
3015   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3016   bool expect_prim = false;     // most of these guys expect to work on refs
3017 


3098                                      region, _prim_path);
3099   // If kls is null, we have a primitive mirror.
3100   phi->init_req(_prim_path, prim_return_value);
3101   if (stopped()) { push_result(region, phi); return true; }
3102 
3103   Node* p;  // handy temp
3104   Node* null_ctl;
3105 
3106   // Now that we have the non-null klass, we can perform the real query.
3107   // For constant classes, the query will constant-fold in LoadNode::Value.
3108   Node* query_value = top();
3109   switch (id) {
3110   case vmIntrinsics::_isInstance:
3111     // nothing is an instance of a primitive type
3112     _sp += nargs;          // gen_instanceof might do an uncommon trap
3113     query_value = gen_instanceof(obj, kls);
3114     _sp -= nargs;
3115     break;
3116 
3117   case vmIntrinsics::_getModifiers:
3118     p = basic_plus_adr(kls, Klass::modifier_flags_offset_in_bytes() + sizeof(oopDesc));
3119     query_value = make_load(NULL, p, TypeInt::INT, T_INT);
3120     break;
3121 
3122   case vmIntrinsics::_isInterface:
3123     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3124     if (generate_interface_guard(kls, region) != NULL)
3125       // A guard was added.  If the guard is taken, it was an interface.
3126       phi->add_req(intcon(1));
3127     // If we fall through, it's a plain class.
3128     query_value = intcon(0);
3129     break;
3130 
3131   case vmIntrinsics::_isArray:
3132     // (To verify this code sequence, check the asserts in JVM_IsArrayClass.)
3133     if (generate_array_guard(kls, region) != NULL)
3134       // A guard was added.  If the guard is taken, it was an array.
3135       phi->add_req(intcon(1));
3136     // If we fall through, it's a plain class.
3137     query_value = intcon(0);
3138     break;
3139 
3140   case vmIntrinsics::_isPrimitive:
3141     query_value = intcon(0); // "normal" path produces false
3142     break;
3143 
3144   case vmIntrinsics::_getSuperclass:
3145     // The rules here are somewhat unfortunate, but we can still do better
3146     // with random logic than with a JNI call.
3147     // Interfaces store null or Object as _super, but must report null.
3148     // Arrays store an intermediate super as _super, but must report Object.
3149     // Other types can report the actual _super.
3150     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3151     if (generate_interface_guard(kls, region) != NULL)
3152       // A guard was added.  If the guard is taken, it was an interface.
3153       phi->add_req(null());
3154     if (generate_array_guard(kls, region) != NULL)
3155       // A guard was added.  If the guard is taken, it was an array.
3156       phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror())));
3157     // If we fall through, it's a plain class.  Get its _super.
3158     p = basic_plus_adr(kls, Klass::super_offset_in_bytes() + sizeof(oopDesc));
3159     kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL) );
3160     null_ctl = top();
3161     kls = null_check_oop(kls, &null_ctl);
3162     if (null_ctl != top()) {
3163       // If the guard is taken, Object.superClass is null (both klass and mirror).
3164       region->add_req(null_ctl);
3165       phi   ->add_req(null());
3166     }
3167     if (!stopped()) {
3168       query_value = load_mirror_from_klass(kls);
3169     }
3170     break;
3171 
3172   case vmIntrinsics::_getComponentType:
3173     if (generate_array_guard(kls, region) != NULL) {
3174       // Be sure to pin the oop load to the guard edge just created:
3175       Node* is_array_ctrl = region->in(region->req()-1);
3176       Node* cma = basic_plus_adr(kls, in_bytes(arrayKlass::component_mirror_offset()) + sizeof(oopDesc));
3177       Node* cmo = make_load(is_array_ctrl, cma, TypeInstPtr::MIRROR, T_OBJECT);
3178       phi->add_req(cmo);
3179     }
3180     query_value = null();  // non-array case is null
3181     break;
3182 
3183   case vmIntrinsics::_getClassAccessFlags:
3184     p = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc));
3185     query_value = make_load(NULL, p, TypeInt::INT, T_INT);
3186     break;
3187 
3188   default:
3189     ShouldNotReachHere();
3190   }
3191 
3192   // Fall-through is the normal case of a query to a real class.
3193   phi->init_req(1, query_value);
3194   region->init_req(1, control());
3195 
3196   push_result(region, phi);
3197   C->set_has_split_ifs(true); // Has chance for split-if optimization
3198 
3199   return true;
3200 }
3201 
3202 //--------------------------inline_native_subtype_check------------------------
3203 // This intrinsic takes the JNI calls out of the heart of
3204 // UnsafeFieldAccessorImpl.set, which improves Field.set, readObject, etc.


4840     Node* src_klass  = load_object_klass(src);
4841     Node* dest_klass = load_object_klass(dest);
4842 
4843     // Generate the subtype check.
4844     // This might fold up statically, or then again it might not.
4845     //
4846     // Non-static example:  Copying List<String>.elements to a new String[].
4847     // The backing store for a List<String> is always an Object[],
4848     // but its elements are always type String, if the generic types
4849     // are correct at the source level.
4850     //
4851     // Test S[] against D[], not S against D, because (probably)
4852     // the secondary supertype cache is less busy for S[] than S.
4853     // This usually only matters when D is an interface.
4854     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
4855     // Plug failing path into checked_oop_disjoint_arraycopy
4856     if (not_subtype_ctrl != top()) {
4857       PreserveJVMState pjvms(this);
4858       set_control(not_subtype_ctrl);
4859       // (At this point we can assume disjoint_bases, since types differ.)
4860       int ek_offset = objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc);
4861       Node* p1 = basic_plus_adr(dest_klass, ek_offset);
4862       Node* n1 = LoadKlassNode::make(_gvn, immutable_memory(), p1, TypeRawPtr::BOTTOM);
4863       Node* dest_elem_klass = _gvn.transform(n1);
4864       Node* cv = generate_checkcast_arraycopy(adr_type,
4865                                               dest_elem_klass,
4866                                               src, src_offset, dest, dest_offset,
4867                                               ConvI2X(copy_length), dest_uninitialized);
4868       if (cv == NULL)  cv = intcon(-1);  // failure (no stub available)
4869       checked_control = control();
4870       checked_i_o     = i_o();
4871       checked_mem     = memory(adr_type);
4872       checked_value   = cv;
4873     }
4874     // At this point we know we do not need type checks on oop stores.
4875 
4876     // Let's see if we need card marks:
4877     if (alloc != NULL && use_ReduceInitialCardMarks()) {
4878       // If we do not need card marks, copy using the jint or jlong stub.
4879       copy_type = LP64_ONLY(UseCompressedOops ? T_INT : T_LONG) NOT_LP64(T_INT);
4880       assert(type2aelembytes(basic_elem_type) == type2aelembytes(copy_type),


5291 }
5292 
5293 // Helper function; generates code for cases requiring runtime checks.
5294 Node*
5295 LibraryCallKit::generate_checkcast_arraycopy(const TypePtr* adr_type,
5296                                              Node* dest_elem_klass,
5297                                              Node* src,  Node* src_offset,
5298                                              Node* dest, Node* dest_offset,
5299                                              Node* copy_length, bool dest_uninitialized) {
5300   if (stopped())  return NULL;
5301 
5302   address copyfunc_addr = StubRoutines::checkcast_arraycopy(dest_uninitialized);
5303   if (copyfunc_addr == NULL) { // Stub was not generated, go slow path.
5304     return NULL;
5305   }
5306 
5307   // Pick out the parameters required to perform a store-check
5308   // for the target array.  This is an optimistic check.  It will
5309   // look in each non-null element's class, at the desired klass's
5310   // super_check_offset, for the desired klass.
5311   int sco_offset = Klass::super_check_offset_offset_in_bytes() + sizeof(oopDesc);
5312   Node* p3 = basic_plus_adr(dest_elem_klass, sco_offset);
5313   Node* n3 = new(C, 3) LoadINode(NULL, memory(p3), p3, _gvn.type(p3)->is_ptr());
5314   Node* check_offset = ConvI2X(_gvn.transform(n3));
5315   Node* check_value  = dest_elem_klass;
5316 
5317   Node* src_start  = array_element_address(src,  src_offset,  T_OBJECT);
5318   Node* dest_start = array_element_address(dest, dest_offset, T_OBJECT);
5319 
5320   // (We know the arrays are never conjoint, because their types differ.)
5321   Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
5322                                  OptoRuntime::checkcast_arraycopy_Type(),
5323                                  copyfunc_addr, "checkcast_arraycopy", adr_type,
5324                                  // five arguments, of which two are
5325                                  // intptr_t (jlong in LP64)
5326                                  src_start, dest_start,
5327                                  copy_length XTOP,
5328                                  check_offset XTOP,
5329                                  check_value);
5330 
5331   return _gvn.transform(new (C, 1) ProjNode(call, TypeFunc::Parms));




2148     }
2149   }
2150 
2151   // The compile-time filters did not reject base_oop/offset, so
2152   // we need to generate the following runtime filters:
2153   //
2154   // if (offset == java_lang_ref_Reference::referent_offset) {
2155   //   if (base != null) {
2156   //     if (klass(base)->reference_type() != REF_NONE) {
2157   //       pre_barrier(_, pre_val, ...);
2158   //     }
2159   //   }
2160   // }
2161 
2162   float likely  = PROB_LIKELY(0.999);
2163   float unlikely  = PROB_UNLIKELY(0.999);
2164 
2165   IdealKit ideal(this);
2166 #define __ ideal.
2167 
2168   const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes();

2169 
2170   Node* referent_off = __ ConX(java_lang_ref_Reference::referent_offset);
2171 
2172   __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
2173     __ if_then(base_oop, BoolTest::ne, null(), likely); {
2174 
2175       // Update graphKit memory and control from IdealKit.
2176       sync_kit(ideal);
2177 
2178       Node* ref_klass_con = makecon(TypeKlassPtr::make(env()->Reference_klass()));
2179       Node* is_instof = gen_instanceof(base_oop, ref_klass_con);
2180 
2181       // Update IdealKit memory and control from graphKit.
2182       __ sync_kit(this);
2183 
2184       Node* one = __ ConI(1);
2185 
2186       __ if_then(is_instof, BoolTest::eq, one, unlikely); {
2187 
2188         // Update graphKit from IdealKit.
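
This hunk shows the GraphKit/IdealKit handshake the intrinsic depends on: whenever control crosses between the two kits, memory and control state must be synced in both directions. A minimal sketch of the round-trip, assuming the elided tail closes its if_then scopes with end_if() and commits with final_sync(), as other IdealKit users do:

  IdealKit ideal(this);
  #define __ ideal.
  __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
    sync_kit(ideal);    // push IdealKit's control/memory into the GraphKit side
    Node* is_instof = gen_instanceof(base_oop, ref_klass_con);  // GraphKit work
    __ sync_kit(this);  // pull the updated state back into IdealKit
  } __ end_if();
  final_sync(ideal);    // commit IdealKit's final state into this GraphKit
  #undef __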


2788 
2789 bool LibraryCallKit::inline_unsafe_allocate() {
2790   if (callee()->is_static())  return false;  // caller must have the capability!
2791   int nargs = 1 + 1;
2792   assert(signature()->size() == nargs-1, "alloc has 1 argument");
2793   null_check_receiver(callee());  // check then ignore argument(0)
2794   _sp += nargs;  // set original stack for use by uncommon_trap
2795   Node* cls = do_null_check(argument(1), T_OBJECT);
2796   _sp -= nargs;
2797   if (stopped())  return true;
2798 
2799   Node* kls = load_klass_from_mirror(cls, false, nargs, NULL, 0);
2800   _sp += nargs;  // set original stack for use by uncommon_trap
2801   kls = do_null_check(kls, T_OBJECT);
2802   _sp -= nargs;
2803   if (stopped())  return true;  // argument was like int.class
2804 
2805   // Note:  The argument might still be an illegal value like
2806   // Serializable.class or Object[].class.   The runtime will handle it.
2807   // But we must make an explicit check for initialization.
2808   Node* insp = basic_plus_adr(kls, instanceKlass::init_state_offset_in_bytes());
2809   Node* inst = make_load(NULL, insp, TypeInt::INT, T_INT);
2810   Node* bits = intcon(instanceKlass::fully_initialized);
2811   Node* test = _gvn.transform( new (C, 3) SubINode(inst, bits) );
2812   // The 'test' is non-zero if we need to take a slow path.
2813 
2814   Node* obj = new_instance(kls, test);
2815   push(obj);
2816 
2817   return true;
2818 }
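
The initialization test above is a self-contained idiom: load the klass's init state, subtract fully_initialized, and let new_instance() branch to the slow path on a non-zero result. Factored out it would look like this (hypothetical helper, built only from calls already used in the function above):

  // Hypothetical: returns zero iff the klass is fully initialized; a
  // non-zero result selects the slow (initialize-then-allocate) path.
  Node* LibraryCallKit::init_state_test(Node* kls) {
    Node* insp = basic_plus_adr(kls, instanceKlass::init_state_offset_in_bytes());
    Node* inst = make_load(NULL, insp, TypeInt::INT, T_INT);
    Node* bits = intcon(instanceKlass::fully_initialized);
    return _gvn.transform(new (C, 3) SubINode(inst, bits));
  }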
2819 
2820 //------------------------inline_native_time_funcs--------------
2821 // inline code for System.currentTimeMillis() and System.nanoTime()
2822 // these have the same type and signature
2823 bool LibraryCallKit::inline_native_time_funcs(bool isNano) {
2824   address funcAddr = isNano ? CAST_FROM_FN_PTR(address, os::javaTimeNanos) :
2825                               CAST_FROM_FN_PTR(address, os::javaTimeMillis);
2826   const char * funcName = isNano ? "nanoTime" : "currentTimeMillis";
2827   const TypeFunc *tf = OptoRuntime::current_time_millis_Type();
2828   const TypePtr* no_memory_effects = NULL;
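
The hunk cuts off before the call itself is emitted. Presumably the elided tail wires a leaf runtime call and pushes the jlong result; a sketch consistent with make_runtime_call and ProjNode as used later in this file (not the actual elided code):

  // Sketch only: leaf call with no memory effects; project the jlong
  // return value and push it as the intrinsic's result (two stack slots).
  Node* time  = make_runtime_call(RC_LEAF, tf, funcAddr, funcName,
                                  no_memory_effects);
  Node* value = _gvn.transform(new (C, 1) ProjNode(time, TypeFunc::Parms + 0));
  push_pair(value);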


2936     Node* mem_phi  = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
2937 
2938     result_rgn->init_req(slow_result_path, control());
2939     io_phi    ->init_req(slow_result_path, i_o());
2940     mem_phi   ->init_req(slow_result_path, reset_memory());
2941     result_val->init_req(slow_result_path, slow_val);
2942 
2943     set_all_memory( _gvn.transform(mem_phi) );
2944     set_i_o(        _gvn.transform(io_phi) );
2945   }
2946 
2947   push_result(result_rgn, result_val);
2948   C->set_has_split_ifs(true); // Has chance for split-if optimization
2949 
2950   return true;
2951 }
2952 
2953 //---------------------------load_mirror_from_klass----------------------------
2954 // Given a klass oop, load its java mirror (a java.lang.Class oop).
2955 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
2956   Node* p = basic_plus_adr(klass, Klass::java_mirror_offset_in_bytes());
2957   return make_load(NULL, p, TypeInstPtr::MIRROR, T_OBJECT);
2958 }
2959 
2960 //-----------------------load_klass_from_mirror_common-------------------------
2961 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
2962 // Test the klass oop for null (signifying a primitive Class like Integer.TYPE),
2963 // and branch to the given path on the region.
2964 // If never_see_null, take an uncommon trap on null, so we can optimistically
2965 // compile for the non-null case.
2966 // If the region is NULL, force never_see_null = true.
2967 Node* LibraryCallKit::load_klass_from_mirror_common(Node* mirror,
2968                                                     bool never_see_null,
2969                                                     int nargs,
2970                                                     RegionNode* region,
2971                                                     int null_path,
2972                                                     int offset) {
2973   if (region == NULL)  never_see_null = true;
2974   Node* p = basic_plus_adr(mirror, offset);
2975   const TypeKlassPtr*  kls_type = TypeKlassPtr::OBJECT_OR_NULL;
2976   Node* kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, kls_type) );
2977   _sp += nargs; // any deopt will start just before call to enclosing method
2978   Node* null_ctl = top();
2979   kls = null_check_oop(kls, &null_ctl, never_see_null);
2980   if (region != NULL) {
2981     // Set region->in(null_path) if the mirror is a primitive (e.g., int.class).
2982     region->init_req(null_path, null_ctl);
2983   } else {
2984     assert(null_ctl == top(), "no loose ends");
2985   }
2986   _sp -= nargs;
2987   return kls;
2988 }
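
A typical call shape, mirroring the use at new line 3097 below; the region wiring is what routes primitive mirrors (null klass) to their own path. The names _prim_path, PATH_LIMIT, and mirror here are illustrative:

  // Illustrative caller: primitive mirrors branch to region->in(_prim_path);
  // on fall-through the returned kls is known non-null.
  enum { _prim_path = 2, PATH_LIMIT = 3 };
  RegionNode* region = new (C, PATH_LIMIT) RegionNode(PATH_LIMIT);
  Node* kls = load_klass_from_mirror(mirror, /*never_see_null=*/ false,
                                     nargs, region, _prim_path);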
2989 
2990 //--------------------(inline_native_Class_query helpers)---------------------
2991 // Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE, JVM_ACC_HAS_FINALIZER.
2992 // Fall through if (mods & mask) == bits, take the guard otherwise.
2993 Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) {
2994   // Branch around if the given klass has the given modifier bit set.
2995   // Like generate_guard, adds a new path onto the region.
2996   Node* modp = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes());
2997   Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT);
2998   Node* mask = intcon(modifier_mask);
2999   Node* bits = intcon(modifier_bits);
3000   Node* mbit = _gvn.transform( new (C, 3) AndINode(mods, mask) );
3001   Node* cmp  = _gvn.transform( new (C, 3) CmpINode(mbit, bits) );
3002   Node* bol  = _gvn.transform( new (C, 2) BoolNode(cmp, BoolTest::ne) );
3003   return generate_fair_guard(bol, region);
3004 }
3005 Node* LibraryCallKit::generate_interface_guard(Node* kls, RegionNode* region) {
3006   return generate_access_flags_guard(kls, JVM_ACC_INTERFACE, 0, region);
3007 }
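
The helper's comment names JVM_ACC_IS_CLONEABLE and JVM_ACC_HAS_FINALIZER as the other intended users; by analogy with generate_interface_guard, each is a one-liner. A hypothetical example (not present in this excerpt):

  // Hypothetical: fall through when the klass has no finalizer; the guard
  // path is taken when JVM_ACC_HAS_FINALIZER is set.
  Node* LibraryCallKit::generate_finalizer_guard(Node* kls, RegionNode* region) {
    return generate_access_flags_guard(kls, JVM_ACC_HAS_FINALIZER, 0, region);
  }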
3008 
3009 //-------------------------inline_native_Class_query-------------------
3010 bool LibraryCallKit::inline_native_Class_query(vmIntrinsics::ID id) {
3011   int nargs = 1+0;  // just the Class mirror, in most cases
3012   const Type* return_type = TypeInt::BOOL;
3013   Node* prim_return_value = top();  // what happens if it's a primitive class?
3014   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3015   bool expect_prim = false;     // most of these guys expect to work on refs
3016 


3097                                      region, _prim_path);
3098   // If kls is null, we have a primitive mirror.
3099   phi->init_req(_prim_path, prim_return_value);
3100   if (stopped()) { push_result(region, phi); return true; }
3101 
3102   Node* p;  // handy temp
3103   Node* null_ctl;
3104 
3105   // Now that we have the non-null klass, we can perform the real query.
3106   // For constant classes, the query will constant-fold in LoadNode::Value.
3107   Node* query_value = top();
3108   switch (id) {
3109   case vmIntrinsics::_isInstance:
3110     // nothing is an instance of a primitive type
3111     _sp += nargs;          // gen_instanceof might do an uncommon trap
3112     query_value = gen_instanceof(obj, kls);
3113     _sp -= nargs;
3114     break;
3115 
3116   case vmIntrinsics::_getModifiers:
3117     p = basic_plus_adr(kls, Klass::modifier_flags_offset_in_bytes());
3118     query_value = make_load(NULL, p, TypeInt::INT, T_INT);
3119     break;
3120 
3121   case vmIntrinsics::_isInterface:
3122     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3123     if (generate_interface_guard(kls, region) != NULL)
3124       // A guard was added.  If the guard is taken, it was an interface.
3125       phi->add_req(intcon(1));
3126     // If we fall through, it's a plain class.
3127     query_value = intcon(0);
3128     break;
3129 
3130   case vmIntrinsics::_isArray:
3131     // (To verify this code sequence, check the asserts in JVM_IsArrayClass.)
3132     if (generate_array_guard(kls, region) != NULL)
3133       // A guard was added.  If the guard is taken, it was an array.
3134       phi->add_req(intcon(1));
3135     // If we fall through, it's a plain class.
3136     query_value = intcon(0);
3137     break;
3138 
3139   case vmIntrinsics::_isPrimitive:
3140     query_value = intcon(0); // "normal" path produces false
3141     break;
3142 
3143   case vmIntrinsics::_getSuperclass:
3144     // The rules here are somewhat unfortunate, but we can still do better
3145     // with random logic than with a JNI call.
3146     // Interfaces store null or Object as _super, but must report null.
3147     // Arrays store an intermediate super as _super, but must report Object.
3148     // Other types can report the actual _super.
3149     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3150     if (generate_interface_guard(kls, region) != NULL)
3151       // A guard was added.  If the guard is taken, it was an interface.
3152       phi->add_req(null());
3153     if (generate_array_guard(kls, region) != NULL)
3154       // A guard was added.  If the guard is taken, it was an array.
3155       phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror())));
3156     // If we fall through, it's a plain class.  Get its _super.
3157     p = basic_plus_adr(kls, Klass::super_offset_in_bytes());
3158     kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL) );
3159     null_ctl = top();
3160     kls = null_check_oop(kls, &null_ctl);
3161     if (null_ctl != top()) {
3162       // If the guard is taken, Object.superClass is null (both klass and mirror).
3163       region->add_req(null_ctl);
3164       phi   ->add_req(null());
3165     }
3166     if (!stopped()) {
3167       query_value = load_mirror_from_klass(kls);
3168     }
3169     break;
3170 
3171   case vmIntrinsics::_getComponentType:
3172     if (generate_array_guard(kls, region) != NULL) {
3173       // Be sure to pin the oop load to the guard edge just created:
3174       Node* is_array_ctrl = region->in(region->req()-1);
3175       Node* cma = basic_plus_adr(kls, in_bytes(arrayKlass::component_mirror_offset()));
3176       Node* cmo = make_load(is_array_ctrl, cma, TypeInstPtr::MIRROR, T_OBJECT);
3177       phi->add_req(cmo);
3178     }
3179     query_value = null();  // non-array case is null
3180     break;
3181 
3182   case vmIntrinsics::_getClassAccessFlags:
3183     p = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes());
3184     query_value = make_load(NULL, p, TypeInt::INT, T_INT);
3185     break;
3186 
3187   default:
3188     ShouldNotReachHere();
3189   }
3190 
3191   // Fall-through is the normal case of a query to a real class.
3192   phi->init_req(1, query_value);
3193   region->init_req(1, control());
3194 
3195   push_result(region, phi);
3196   C->set_has_split_ifs(true); // Has chance for split-if optimization
3197 
3198   return true;
3199 }
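
As a concrete check on the _getSuperclass rules above, the expected Java-level results, summarized as comments (familiar types chosen for illustration):

  // Runnable.class.getSuperclass()  -> null          (interface guard path)
  // String[].class.getSuperclass() -> Object.class   (array guard path)
  // Integer.class.getSuperclass()  -> Number.class   (plain class: its _super)
  // Object.class.getSuperclass()   -> null           (null _super path)
  // int.class.getSuperclass()      -> null           (primitive mirror path)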
3200 
3201 //--------------------------inline_native_subtype_check------------------------
3202 // This intrinsic takes the JNI calls out of the heart of
3203 // UnsafeFieldAccessorImpl.set, which improves Field.set, readObject, etc.


4839     Node* src_klass  = load_object_klass(src);
4840     Node* dest_klass = load_object_klass(dest);
4841 
4842     // Generate the subtype check.
4843     // This might fold up statically, or then again it might not.
4844     //
4845     // Non-static example:  Copying List<String>.elements to a new String[].
4846     // The backing store for a List<String> is always an Object[],
4847     // but its elements are always type String, if the generic types
4848     // are correct at the source level.
4849     //
4850     // Test S[] against D[], not S against D, because (probably)
4851     // the secondary supertype cache is less busy for S[] than S.
4852     // This usually only matters when D is an interface.
4853     Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
4854     // Plug failing path into checked_oop_disjoint_arraycopy
4855     if (not_subtype_ctrl != top()) {
4856       PreserveJVMState pjvms(this);
4857       set_control(not_subtype_ctrl);
4858       // (At this point we can assume disjoint_bases, since types differ.)
4859       int ek_offset = objArrayKlass::element_klass_offset_in_bytes();
4860       Node* p1 = basic_plus_adr(dest_klass, ek_offset);
4861       Node* n1 = LoadKlassNode::make(_gvn, immutable_memory(), p1, TypeRawPtr::BOTTOM);
4862       Node* dest_elem_klass = _gvn.transform(n1);
4863       Node* cv = generate_checkcast_arraycopy(adr_type,
4864                                               dest_elem_klass,
4865                                               src, src_offset, dest, dest_offset,
4866                                               ConvI2X(copy_length), dest_uninitialized);
4867       if (cv == NULL)  cv = intcon(-1);  // failure (no stub available)
4868       checked_control = control();
4869       checked_i_o     = i_o();
4870       checked_mem     = memory(adr_type);
4871       checked_value   = cv;
4872     }
4873     // At this point we know we do not need type checks on oop stores.
4874 
4875     // Let's see if we need card marks:
4876     if (alloc != NULL && use_ReduceInitialCardMarks()) {
4877       // If we do not need card marks, copy using the jint or jlong stub.
4878       copy_type = LP64_ONLY(UseCompressedOops ? T_INT : T_LONG) NOT_LP64(T_INT);
4879       assert(type2aelembytes(basic_elem_type) == type2aelembytes(copy_type),
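
To make the List&lt;String&gt; example above concrete (Java-level view, illustrative only): copying the Object[] backing store of a List&lt;String&gt; into a freshly allocated String[] reaches this code with src_klass = Object[] and dest_klass = String[]. Object[] is not a subtype of String[], so not_subtype_ctrl is taken and the copy proceeds through generate_checkcast_arraycopy with dest_elem_klass = String, validating each element as it is stored.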


5290 }
5291 
5292 // Helper function; generates code for cases requiring runtime checks.
5293 Node*
5294 LibraryCallKit::generate_checkcast_arraycopy(const TypePtr* adr_type,
5295                                              Node* dest_elem_klass,
5296                                              Node* src,  Node* src_offset,
5297                                              Node* dest, Node* dest_offset,
5298                                              Node* copy_length, bool dest_uninitialized) {
5299   if (stopped())  return NULL;
5300 
5301   address copyfunc_addr = StubRoutines::checkcast_arraycopy(dest_uninitialized);
5302   if (copyfunc_addr == NULL) { // Stub was not generated, go slow path.
5303     return NULL;
5304   }
5305 
5306   // Pick out the parameters required to perform a store-check
5307   // for the target array.  This is an optimistic check.  It will
5308   // look in each non-null element's class, at the desired klass's
5309   // super_check_offset, for the desired klass.
5310   int sco_offset = Klass::super_check_offset_offset_in_bytes();
5311   Node* p3 = basic_plus_adr(dest_elem_klass, sco_offset);
5312   Node* n3 = new(C, 3) LoadINode(NULL, memory(p3), p3, _gvn.type(p3)->is_ptr());
5313   Node* check_offset = ConvI2X(_gvn.transform(n3));
5314   Node* check_value  = dest_elem_klass;
5315 
5316   Node* src_start  = array_element_address(src,  src_offset,  T_OBJECT);
5317   Node* dest_start = array_element_address(dest, dest_offset, T_OBJECT);
5318 
5319   // (We know the arrays are never conjoint, because their types differ.)
5320   Node* call = make_runtime_call(RC_LEAF|RC_NO_FP,
5321                                  OptoRuntime::checkcast_arraycopy_Type(),
5322                                  copyfunc_addr, "checkcast_arraycopy", adr_type,
5323                                  // five arguments, of which two are
5324                                  // intptr_t (jlong in LP64)
5325                                  src_start, dest_start,
5326                                  copy_length XTOP,
5327                                  check_offset XTOP,
5328                                  check_value);
5329 
5330   return _gvn.transform(new (C, 1) ProjNode(call, TypeFunc::Parms));
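
For reference, what the stub does with check_offset and check_value per element, sketched in plain C++ (conceptual: types simplified, helpers assumed; the real stub is hand-written assembly and reports how many elements were copied before any failure):

  // Per-element store check, conceptually:
  bool element_store_ok(oopDesc* elem, Klass* check_value, intptr_t check_offset) {
    if (elem == NULL) return true;             // null elements always store OK
    Klass* k = klass_of(elem);                 // assumed accessor
    // Fast path: if check_value is elem's klass or one of its primary
    // supertypes, the word at check_offset inside k is check_value itself.
    if (*(Klass**)((intptr_t)k + check_offset) == check_value) return true;
    // Slow path: search the secondary supertype array (assumed helper).
    return secondary_supers_contain(k, check_value);
  }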

