
src/share/vm/opto/library_call.cpp

rev 8838 : 8155635: C2: Mixed unsafe accesses break alias analysis
Reviewed-by: kvn
rev 8910 : full patch for JFR
   1 /*
   2  * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


 221   bool inline_exp();
 222   bool inline_pow();
 223   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 224   bool inline_min_max(vmIntrinsics::ID id);
 225   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 226   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 227   int classify_unsafe_addr(Node* &base, Node* &offset);
 228   Node* make_unsafe_address(Node* base, Node* offset);
 229   // Helper for inline_unsafe_access.
 230   // Generates the guards that check whether the result of
 231   // Unsafe.getObject should be recorded in an SATB log buffer.
 232   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 233   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 234   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 235   static bool klass_needs_init_guard(Node* kls);
 236   bool inline_unsafe_allocate();
 237   bool inline_unsafe_copyMemory();
 238   bool inline_native_currentThread();
 239 #ifdef TRACE_HAVE_INTRINSICS
 240   bool inline_native_classID();
 241   bool inline_native_threadID();
 242 #endif
 243   bool inline_native_time_funcs(address method, const char* funcName);
 244   bool inline_native_isInterrupted();
 245   bool inline_native_Class_query(vmIntrinsics::ID id);
 246   bool inline_native_subtype_check();
 247 
 248   bool inline_native_newArray();
 249   bool inline_native_getLength();
 250   bool inline_array_copyOf(bool is_copyOfRange);
 251   bool inline_array_equals();
 252   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 253   bool inline_native_clone(bool is_virtual);
 254   bool inline_native_Reflection_getCallerClass();
 255   // Helper function for inlining native object hash method
 256   bool inline_native_hashcode(bool is_virtual, bool is_static);
 257   bool inline_native_getClass();
 258 
 259   // Helper functions for inlining arraycopy
 260   bool inline_arraycopy();
 261   void generate_arraycopy(const TypePtr* adr_type,


 863   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 864 
 865   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 866   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 867   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 868 
 869   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 870   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 871   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 872   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 873   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 874 
 875   case vmIntrinsics::_loadFence:
 876   case vmIntrinsics::_storeFence:
 877   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 878 
 879   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 880   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 881 
 882 #ifdef TRACE_HAVE_INTRINSICS
 883   case vmIntrinsics::_classID:                  return inline_native_classID();
 884   case vmIntrinsics::_threadID:                 return inline_native_threadID();
 885   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 886 #endif
 887   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 888   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 889   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 890   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 891   case vmIntrinsics::_newArray:                 return inline_native_newArray();
 892   case vmIntrinsics::_getLength:                return inline_native_getLength();
 893   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 894   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 895   case vmIntrinsics::_equalsC:                  return inline_array_equals();
 896   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 897 
 898   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 899 
 900   case vmIntrinsics::_isInstance:
 901   case vmIntrinsics::_getModifiers:
 902   case vmIntrinsics::_isInterface:
 903   case vmIntrinsics::_isArray:
 904   case vmIntrinsics::_isPrimitive:
 905   case vmIntrinsics::_getSuperclass:


3255     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
3256     // can generate code to load it as unsigned byte.
3257     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
3258     Node* bits = intcon(InstanceKlass::fully_initialized);
3259     test = _gvn.transform(new (C) SubINode(inst, bits));
3260     // The 'test' is non-zero if we need to take a slow path.
3261   }
3262 
3263   Node* obj = new_instance(kls, test);
3264   set_result(obj);
3265   return true;
3266 }
3267 
3268 #ifdef TRACE_HAVE_INTRINSICS
3269 /*
3270  * oop -> myklass
3271  * myklass->trace_id |= USED
3272  * return myklass->trace_id & ~0x3
3273  */
3274 bool LibraryCallKit::inline_native_classID() {
3275   null_check_receiver();  // null-check, then ignore
3276   Node* cls = null_check(argument(1), T_OBJECT);
3277   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
3278   kls = null_check(kls, T_OBJECT);
3279   ByteSize offset = TRACE_ID_OFFSET;
3280   Node* insp = basic_plus_adr(kls, in_bytes(offset));
3281   Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG, MemNode::unordered);
3282   Node* bits = longcon(~0x03l); // ignore bit 0 & 1
3283   Node* andl = _gvn.transform(new (C) AndLNode(tvalue, bits));
3284   Node* clsused = longcon(0x01l); // set the class bit
3285   Node* orl = _gvn.transform(new (C) OrLNode(tvalue, clsused));
3286 
3287   const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
3288   store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
3289   set_result(andl);
3290   return true;
3291 }
3292 
3293 bool LibraryCallKit::inline_native_threadID() {
3294   Node* tls_ptr = NULL;
3295   Node* cur_thr = generate_current_thread(tls_ptr);
3296   Node* p = basic_plus_adr(top()/*!oop*/, tls_ptr, in_bytes(JavaThread::osthread_offset()));
3297   Node* osthread = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
3298   p = basic_plus_adr(top()/*!oop*/, osthread, in_bytes(OSThread::thread_id_offset()));
3299 
3300   Node* threadid = NULL;
3301   size_t thread_id_size = OSThread::thread_id_size();
3302   if (thread_id_size == (size_t) BytesPerLong) {
3303     threadid = ConvL2I(make_load(control(), p, TypeLong::LONG, T_LONG, MemNode::unordered));
3304   } else if (thread_id_size == (size_t) BytesPerInt) {
3305     threadid = make_load(control(), p, TypeInt::INT, T_INT, MemNode::unordered);
3306   } else {
3307     ShouldNotReachHere();
3308   }
3309   set_result(threadid);
3310   return true;
3311 }
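// Illustrative source-level sketch of what the graph above computes; not part
// of the patch. The stub types below stand in for JavaThread and OSThread.
#include <stdint.h>

struct OSThreadStub   { intptr_t thread_id; };      // width is platform dependent
struct JavaThreadStub { OSThreadStub* osthread; };

static int current_thread_id(JavaThreadStub* t) {
  // Load JavaThread::_osthread, then OSThread::_thread_id; the intrinsic emits
  // a 32-bit or 64-bit load depending on OSThread::thread_id_size() and
  // narrows a 64-bit id to int via ConvL2I.
  return (int) t->osthread->thread_id;
}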
3312 #endif
3313 
3314 //------------------------inline_native_time_funcs--------------
3315 // inline code for System.currentTimeMillis() and System.nanoTime()
3316 // these have the same type and signature
3317 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
3318   const TypeFunc* tf = OptoRuntime::void_long_Type();
3319   const TypePtr* no_memory_effects = NULL;
3320   Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
3321   Node* value = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+0));
3322 #ifdef ASSERT
3323   Node* value_top = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+1));
3324   assert(value_top == top(), "second value must be top");
3325 #endif
3326   set_result(value);
3327   return true;
3328 }
3329 
3330 //------------------------inline_native_currentThread------------------


   1 /*
   2  * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


 221   bool inline_exp();
 222   bool inline_pow();
 223   Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
 224   bool inline_min_max(vmIntrinsics::ID id);
 225   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 226   // This returns Type::AnyPtr, RawPtr, or OopPtr.
 227   int classify_unsafe_addr(Node* &base, Node* &offset);
 228   Node* make_unsafe_address(Node* base, Node* offset);
 229   // Helper for inline_unsafe_access.
 230   // Generates the guards that check whether the result of
 231   // Unsafe.getObject should be recorded in an SATB log buffer.
 232   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 233   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
 234   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 235   static bool klass_needs_init_guard(Node* kls);
 236   bool inline_unsafe_allocate();
 237   bool inline_unsafe_copyMemory();
 238   bool inline_native_currentThread();
 239 #ifdef TRACE_HAVE_INTRINSICS
 240   bool inline_native_classID();
 241   bool inline_native_getEventWriter();
 242 #endif
 243   bool inline_native_time_funcs(address method, const char* funcName);
 244   bool inline_native_isInterrupted();
 245   bool inline_native_Class_query(vmIntrinsics::ID id);
 246   bool inline_native_subtype_check();
 247 
 248   bool inline_native_newArray();
 249   bool inline_native_getLength();
 250   bool inline_array_copyOf(bool is_copyOfRange);
 251   bool inline_array_equals();
 252   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 253   bool inline_native_clone(bool is_virtual);
 254   bool inline_native_Reflection_getCallerClass();
 255   // Helper function for inlining native object hash method
 256   bool inline_native_hashcode(bool is_virtual, bool is_static);
 257   bool inline_native_getClass();
 258 
 259   // Helper functions for inlining arraycopy
 260   bool inline_arraycopy();
 261   void generate_arraycopy(const TypePtr* adr_type,


 863   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 864 
 865   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 866   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 867   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 868 
 869   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 870   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 871   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 872   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 873   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 874 
 875   case vmIntrinsics::_loadFence:
 876   case vmIntrinsics::_storeFence:
 877   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 878 
 879   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 880   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 881 
 882 #ifdef TRACE_HAVE_INTRINSICS
 883   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 884   case vmIntrinsics::_getClassId:               return inline_native_classID();
 885   case vmIntrinsics::_getEventWriter:           return inline_native_getEventWriter();
 886 #endif
 887   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 888   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 889   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 890   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 891   case vmIntrinsics::_newArray:                 return inline_native_newArray();
 892   case vmIntrinsics::_getLength:                return inline_native_getLength();
 893   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 894   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 895   case vmIntrinsics::_equalsC:                  return inline_array_equals();
 896   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 897 
 898   case vmIntrinsics::_isAssignableFrom:         return inline_native_subtype_check();
 899 
 900   case vmIntrinsics::_isInstance:
 901   case vmIntrinsics::_getModifiers:
 902   case vmIntrinsics::_isInterface:
 903   case vmIntrinsics::_isArray:
 904   case vmIntrinsics::_isPrimitive:
 905   case vmIntrinsics::_getSuperclass:


3255     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
3256     // can generate code to load it as unsigned byte.
3257     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
3258     Node* bits = intcon(InstanceKlass::fully_initialized);
3259     test = _gvn.transform(new (C) SubINode(inst, bits));
3260     // The 'test' is non-zero if we need to take a slow path.
3261   }
3262 
3263   Node* obj = new_instance(kls, test);
3264   set_result(obj);
3265   return true;
3266 }
3267 
3268 #ifdef TRACE_HAVE_INTRINSICS
3269 /*
3270  * oop -> myklass
3271  * myklass->trace_id |= USED
3272  * return myklass->trace_id & ~0x3
3273  */
3274 bool LibraryCallKit::inline_native_classID() {
3275   if (EnableJFR) {
3276     Node* cls = null_check(argument(0), T_OBJECT);
3277     Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
3278     kls = null_check(kls, T_OBJECT);
3279 
3280     ByteSize offset = TRACE_KLASS_TRACE_ID_OFFSET;
3281     Node* insp = basic_plus_adr(kls, in_bytes(offset));
3282     Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG, MemNode::unordered);
3283 
3284     Node* clsused = longcon(0x01l); // set the class bit
3285     Node* orl = _gvn.transform(new (C) OrLNode(tvalue, clsused));
3286     const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
3287     store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
3288 
3289 #ifdef TRACE_ID_META_BITS
3290     Node* mbits = longcon(~TRACE_ID_META_BITS);
3291     tvalue = _gvn.transform(new (C) AndLNode(tvalue, mbits));
3292 #endif
3293 #ifdef TRACE_ID_SHIFT
3294     Node* cbits = intcon(TRACE_ID_SHIFT);
3295     tvalue = _gvn.transform(new (C) URShiftLNode(tvalue, cbits));
3296 #endif
3297 
3298     set_result(tvalue);
3299     return true;
3300   } else {
3301     return false;
3302   }
3303 }
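// Illustrative source-level sketch of the graph built above; not part of the
// patch. "trace_id", META_BITS and ID_SHIFT are stand-ins for the real JFR
// klass field and for TRACE_ID_META_BITS / TRACE_ID_SHIFT.
#include <stdint.h>

struct KlassStub { int64_t trace_id; };

static const int64_t META_BITS = 0x3;  // assumed placeholder value
static const int     ID_SHIFT  = 2;    // assumed placeholder value

static int64_t class_id_for(KlassStub* k) {
  int64_t id = k->trace_id;                    // load myklass->trace_id
  k->trace_id = id | 0x1;                      // OrL + store: mark the class as used
  id &= ~META_BITS;                            // AndL: clear meta bits (if defined)
  id = (int64_t)((uint64_t) id >> ID_SHIFT);   // URShiftL: unsigned shift (if defined)
  return id;
}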
3304 
3305 bool LibraryCallKit::inline_native_getEventWriter() {
3306   if (EnableJFR) {
3307     Node* tls_ptr = _gvn.transform(new (C) ThreadLocalNode());
3308 
3309     Node* jobj_ptr = basic_plus_adr(top(), tls_ptr,
3310                                   in_bytes(TRACE_THREAD_DATA_WRITER_OFFSET)
3311                                   );
3312 
3313     Node* jobj = make_load(control(), jobj_ptr, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
3314 
3315     Node* jobj_cmp_null = _gvn.transform( new (C) CmpPNode(jobj, null()) );
3316     Node* test_jobj_eq_null  = _gvn.transform( new (C) BoolNode(jobj_cmp_null, BoolTest::eq) );
3317 
3318     IfNode* iff_jobj_null =
3319       create_and_map_if(control(), test_jobj_eq_null, PROB_MIN, COUNT_UNKNOWN);
3320 
3321     enum { _normal_path = 1,
3322            _null_path = 2,
3323            PATH_LIMIT };
3324 
3325     RegionNode* result_rgn = new (C) RegionNode(PATH_LIMIT);
3326     PhiNode*    result_val = new (C) PhiNode(result_rgn, TypePtr::BOTTOM);
3327 
3328     Node* jobj_is_null = _gvn.transform(new (C) IfTrueNode(iff_jobj_null));
3329     result_rgn->init_req(_null_path, jobj_is_null);
3330     result_val->init_req(_null_path, null());
3331 
3332     Node* jobj_is_not_null = _gvn.transform(new (C) IfFalseNode(iff_jobj_null));
3333     result_rgn->init_req(_normal_path, jobj_is_not_null);
3334 
3335     Node* res = make_load(jobj_is_not_null, jobj, TypeInstPtr::NOTNULL, T_OBJECT, MemNode::unordered);
3336     result_val->init_req(_normal_path, res);
3337 
3338     set_result(result_rgn, result_val);
3339 
3340     return true;
3341   } else {
3342     return false;
3343   }
3344 }
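// Illustrative source-level sketch of the null-check diamond built above; not
// part of the patch. "writer_handle" stands in for the thread-local slot at
// TRACE_THREAD_DATA_WRITER_OFFSET.
#include <stddef.h>

struct ThreadStub { void** writer_handle; };

static void* current_event_writer(ThreadStub* t) {
  void** handle = t->writer_handle;  // raw T_ADDRESS load off the thread pointer
  if (handle == NULL) {              // CmpP/Bool eq maps to the _null_path
    return NULL;                     // no event writer installed for this thread
  }
  return *handle;                    // _normal_path: resolve the handle to the object
}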
3345 #endif
3346 
3347 //------------------------inline_native_time_funcs--------------
3348 // inline code for System.currentTimeMillis() and System.nanoTime()
3349 // these have the same type and signature
3350 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
3351   const TypeFunc* tf = OptoRuntime::void_long_Type();
3352   const TypePtr* no_memory_effects = NULL;
3353   Node* time = make_runtime_call(RC_LEAF, tf, funcAddr, funcName, no_memory_effects);
3354   Node* value = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+0));
3355 #ifdef ASSERT
3356   Node* value_top = _gvn.transform(new (C) ProjNode(time, TypeFunc::Parms+1));
3357   assert(value_top == top(), "second value must be top");
3358 #endif
3359   set_result(value);
3360   return true;
3361 }
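// For reference: the leaf call above targets a C function of this shape
// (OptoRuntime::void_long_Type()); only the Parms+0 projection carries the
// 64-bit result and Parms+1 must stay top. Hypothetical stand-in below.
#include <stdint.h>

extern "C" int64_t time_func_stub(void) {   // stands in for os::javaTimeMillis / os::javaTimeNanos
  return 0;                                 // a real implementation reads the OS clock
}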
3362 
3363 //------------------------inline_native_currentThread------------------

