src/share/vm/prims/unsafe.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/prims

src/share/vm/prims/unsafe.cpp

Print this page
rev 11111 : imported patch 8150921_unsafegetset2reg
rev 11112 : [mq]: 8150921_unsafegetset2reg.02


 122 }
 123 
 124 // Externally callable versions:
 125 // (Use these in compiler intrinsics which emulate unsafe primitives.)
 126 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
 127   return field_offset;   // identity: Unsafe field offsets are encoded directly as byte offsets
 128 }
 129 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
 130   return byte_offset;   // identity: inverse of Unsafe_field_offset_to_byte_offset
 131 }
 132 
 133 
 134 ///// Data read/writes on the Java heap and in native (off-heap) memory
 135 
 136 /**
 137  * Helper class for accessing memory.
 138  *
 139  * Normalizes values and wraps accesses in
 140  * JavaThread::doing_unsafe_access() if needed.
 141  */
 142 class MemoryAccess {
 143   JavaThread* _thread;
 144   jobject _obj;
 145   jlong _offset;
 146 
 147   // Resolves and returns the address of the memory access
 148   void* addr() {
       // NOTE(review): when _obj resolves to NULL this presumably treats
       // _offset as an absolute off-heap address (see GuardUnsafeAccess) --
       // confirm in index_oop_from_field_offset_long().
 149     return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset);
 150   }
 151 









 152   /**
 153    * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 154    */
 155   class GuardUnsafeAccess {
 156     JavaThread* _thread;
 157     bool _active;
 158  
 159   public:
 160     GuardUnsafeAccess(JavaThread* thread, jobject _obj) : _thread(thread) {
 161       if (JNIHandles::resolve(_obj) == NULL) {
 162         // native/off-heap access which may raise SIGBUS if accessing
 163         // memory mapped file data in a region of the file which has
 164         // been truncated and is now invalid
 165         _thread->set_doing_unsafe_access(true);
 166         _active = true;
 167       } else {
 168         _active = false;
 169       }
 170     }
 171 
 172     ~GuardUnsafeAccess() {
 173       if (_active) {
 174         _thread->set_doing_unsafe_access(false);
 175       }
 176     }
 177   };
 178 
 179 public:
 180   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)   // obj == NULL denotes an off-heap access (see GuardUnsafeAccess)
 181     : _thread(thread), _obj(obj), _offset(offset) {
 182   }
 183 
 184   template <typename T>
 185   T normalize(T x) {
 186     return x;   // generic case: stored bits pass through unchanged
 187   }
 188 
 189   jboolean normalize(jboolean x) {
 190     return x & 1;   // canonicalize to 0/1: only the low bit of a jboolean is significant
 191   }
 192 
 193   template <typename T>
       // Plain (non-volatile) read of a T at (_obj, _offset), wrapped in the
       // SIGBUS guard for off-heap accesses.
       // NOTE(review): reads are not normalize()d, unlike put() -- confirm
       // whether a jboolean read of an out-of-range byte should be
       // canonicalized too.
 194   T get() {
 195     GuardUnsafeAccess guard(_thread, _obj);
 196 
 197     T* p = (T*)addr();
 198 
 199     T x = *p;
 200 
 201     return x;
 202   }
 203 
 204   template <typename T>
 205   void put(T x) {
 206     GuardUnsafeAccess guard(_thread, _obj);
 207 
 208     T* p = (T*)addr();
 209 
 210     *p = normalize(x);
 211   }
 212 
 213 


 225 
 226     return x;
 227   }
 228 
 229   template <typename T>
 230   void put_volatile(T x) {
 231     GuardUnsafeAccess guard(_thread, _obj);
 232 
 233     T* p = (T*)addr();
 234     
 235     OrderAccess::release_store_fence((volatile T*)p, normalize(x));
 236   }
 237 
 238 
 239 #ifndef SUPPORTS_NATIVE_CX8
 240   // Fallback accessors for platforms without a native 8-byte CAS:
 241   template <typename T>
 242   T get_mutex() {
 243     GuardUnsafeAccess guard(_thread, _obj);
 244 
 245     T* p = (T*)addr();
 246 
 247     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 248 


 249     T x = Atomic::load(p);
 250 
 251     return x;
 252   }
 253 
 254   template <typename T>
 255   void put_mutex(T x) {
 256     GuardUnsafeAccess guard(_thread, _obj);
 257 
 258     T* p = (T*)addr();
 259 
 260     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 261 


 262     Atomic::store(normalize(x),  p);
 263   }
 264 #endif
 265 };
 266 
 267 // Get/PutObject must be special-cased, since it works with handles.
 268 
 269 // These functions allow a null base pointer with an arbitrary address.
 270 // But if the base pointer is non-null, the offset should make some sense.
 271 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 272 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 273   oop p = JNIHandles::resolve(obj);
 274   oop v;
 275 
 276   if (UseCompressedOops) {
 277     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);   // 32-bit compressed oop load
 278     v = oopDesc::decode_heap_oop(n);                                          // widen to a full oop
 279   } else {
 280     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 281   }
     // (remainder of this function -- returning v as a local handle -- is
     // elided from this diff view)


 963 UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 964   oop x = JNIHandles::resolve(x_h);
 965   oop e = JNIHandles::resolve(e_h);
 966   oop p = JNIHandles::resolve(obj);
 967   HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
 968   oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
 969   if (res == e) {
 970     update_barrier_set((void*)addr, x);   // GC write barrier only when x was actually installed
 971   }
 972   return JNIHandles::make_local(env, res);   // witness value: == e iff the exchange succeeded
 973 } UNSAFE_END
 974 
 975 UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
 976   oop p = JNIHandles::resolve(obj);
 977   jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
 978 
 979   return (jint)(Atomic::cmpxchg(x, addr, e));   // witness value: == e iff the exchange succeeded
 980 } UNSAFE_END
 981 
 982 UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 983   Handle p (THREAD, JNIHandles::resolve(obj));
 984   jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
 985 
 986 #ifdef SUPPORTS_NATIVE_CX8
 987   return (jlong)(Atomic::cmpxchg(x, addr, e));
 988 #else
 989   if (VM_Version::supports_cx8()) {
 990     return (jlong)(Atomic::cmpxchg(x, addr, e));
 991   } else {
 992     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 993 
 994     jlong val = Atomic::load(addr);
 995     if (val == e) {
 996       Atomic::store(x, addr);
 997     }
 998     return val;
 999   }
1000 #endif
1001 } UNSAFE_END
1002 
1003 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
     // (lines 1004-1005, which resolve x_h and e_h into oops 'x' and 'e',
     // are elided from this diff view)
1006   oop p = JNIHandles::resolve(obj);
1007   HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
1008   oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
1009   if (res != e) {
1010     return false;   // another thread won the race; field left unchanged
1011   }
1012 
1013   update_barrier_set((void*)addr, x);   // GC write barrier: x is now stored in the heap
1014 
1015   return true;
1016 } UNSAFE_END
1017 
1018 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
1019   oop p = JNIHandles::resolve(obj);
1020   jint* addr = (jint *)index_oop_from_field_offset_long(p, offset);
1021 
1022   return (jint)(Atomic::cmpxchg(x, addr, e)) == e;   // true iff the swap installed x
1023 } UNSAFE_END
1024 
1025 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
1026   oop p = JNIHandles::resolve(obj);
1027   jlong* addr = (jlong*)index_oop_from_field_offset_long(p, offset);
1028 
1029 #ifdef SUPPORTS_NATIVE_CX8
1030   return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
1031 #else
1032   if (VM_Version::supports_cx8()) {
1033     return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
1034   } else {
1035     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
1036 
1037     jlong val = Atomic::load(addr);
1038     if (val != e) {
1039       return false;
1040     }
1041 
1042     Atomic::store(x, addr);
1043     return true;
1044   }
1045 #endif
1046 } UNSAFE_END
1047 




 122 }
 123 
 124 // Externally callable versions:
 125 // (Use these in compiler intrinsics which emulate unsafe primitives.)
 126 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
 127   return field_offset;   // identity: Unsafe field offsets are encoded directly as byte offsets
 128 }
 129 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
 130   return byte_offset;   // identity: inverse of Unsafe_field_offset_to_byte_offset
 131 }
 132 
 133 
 134 ///// Data read/writes on the Java heap and in native (off-heap) memory
 135 
 136 /**
 137  * Helper class for accessing memory.
 138  *
 139  * Normalizes values and wraps accesses in
 140  * JavaThread::doing_unsafe_access() if needed.
 141  */
 142 class MemoryAccess : StackObj {
 143   JavaThread* _thread;
 144   jobject _obj;
 145   jlong _offset;
 146 
 147   // Resolves and returns the address of the memory access
 148   void* addr() {
       // NOTE(review): when _obj resolves to NULL this presumably treats
       // _offset as an absolute off-heap address (see GuardUnsafeAccess) --
       // confirm in index_oop_from_field_offset_long().
 149     return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset);
 150   }
 151 
 152   template <typename T>
 153   T normalize(T x) {
 154     return x;   // generic case: stored bits pass through unchanged
 155   }
 156 
 157   jboolean normalize(jboolean x) {
 158     return x & 1;   // canonicalize to 0/1: only the low bit of a jboolean is significant
 159   }
 160 
 161   /**
 162    * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 163    */
 164   class GuardUnsafeAccess {
 165     JavaThread* _thread;
 166     bool _active;
 167  
 168   public:
 169     GuardUnsafeAccess(JavaThread* thread, jobject _obj) : _thread(thread) {
 170       if (JNIHandles::resolve(_obj) == NULL) {
 171         // native/off-heap access which may raise SIGBUS if accessing
 172         // memory mapped file data in a region of the file which has
 173         // been truncated and is now invalid
 174         _thread->set_doing_unsafe_access(true);
 175         _active = true;
 176       } else {
 177         _active = false;
 178       }
 179     }
 180 
 181     ~GuardUnsafeAccess() {
 182       if (_active) {
 183         _thread->set_doing_unsafe_access(false);
 184       }
 185     }
 186   };
 187 
 188 public:
 189   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)   // obj == NULL denotes an off-heap access (see GuardUnsafeAccess)
 190     : _thread(thread), _obj(obj), _offset(offset) {
 191   }
 192 
 193   template <typename T>
       // Plain (non-volatile) read of a T at (_obj, _offset), wrapped in the
       // SIGBUS guard for off-heap accesses.
       // NOTE(review): reads are not normalize()d, unlike put() -- confirm
       // whether a jboolean read of an out-of-range byte should be
       // canonicalized too.
 194   T get() {
 195     GuardUnsafeAccess guard(_thread, _obj);
 196 
 197     T* p = (T*)addr();
 198 
 199     T x = *p;
 200 
 201     return x;
 202   }
 203 
 204   template <typename T>
 205   void put(T x) {
 206     GuardUnsafeAccess guard(_thread, _obj);
 207 
 208     T* p = (T*)addr();
 209 
 210     *p = normalize(x);
 211   }
 212 
 213 


 225 
 226     return x;
 227   }
 228 
 229   template <typename T>
 230   void put_volatile(T x) {
 231     GuardUnsafeAccess guard(_thread, _obj);
 232 
 233     T* p = (T*)addr();
 234     
 235     OrderAccess::release_store_fence((volatile T*)p, normalize(x));
 236   }
 237 
 238 
 239 #ifndef SUPPORTS_NATIVE_CX8
 240   // Fallback accessors for platforms without a native 8-byte CAS:
 241   template <typename T>
       // Lock-based jlong read for platforms without a native 8-byte CAS:
       // all such accesses serialize on UnsafeJlong_lock. The target address
       // is resolved only after the lock is held.
 242   T get_mutex() {
 243     GuardUnsafeAccess guard(_thread, _obj);
 244 

 245     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 246 
 247     T* p = (T*)addr();
 248 
 249     T x = Atomic::load(p);
 250 
 251     return x;
 252   }
 253 
 254   template <typename T>
 255   void put_mutex(T x) {
 256     GuardUnsafeAccess guard(_thread, _obj);
 257 


 258     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 259 
 260     T* p = (T*)addr();
 261 
 262     Atomic::store(normalize(x),  p);
 263   }
 264 #endif
 265 };
 266 
 267 // Get/PutObject must be special-cased, since it works with handles.
 268 
 269 // These functions allow a null base pointer with an arbitrary address.
 270 // But if the base pointer is non-null, the offset should make some sense.
 271 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 272 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 273   oop p = JNIHandles::resolve(obj);
 274   oop v;
 275 
 276   if (UseCompressedOops) {
 277     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);   // 32-bit compressed oop load
 278     v = oopDesc::decode_heap_oop(n);                                          // widen to a full oop
 279   } else {
 280     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 281   }
     // (remainder of this function -- returning v as a local handle -- is
     // elided from this diff view)


 963 UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 964   oop x = JNIHandles::resolve(x_h);
 965   oop e = JNIHandles::resolve(e_h);
 966   oop p = JNIHandles::resolve(obj);
 967   HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
 968   oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
 969   if (res == e) {
 970     update_barrier_set((void*)addr, x);   // GC write barrier only when x was actually installed
 971   }
 972   return JNIHandles::make_local(env, res);   // witness value: == e iff the exchange succeeded
 973 } UNSAFE_END
 974 
 975 UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
 976   oop p = JNIHandles::resolve(obj);
 977   jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
 978 
 979   return (jint)(Atomic::cmpxchg(x, addr, e));   // witness value: == e iff the exchange succeeded
 980 } UNSAFE_END
 981 
 982 UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 983   Handle p(THREAD, JNIHandles::resolve(obj));   // Handle keeps the base object reachable across the lock-based fallback
 984   jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
 985 
 986 #ifdef SUPPORTS_NATIVE_CX8
 987   return (jlong)(Atomic::cmpxchg(x, addr, e));
 988 #else
 989   if (VM_Version::supports_cx8()) {
 990     return (jlong)(Atomic::cmpxchg(x, addr, e));
 991   } else {
       // Emulate the 8-byte CAS under UnsafeJlong_lock (no safepoint check).
 992     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 993 
 994     jlong val = Atomic::load(addr);
 995     if (val == e) {
 996       Atomic::store(x, addr);
 997     }
 998     return val;   // always the witnessed value, like the native CAS
 999   }
1000 #endif
1001 } UNSAFE_END
1002 
1003 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
     // (lines 1004-1005, which resolve x_h and e_h into oops 'x' and 'e',
     // are elided from this diff view)
1006   oop p = JNIHandles::resolve(obj);
1007   HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
1008   oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
1009   if (res != e) {
1010     return false;   // another thread won the race; field left unchanged
1011   }
1012 
1013   update_barrier_set((void*)addr, x);   // GC write barrier: x is now stored in the heap
1014 
1015   return true;
1016 } UNSAFE_END
1017 
1018 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
1019   oop p = JNIHandles::resolve(obj);
1020   jint* addr = (jint *)index_oop_from_field_offset_long(p, offset);
1021 
1022   return (jint)(Atomic::cmpxchg(x, addr, e)) == e;   // true iff the swap installed x
1023 } UNSAFE_END
1024 
1025 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
1026   Handle p(THREAD, JNIHandles::resolve(obj));   // Handle keeps the base object reachable across the lock-based fallback
1027   jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
1028 
1029 #ifdef SUPPORTS_NATIVE_CX8
1030   return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
1031 #else
1032   if (VM_Version::supports_cx8()) {
1033     return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
1034   } else {
       // Emulated CAS: load, compare, conditionally store under the jlong lock.
1035     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
1036 
1037     jlong val = Atomic::load(addr);
1038     if (val != e) {
1039       return false;
1040     }
1041 
1042     Atomic::store(x, addr);
1043     return true;
1044   }
1045 #endif
1046 } UNSAFE_END
1047 


src/share/vm/prims/unsafe.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File