< prev index next >

src/hotspot/share/prims/unsafe.cpp

Print this page
rev 48017 : 8186787: clang-4.0 SIGSEGV in Unsafe_PutByte
Reviewed-by: coleenp, dholmes
rev 48018 : imported patch Access_unsafe_mo_volatile_v2


 129 }
 130 
 131 // Externally callable versions:
 132 // (Use these in compiler intrinsics which emulate unsafe primitives.)
     // Field offsets and byte offsets use the same representation in this VM,
     // so both conversions are the identity function.
 133 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
 134   return field_offset;
 135 }
 136 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
 137   return byte_offset;
 138 }
 139 
 140 
 141 ///// Data read/writes on the Java heap and in native (off-heap) memory
 142 
 143 /**
 144  * Helper class for accessing memory.
 145  *
 146  * Normalizes values and wraps accesses in
 147  * JavaThread::doing_unsafe_access() if needed.
 148  */

 149 class MemoryAccess : StackObj {
 150   JavaThread* _thread;
 151   oop _obj;
 152   ptrdiff_t _offset;
 153 
 154   // Resolves and returns the address of the memory access
     // NOTE(review): returns a plain void*; each caller casts it to
     // (volatile T*) at the use site. The volatile qualification is what
     // keeps the potentially-faulting access inside the GuardUnsafeAccess
     // scope — confirm against the follow-up revision which returns
     // volatile T* directly.
 155   void* addr() {
 156     return index_oop_from_field_offset_long(_obj, _offset);




 


 157   }
 158 
     // Identity normalization for every type except jboolean.
 159   template <typename T>
 160   T normalize_for_write(T x) {
 161     return x;
 162   }
 163 
     // Java booleans are canonically 0 or 1: keep only the low bit on store.
 164   jboolean normalize_for_write(jboolean x) {
 165     return x & 1;
 166   }
 167 
 168   template <typename T>
 169   T normalize_for_read(T x) {
 170     return x;
 171   }
 172 
     // Any non-zero byte read from memory maps back to canonical true.
 173   jboolean normalize_for_read(jboolean x) {
 174     return x != 0;
 175   }
 176 
 177   /**
 178    * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 179    */
 180   class GuardUnsafeAccess {
 181     JavaThread* _thread;
 182 
 183   public:
 184     GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
 185       // native/off-heap access which may raise SIGBUS if accessing
 186       // memory mapped file data in a region of the file which has
 187       // been truncated and is now invalid
 188       _thread->set_doing_unsafe_access(true);
 189     }
 190 
 191     ~GuardUnsafeAccess() {
 192       _thread->set_doing_unsafe_access(false);
 193     }
 194   };
 195 
 196 public:
 197   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
 198     : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
 199     assert_field_offset_sane(_obj, offset);
 200   }
 201 
     // Plain (unordered) load. A null base object means an off-heap (raw)
     // access, which is guarded so a fault can be attributed to unsafe code;
     // otherwise the access is routed through the HeapAccess API.
 202   template <typename T>
 203   T get() {
 204     if (oopDesc::is_null(_obj)) {
 205       GuardUnsafeAccess guard(_thread);
 206       T ret = RawAccess<>::load((volatile T*)addr());
 207       return normalize_for_read(ret);
 208     } else {
 209       T ret = HeapAccess<>::load_at(_obj, _offset);
 210       return normalize_for_read(ret);
 211     }
 212   }
 213 
     // Plain (unordered) store; same off-heap vs. on-heap split as get().
 214   template <typename T>
 215   void put(T x) {
 216     if (oopDesc::is_null(_obj)) {
 217       GuardUnsafeAccess guard(_thread);
 218       RawAccess<>::store((volatile T*)addr(), normalize_for_write(x));
 219     } else {
 220       HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
 221     }
 222   }
 223 
 224 
     // Sequentially-consistent (MO_SEQ_CST) load, used by the Volatile
     // variants of the Unsafe accessors.
 225   template <typename T>
 226   T get_volatile() {
 227     if (oopDesc::is_null(_obj)) {
 228       GuardUnsafeAccess guard(_thread);
 229       volatile T ret = RawAccess<MO_SEQ_CST>::load((volatile T*)addr());
 230       return normalize_for_read(ret);
 231     } else {
 232       T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
 233       return normalize_for_read(ret);
 234     }
 235   }
 236 
     // Sequentially-consistent (MO_SEQ_CST) store.
 237   template <typename T>
 238   void put_volatile(T x) {
 239     if (oopDesc::is_null(_obj)) {
 240       GuardUnsafeAccess guard(_thread);
 241       RawAccess<MO_SEQ_CST>::store((volatile T*)addr(), normalize_for_write(x));
 242     } else {
 243       HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
 244     }
 245   }
 246 };
 247 
 248 // These functions allow a null base pointer with an arbitrary address.
 249 // But if the base pointer is non-null, the offset should make some sense.
 250 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 251 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 252   oop p = JNIHandles::resolve(obj);
 253   assert_field_offset_sane(p, offset);
     // NOTE(review): ON_UNKNOWN_OOP_REF — the reference kind of the field at
     // 'offset' is not statically known here; verify against the Access API
     // decorator documentation in access.hpp.
 254   oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 255   return JNIHandles::make_local(env, v);
 256 } UNSAFE_END
 257 
 258 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 259   oop x = JNIHandles::resolve(x_h);
 260   oop p = JNIHandles::resolve(obj);
 261   assert_field_offset_sane(p, offset);


     // Reads an uncompressed (raw) oop directly from the given native address
     // and hands it back as a JNI local reference.
 279 UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
 280   oop v = *(oop*) (address) addr;
 281   return JNIHandles::make_local(env, v);
 282 } UNSAFE_END
 283 
     // Reports platform endianness, decided at compile time via the
     // VM_LITTLE_ENDIAN define.
 284 UNSAFE_LEAF(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe)) {
 285 #ifdef VM_LITTLE_ENDIAN
 286   return false;
 287 #else
 288   return true;
 289 #endif
 290 } UNSAFE_END
 291 
     // Exposes the UseUnalignedAccesses VM flag (returned widened to jint).
 292 UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
 293   return UseUnalignedAccesses;
 294 } UNSAFE_END
 295 
     // Generates the plain (non-volatile) Unsafe_Get<Type>/Unsafe_Put<Type>
     // JNI entry-point pairs on top of MemoryAccess::get/put.
 296 #define DEFINE_GETSETOOP(java_type, Type) \
 297  \
 298 UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 299   return MemoryAccess(thread, obj, offset).get<java_type>(); \
 300 } UNSAFE_END \
 301  \
 302 UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 303   MemoryAccess(thread, obj, offset).put<java_type>(x); \
 304 } UNSAFE_END \
 305  \
 306 // END DEFINE_GETSETOOP.
 307 
     // NOTE(review): trailing semicolons below are inconsistent (absent on
     // Boolean/Byte, present on the rest); presumably harmless empty
     // declarations at file scope, but worth normalizing.
 308 DEFINE_GETSETOOP(jboolean, Boolean)
 309 DEFINE_GETSETOOP(jbyte, Byte)
 310 DEFINE_GETSETOOP(jshort, Short);
 311 DEFINE_GETSETOOP(jchar, Char);
 312 DEFINE_GETSETOOP(jint, Int);
 313 DEFINE_GETSETOOP(jlong, Long);
 314 DEFINE_GETSETOOP(jfloat, Float);
 315 DEFINE_GETSETOOP(jdouble, Double);
 316 
 317 #undef DEFINE_GETSETOOP
 318 
     // Generates the Unsafe_Get<Type>Volatile/Unsafe_Put<Type>Volatile JNI
     // entry-point pairs on top of MemoryAccess::get_volatile/put_volatile.
 319 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 320  \
 321 UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 322   return MemoryAccess(thread, obj, offset).get_volatile<java_type>(); \
 323 } UNSAFE_END \
 324  \
 325 UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 326   MemoryAccess(thread, obj, offset).put_volatile<java_type>(x); \
 327 } UNSAFE_END \
 328  \
 329 // END DEFINE_GETSETOOP_VOLATILE.
 330 
     // NOTE(review): same inconsistent trailing semicolons as the
     // non-volatile instantiation list.
 331 DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
 332 DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
 333 DEFINE_GETSETOOP_VOLATILE(jshort, Short);
 334 DEFINE_GETSETOOP_VOLATILE(jchar, Char);
 335 DEFINE_GETSETOOP_VOLATILE(jint, Int);
 336 DEFINE_GETSETOOP_VOLATILE(jlong, Long);
 337 DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
 338 DEFINE_GETSETOOP_VOLATILE(jdouble, Double);
 339 
 340 #undef DEFINE_GETSETOOP_VOLATILE
 341 
     // Unsafe.loadFence(): issues an acquire barrier via OrderAccess.
 342 UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
 343   OrderAccess::acquire();
 344 } UNSAFE_END
 345 
 346 UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {




 129 }
 130 
 131 // Externally callable versions:
 132 // (Use these in compiler intrinsics which emulate unsafe primitives.)
     // Field offsets and byte offsets use the same representation in this VM,
     // so both conversions are the identity function.
 133 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
 134   return field_offset;
 135 }
 136 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
 137   return byte_offset;
 138 }
 139 
 140 
 141 ///// Data read/writes on the Java heap and in native (off-heap) memory
 142 
 143 /**
 144  * Helper class for accessing memory.
 145  *
 146  * Normalizes values and wraps accesses in
 147  * JavaThread::doing_unsafe_access() if needed.
 148  */
 149 template <typename T>
 150 class MemoryAccess : StackObj {
 151   JavaThread* _thread;
 152   oop _obj;
 153   ptrdiff_t _offset;
 154 
 155   // Resolves and returns the address of the memory access.
 156   // This raw memory access may fault, so we make sure it happens within the
 157   // guarded scope by making the access volatile at least. Since the store
 158   // of Thread::set_doing_unsafe_access() is also volatile, these accesses
 159   // can not be reordered by the compiler. Therefore, if the access triggers
 160   // a fault, we will know that Thread::doing_unsafe_access() returns true.
 161   volatile T* addr() {
 162     void* addr = index_oop_from_field_offset_long(_obj, _offset);
 163     return static_cast<volatile T*>(addr);
 164   }
 165 
     // Identity normalization for every type except jboolean.
 166   template <typename U>
 167   U normalize_for_write(U x) {
 168     return x;
 169   }
 170 
     // Java booleans are canonically 0 or 1: keep only the low bit on store.
 171   jboolean normalize_for_write(jboolean x) {
 172     return x & 1;
 173   }
 174 
 175   template <typename U>
 176   U normalize_for_read(U x) {
 177     return x;
 178   }
 179 
     // Any non-zero byte read from memory maps back to canonical true.
 180   jboolean normalize_for_read(jboolean x) {
 181     return x != 0;
 182   }
 183 
 184   /**
 185    * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 186    */
 187   class GuardUnsafeAccess {
 188     JavaThread* _thread;
 189 
 190   public:
 191     GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
 192       // native/off-heap access which may raise SIGBUS if accessing
 193       // memory mapped file data in a region of the file which has
 194       // been truncated and is now invalid
 195       _thread->set_doing_unsafe_access(true);
 196     }
 197 
 198     ~GuardUnsafeAccess() {
 199       _thread->set_doing_unsafe_access(false);
 200     }
 201   };
 202 
 203 public:
 204   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
 205     : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
 206     assert_field_offset_sane(_obj, offset);
 207   }
 208 

     // Plain (unordered) load. A null base object means an off-heap (raw)
     // access, which is guarded so a fault can be attributed to unsafe code;
     // otherwise the access is routed through the HeapAccess API.
 209   T get() {
 210     if (oopDesc::is_null(_obj)) {
 211       GuardUnsafeAccess guard(_thread);
 212       T ret = RawAccess<>::load(addr());
 213       return normalize_for_read(ret);
 214     } else {
 215       T ret = HeapAccess<>::load_at(_obj, _offset);
 216       return normalize_for_read(ret);
 217     }
 218   }
 219 

     // Plain (unordered) store; same off-heap vs. on-heap split as get().
 220   void put(T x) {
 221     if (oopDesc::is_null(_obj)) {
 222       GuardUnsafeAccess guard(_thread);
 223       RawAccess<>::store(addr(), normalize_for_write(x));
 224     } else {
 225       HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
 226     }
 227   }
 228 
 229 

     // Sequentially-consistent (MO_SEQ_CST) load, used by the Volatile
     // variants of the Unsafe accessors.
 230   T get_volatile() {
 231     if (oopDesc::is_null(_obj)) {
 232       GuardUnsafeAccess guard(_thread);
 233       volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
 234       return normalize_for_read(ret);
 235     } else {
 236       T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
 237       return normalize_for_read(ret);
 238     }
 239   }
 240 

     // Sequentially-consistent (MO_SEQ_CST) store.
 241   void put_volatile(T x) {
 242     if (oopDesc::is_null(_obj)) {
 243       GuardUnsafeAccess guard(_thread);
 244       RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
 245     } else {
 246       HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
 247     }
 248   }
 249 };
 250 
 251 // These functions allow a null base pointer with an arbitrary address.
 252 // But if the base pointer is non-null, the offset should make some sense.
 253 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 254 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 255   oop p = JNIHandles::resolve(obj);
 256   assert_field_offset_sane(p, offset);
     // NOTE(review): ON_UNKNOWN_OOP_REF — the reference kind of the field at
     // 'offset' is not statically known here; verify against the Access API
     // decorator documentation in access.hpp.
 257   oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 258   return JNIHandles::make_local(env, v);
 259 } UNSAFE_END
 260 
 261 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 262   oop x = JNIHandles::resolve(x_h);
 263   oop p = JNIHandles::resolve(obj);
 264   assert_field_offset_sane(p, offset);


     // Reads an uncompressed (raw) oop directly from the given native address
     // and hands it back as a JNI local reference.
 282 UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
 283   oop v = *(oop*) (address) addr;
 284   return JNIHandles::make_local(env, v);
 285 } UNSAFE_END
 286 
     // Reports platform endianness, decided at compile time via the
     // VM_LITTLE_ENDIAN define.
 287 UNSAFE_LEAF(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe)) {
 288 #ifdef VM_LITTLE_ENDIAN
 289   return false;
 290 #else
 291   return true;
 292 #endif
 293 } UNSAFE_END
 294 
     // Exposes the UseUnalignedAccesses VM flag (returned widened to jint).
 295 UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
 296   return UseUnalignedAccesses;
 297 } UNSAFE_END
 298 
     // Generates the plain (non-volatile) Unsafe_Get<Type>/Unsafe_Put<Type>
     // JNI entry-point pairs; MemoryAccess is now a class template, so the
     // element type is supplied as MemoryAccess<java_type>.
 299 #define DEFINE_GETSETOOP(java_type, Type) \
 300  \
 301 UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 302   return MemoryAccess<java_type>(thread, obj, offset).get(); \
 303 } UNSAFE_END \
 304  \
 305 UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 306   MemoryAccess<java_type>(thread, obj, offset).put(x); \
 307 } UNSAFE_END \
 308  \
 309 // END DEFINE_GETSETOOP.
 310 
     // NOTE(review): trailing semicolons below are inconsistent (absent on
     // Boolean/Byte, present on the rest); presumably harmless empty
     // declarations at file scope, but worth normalizing.
 311 DEFINE_GETSETOOP(jboolean, Boolean)
 312 DEFINE_GETSETOOP(jbyte, Byte)
 313 DEFINE_GETSETOOP(jshort, Short);
 314 DEFINE_GETSETOOP(jchar, Char);
 315 DEFINE_GETSETOOP(jint, Int);
 316 DEFINE_GETSETOOP(jlong, Long);
 317 DEFINE_GETSETOOP(jfloat, Float);
 318 DEFINE_GETSETOOP(jdouble, Double);
 319 
 320 #undef DEFINE_GETSETOOP
 321 
     // Generates the Unsafe_Get<Type>Volatile/Unsafe_Put<Type>Volatile JNI
     // entry-point pairs on top of MemoryAccess<java_type>::get_volatile/
     // put_volatile.
 322 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 323  \
 324 UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 325   return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
 326 } UNSAFE_END \
 327  \
 328 UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 329   MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
 330 } UNSAFE_END \
 331  \
 332 // END DEFINE_GETSETOOP_VOLATILE.
 333 
     // NOTE(review): same inconsistent trailing semicolons as the
     // non-volatile instantiation list.
 334 DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
 335 DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
 336 DEFINE_GETSETOOP_VOLATILE(jshort, Short);
 337 DEFINE_GETSETOOP_VOLATILE(jchar, Char);
 338 DEFINE_GETSETOOP_VOLATILE(jint, Int);
 339 DEFINE_GETSETOOP_VOLATILE(jlong, Long);
 340 DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
 341 DEFINE_GETSETOOP_VOLATILE(jdouble, Double);
 342 
 343 #undef DEFINE_GETSETOOP_VOLATILE
 344 
     // Unsafe.loadFence(): issues an acquire barrier via OrderAccess.
 345 UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
 346   OrderAccess::acquire();
 347 } UNSAFE_END
 348 
 349 UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {


< prev index next >