--- old/src/hotspot/share/prims/unsafe.cpp	2017-11-29 13:53:47.195300308 +0100
+++ new/src/hotspot/share/prims/unsafe.cpp	2017-11-29 13:53:46.995300315 +0100
@@ -146,18 +146,25 @@
  * Normalizes values and wraps accesses in
  * JavaThread::doing_unsafe_access() if needed.
  */
+template <typename T>
 class MemoryAccess : StackObj {
   JavaThread* _thread;
   oop _obj;
   ptrdiff_t _offset;
 
-  // Resolves and returns the address of the memory access
-  void* addr() {
-    return index_oop_from_field_offset_long(_obj, _offset);
+  // Resolves and returns the address of the memory access.
+  // This raw memory access may fault, so we make sure it happens within the
+  // guarded scope by making the access volatile at least. Since the store
+  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
+  // can not be reordered by the compiler. Therefore, if the access triggers
+  // a fault, we will know that Thread::doing_unsafe_access() returns true.
+  volatile T* addr() {
+    void* addr = index_oop_from_field_offset_long(_obj, _offset);
+    return static_cast<volatile T*>(addr);
   }
 
-  template <typename T>
-  T normalize_for_write(T x) {
+  template <typename U>
+  U normalize_for_write(U x) {
     return x;
   }
 
@@ -165,8 +172,8 @@
     return x & 1;
   }
 
-  template <typename T>
-  T normalize_for_read(T x) {
+  template <typename U>
+  U normalize_for_read(U x) {
     return x;
   }
 
@@ -199,11 +206,10 @@
     assert_field_offset_sane(_obj, offset);
   }
 
-  template <typename T>
   T get() {
     if (oopDesc::is_null(_obj)) {
       GuardUnsafeAccess guard(_thread);
-      T ret = RawAccess<>::load((volatile T*)addr());
+      T ret = RawAccess<>::load(addr());
       return normalize_for_read(ret);
     } else {
       T ret = HeapAccess<>::load_at(_obj, _offset);
@@ -211,22 +217,20 @@
     }
   }
 
-  template <typename T>
   void put(T x) {
     if (oopDesc::is_null(_obj)) {
       GuardUnsafeAccess guard(_thread);
-      RawAccess<>::store((volatile T*)addr(), normalize_for_write(x));
+      RawAccess<>::store(addr(), normalize_for_write(x));
     } else {
       HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
     }
   }
 
 
-  template <typename T>
   T get_volatile() {
     if (oopDesc::is_null(_obj)) {
       GuardUnsafeAccess guard(_thread);
-      volatile T ret = RawAccess<MO_SEQ_CST>::load((volatile T*)addr());
+      volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
       return normalize_for_read(ret);
     } else {
       T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
@@ -234,11 +238,10 @@
     }
   }
-  template <typename T>
   void put_volatile(T x) {
     if (oopDesc::is_null(_obj)) {
       GuardUnsafeAccess guard(_thread);
-      RawAccess<MO_SEQ_CST>::store((volatile T*)addr(), normalize_for_write(x));
+      RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
     } else {
       HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
     }
@@ -296,11 +299,11 @@
 #define DEFINE_GETSETOOP(java_type, Type) \
 \
 UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
-  return MemoryAccess(thread, obj, offset).get<java_type>(); \
+  return MemoryAccess<java_type>(thread, obj, offset).get(); \
 } UNSAFE_END \
 \
 UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
-  MemoryAccess(thread, obj, offset).put<java_type>(x); \
+  MemoryAccess<java_type>(thread, obj, offset).put(x); \
 } UNSAFE_END \
 \
 // END DEFINE_GETSETOOP.
@@ -319,11 +322,11 @@
 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
 UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
-  return MemoryAccess(thread, obj, offset).get_volatile<java_type>(); \
+  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
 } UNSAFE_END \
 \
 UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
-  MemoryAccess(thread, obj, offset).put_volatile<java_type>(x); \
+  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
 } UNSAFE_END \
 \
 // END DEFINE_GETSETOOP_VOLATILE.
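For context, here is a minimal standalone sketch of the ordering argument made in the new addr() comment: because both the doing_unsafe_access flag accesses and the raw memory access are volatile, the compiler may not reorder them past each other, so if the access faults, a handler can trust that the flag is already set. Only GuardUnsafeAccess and set_doing_unsafe_access() mirror names from unsafe.cpp; the Thread struct, the pointer arithmetic, and the get/put bodies below are simplified illustrative assumptions, not the HotSpot implementation (which dispatches through RawAccess/HeapAccess and the VM's signal handler).

// Standalone sketch, not HotSpot code: volatile flag + volatile access,
// so the access is guaranteed to happen inside the guarded scope.
#include <cstddef>

struct Thread {
  volatile bool _doing_unsafe_access = false;
  void set_doing_unsafe_access(bool b) { _doing_unsafe_access = b; }
  bool doing_unsafe_access() const { return _doing_unsafe_access; }
};

// RAII guard mirroring GuardUnsafeAccess in unsafe.cpp: the flag is set
// (volatile store) for exactly the scope of the raw access.
class GuardUnsafeAccess {
  Thread* _thread;
public:
  explicit GuardUnsafeAccess(Thread* thread) : _thread(thread) {
    _thread->set_doing_unsafe_access(true);
  }
  ~GuardUnsafeAccess() {
    _thread->set_doing_unsafe_access(false);
  }
};

template <typename T>
class MemoryAccess {
  Thread* _thread;
  void* _base;            // illustrative stand-in for the oop/offset pair
  std::ptrdiff_t _offset;

  // The point of the patch: returning volatile T* makes the dereference a
  // volatile access, which the compiler cannot hoist out of the guard.
  volatile T* addr() {
    return reinterpret_cast<volatile T*>(static_cast<char*>(_base) + _offset);
  }

public:
  MemoryAccess(Thread* thread, void* base, std::ptrdiff_t offset)
    : _thread(thread), _base(base), _offset(offset) {}

  T get() {
    GuardUnsafeAccess guard(_thread);
    // May fault (e.g. a truncated memory-mapped file); the fault handler
    // then sees doing_unsafe_access() == true and can unwind gracefully.
    return *addr();
  }

  void put(T x) {
    GuardUnsafeAccess guard(_thread);
    *addr() = x;
  }
};

This also explains the shape of the patch: moving the template parameter from the accessor methods up to the MemoryAccess class is what lets addr() name a volatile T* return type, and the member templates normalize_for_write()/normalize_for_read() are renamed to U so they no longer shadow the class-level T.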