src/share/vm/prims/unsafe.cpp
Index
Unified diffs
Context diffs
Sdiffs
Patch
New
Old
Previous File
Next File
*** old/src/share/vm/prims/unsafe.cpp Tue Mar 1 08:48:49 2016
--- new/src/share/vm/prims/unsafe.cpp Tue Mar 1 08:48:49 2016
*** 62,73 ****
--- 62,71 ----
#define UNSAFE_LEAF(result_type, header) \
JVM_LEAF(static result_type, header)
#define UNSAFE_END JVM_END
#define UnsafeWrapper(arg) /*nothing, for the present*/
static inline void* addr_from_java(jlong addr) {
// This assert fails in a variety of ways on 32-bit systems.
// It is impossible to predict whether native code that converts
// pointers to longs will sign-extend or zero-extend the addresses.
*** 158,169 ****
--- 156,165 ----
// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
UnsafeWrapper("Unsafe_GetObject");
oop p = JNIHandles::resolve(obj);
oop v;
if (UseCompressedOops) {
narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
*** 201,212 ****
--- 197,206 ----
return ret;
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
UnsafeWrapper("Unsafe_SetObject");
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
if (UseCompressedOops) {
oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
*** 214,225 ****
--- 208,217 ----
oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
}
} UNSAFE_END
UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
UnsafeWrapper("Unsafe_GetObjectVolatile");
oop p = JNIHandles::resolve(obj);
void* addr = index_oop_from_field_offset_long(p, offset);
volatile oop v;
*** 233,244 ****
--- 225,234 ----
OrderAccess::acquire();
return JNIHandles::make_local(env, v);
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
UnsafeWrapper("Unsafe_SetObjectVolatile");
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
void* addr = index_oop_from_field_offset_long(p, offset);
OrderAccess::release();
*** 250,277 ****
--- 240,261 ----
OrderAccess::fence();
} UNSAFE_END
// Reads an oop directly from a raw native address. The address must point
// at a valid, full-width (uncompressed) oop; no null or bounds checking is
// done here — presumably VM-internal callers guarantee validity (TODO confirm
// at the call sites).
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
UnsafeWrapper("Unsafe_GetUncompressedObject");
// Interpret the jlong as an address and load the oop stored there.
oop v = *(oop*) (address) addr;
// Wrap in a JNI local handle so the caller never sees a raw oop.
return JNIHandles::make_local(env, v);
} UNSAFE_END
// Returns the java.lang.Class mirror for a raw Klass* passed in as a jlong.
// No validity check is performed on the incoming pointer; the caller must
// supply a live Klass*.
UNSAFE_ENTRY(jclass, Unsafe_GetJavaMirror(JNIEnv *env, jobject unsafe, jlong metaspace_klass)) {
  UnsafeWrapper("Unsafe_GetJavaMirror");
  Klass* klass = (Klass*) (address) metaspace_klass;
  // Pass env explicitly, consistent with every other make_local call in this
  // file (the env-less overload has to re-derive the current thread itself).
  return (jclass) JNIHandles::make_local(env, klass->java_mirror());
} UNSAFE_END
// Returns the Klass* of the given object as a jlong (a raw metaspace
// pointer). NOTE(review): obj is resolved with resolve(), not
// resolve_non_null(), so a null handle would crash on o->klass() —
// presumably callers guarantee non-null; confirm at the call sites.
UNSAFE_ENTRY(jlong, Unsafe_GetKlassPointer(JNIEnv *env, jobject unsafe, jobject obj)) {
UnsafeWrapper("Unsafe_GetKlassPointer");
oop o = JNIHandles::resolve(obj);
jlong klass = (jlong) (address) o->klass();
return klass;
} UNSAFE_END
*** 304,315 ****
--- 288,297 ----
// add safepoint checks and thread state transitions, we must ensure that we calculate
// the address of the field _after_ we have acquired the lock, else the object may have
// been moved by the GC
UNSAFE_ENTRY(jlong, Unsafe_GetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
UnsafeWrapper("Unsafe_GetLongVolatile");
if (VM_Version::supports_cx8()) {
GET_FIELD_VOLATILE(obj, offset, jlong, v);
return v;
} else {
Handle p (THREAD, JNIHandles::resolve(obj));
*** 319,330 ****
--- 301,310 ----
return value;
}
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x)) {
UnsafeWrapper("Unsafe_SetLongVolatile");
if (VM_Version::supports_cx8()) {
SET_FIELD_VOLATILE(obj, offset, jlong, x);
} else {
Handle p (THREAD, JNIHandles::resolve(obj));
jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
*** 334,368 ****
--- 314,342 ----
} UNSAFE_END
#endif // not SUPPORTS_NATIVE_CX8
// Reports the platform byte order to Java: true on big-endian targets,
// false on little-endian ones. Decided entirely at compile time via the
// VM_LITTLE_ENDIAN configuration macro.
UNSAFE_LEAF(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe)) {
  UnsafeWrapper("Unsafe_IsBigEndian0");
#ifndef VM_LITTLE_ENDIAN
  return true;
#else
  return false;
#endif
} UNSAFE_END
// Reports whether this platform tolerates unaligned memory accesses, by
// exposing the VM's UseUnalignedAccesses flag to Java.
UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
UnsafeWrapper("Unsafe_UnalignedAccess0");
return UseUnalignedAccesses;
} UNSAFE_END
// Generates a matched pair of plain (non-volatile) field accessors for one
// primitive type:
//   Unsafe_Get<Type> — reads a java_type from (obj, offset) via GET_FIELD
//   Unsafe_Set<Type> — writes a java_type to  (obj, offset) via SET_FIELD
// The body is one long backslash-continued macro, so no comments may appear
// on the continued lines themselves.
#define DEFINE_GETSETOOP(java_type, Type) \
\
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
UnsafeWrapper("Unsafe_Get"#Type); \
GET_FIELD(obj, offset, java_type, v); \
return v; \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_Set##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
UnsafeWrapper("Unsafe_Set"#Type); \
SET_FIELD(obj, offset, java_type, x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP.
*** 378,394 ****
--- 352,366 ----
#undef DEFINE_GETSETOOP
// Generates a matched pair of volatile field accessors for one primitive
// type:
//   Unsafe_Get<Type>Volatile — volatile read  via GET_FIELD_VOLATILE
//   Unsafe_Set<Type>Volatile — volatile write via SET_FIELD_VOLATILE
// Fix: the UnsafeWrapper labels now carry the "Volatile" suffix so they match
// the actual entry-point names (they previously reused the plain Get/Set
// labels). UnsafeWrapper currently expands to nothing (see its definition
// above), so this is a label-only correction with no behavioral effect.
#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
\
UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
UnsafeWrapper("Unsafe_Get"#Type"Volatile"); \
GET_FIELD_VOLATILE(obj, offset, java_type, v); \
return v; \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_Set##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
UnsafeWrapper("Unsafe_Set"#Type"Volatile"); \
SET_FIELD_VOLATILE(obj, offset, java_type, x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP_VOLATILE.
*** 407,424 ****
--- 379,392 ----
#undef DEFINE_GETSETOOP_VOLATILE
// The non-intrinsified versions of setOrdered just use setVolatile
// Non-intrinsified fallback for putOrderedInt: per the comment above, the
// non-intrinsified setOrdered versions simply perform a full volatile store
// (a stronger but correct substitute for a release-only store).
UNSAFE_ENTRY(void, Unsafe_SetOrderedInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x)) {
UnsafeWrapper("Unsafe_SetOrderedInt");
SET_FIELD_VOLATILE(obj, offset, jint, x);
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetOrderedObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
UnsafeWrapper("Unsafe_SetOrderedObject");
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
void* addr = index_oop_from_field_offset_long(p, offset);
OrderAccess::release();
*** 430,441 ****
--- 398,407 ----
OrderAccess::fence();
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetOrderedLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x)) {
UnsafeWrapper("Unsafe_SetOrderedLong");
#ifdef SUPPORTS_NATIVE_CX8
SET_FIELD_VOLATILE(obj, offset, jlong, x);
#else
// Keep old code for platforms which may not have atomic long (8 bytes) instructions
*** 449,472 ****
--- 415,432 ----
}
#endif
} UNSAFE_END
// Unsafe.loadFence(): load-ordering barrier, implemented with the VM's
// acquire barrier.
UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
UnsafeWrapper("Unsafe_LoadFence");
OrderAccess::acquire();
} UNSAFE_END
// Unsafe.storeFence(): store-ordering barrier, implemented with the VM's
// release barrier.
UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
UnsafeWrapper("Unsafe_StoreFence");
OrderAccess::release();
} UNSAFE_END
// Unsafe.fullFence(): full two-way memory barrier (OrderAccess::fence()).
UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
UnsafeWrapper("Unsafe_FullFence");
OrderAccess::fence();
} UNSAFE_END
////// Data in the C heap.
*** 474,494 ****
--- 434,452 ----
// They just crash. Only a oop base pointer can generate a NullPointerException.
//
#define DEFINE_GETSETNATIVE(java_type, Type, native_type) \
\
UNSAFE_ENTRY(java_type, Unsafe_GetNative##Type(JNIEnv *env, jobject unsafe, jlong addr)) { \
UnsafeWrapper("Unsafe_GetNative"#Type); \
void* p = addr_from_java(addr); \
JavaThread* t = JavaThread::current(); \
t->set_doing_unsafe_access(true); \
java_type x = *(volatile native_type*)p; \
t->set_doing_unsafe_access(false); \
return x; \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_SetNative##Type(JNIEnv *env, jobject unsafe, jlong addr, java_type x)) { \
UnsafeWrapper("Unsafe_SetNative"#Type); \
JavaThread* t = JavaThread::current(); \
t->set_doing_unsafe_access(true); \
void* p = addr_from_java(addr); \
*(volatile native_type*)p = x; \
t->set_doing_unsafe_access(false); \
*** 505,516 ****
--- 463,472 ----
DEFINE_GETSETNATIVE(jdouble, Double, double);
#undef DEFINE_GETSETNATIVE
UNSAFE_ENTRY(jlong, Unsafe_GetNativeLong(JNIEnv *env, jobject unsafe, jlong addr)) {
UnsafeWrapper("Unsafe_GetNativeLong");
JavaThread* t = JavaThread::current();
// We do it this way to avoid problems with access to heap using 64
// bit loads, as jlong in heap could be not 64-bit aligned, and on
// some CPUs (SPARC) it leads to SIGBUS.
t->set_doing_unsafe_access(true);
*** 531,542 ****
--- 487,496 ----
return x;
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetNativeLong(JNIEnv *env, jobject unsafe, jlong addr, jlong x)) {
UnsafeWrapper("Unsafe_SetNativeLong");
JavaThread* t = JavaThread::current();
// see comment for Unsafe_GetNativeLong
t->set_doing_unsafe_access(true);
void* p = addr_from_java(addr);
*** 553,633 ****
--- 507,569 ----
t->set_doing_unsafe_access(false);
} UNSAFE_END
// Reads a native pointer ("address word") stored at the given native
// address and returns it to Java as a jlong.
UNSAFE_LEAF(jlong, Unsafe_GetNativeAddress(JNIEnv *env, jobject unsafe, jlong addr)) {
  UnsafeWrapper("Unsafe_GetNativeAddress");
  void** slot = (void**) addr_from_java(addr);
  return addr_to_java(*slot);
} UNSAFE_END
// Stores the pointer value x ("address word") into native memory at the
// given native address.
UNSAFE_LEAF(void, Unsafe_SetNativeAddress(JNIEnv *env, jobject unsafe, jlong addr, jlong x)) {
  UnsafeWrapper("Unsafe_SetNativeAddress");
  void** slot = (void**) addr_from_java(addr);
  *slot = addr_from_java(x);
} UNSAFE_END
////// Allocation requests
// Unsafe.allocateInstance: creates an instance of cls by delegating to JNI
// AllocObject (which, per the JNI spec, does not run any constructor). The
// inner scope transitions the thread out of the VM state
// (ThreadToNativeFromVM) before invoking a raw JNI function.
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
UnsafeWrapper("Unsafe_AllocateInstance");
{
ThreadToNativeFromVM ttnfv(thread);
return env->AllocObject(cls);
}
} UNSAFE_END
// Unsafe.allocateMemory0: malloc a block of native memory, with the size
// rounded up to a whole number of heap words. Argument validation is
// presumably done by the Java-side wrapper (note the "0" suffix on the
// entry point) — confirm against jdk.internal.misc.Unsafe.
UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  UnsafeWrapper("Unsafe_AllocateMemory");
  const size_t rounded = round_to((size_t) size, HeapWordSize);
  return addr_to_java(os::malloc(rounded, mtInternal));
} UNSAFE_END
// Unsafe.reallocateMemory0: resize a native block previously obtained from
// allocateMemory0, with the new size rounded up to whole heap words.
UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  UnsafeWrapper("Unsafe_ReallocateMemory0");
  const size_t rounded = round_to((size_t) size, HeapWordSize);
  void* old_block = addr_from_java(addr);
  return addr_to_java(os::realloc(old_block, rounded, mtInternal));
} UNSAFE_END
// Unsafe.freeMemory0: release a native block previously obtained from
// allocateMemory0 / reallocateMemory0.
UNSAFE_ENTRY(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  UnsafeWrapper("Unsafe_FreeMemory0");
  os::free(addr_from_java(addr));
} UNSAFE_END
// Unsafe.setMemory0: fill `size` bytes at (obj, offset) with `value`.
// Handles both heap memory (obj != NULL) and native memory (obj == NULL,
// offset used as an absolute address) via index_oop_from_field_offset_long.
// Delegates to Copy::fill_to_memory_atomic for the actual fill.
UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  UnsafeWrapper("Unsafe_SetMemory0");
  oop target = JNIHandles::resolve(obj);
  void* dest = index_oop_from_field_offset_long(target, offset);
  Copy::fill_to_memory_atomic(dest, (size_t) size, value);
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
UnsafeWrapper("Unsafe_CopyMemory0");
size_t sz = (size_t)size;
oop srcp = JNIHandles::resolve(srcObj);
oop dstp = JNIHandles::resolve(dstObj);
*** 640,651 ****
--- 576,585 ----
// This function is a leaf since if the source and destination are both in native memory
// the copy may potentially be very large, and we don't want to disable GC if we can avoid it.
// If either source or destination (or both) are on the heap, the function will enter VM using
// JVM_ENTRY_FROM_LEAF
UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
UnsafeWrapper("Unsafe_CopySwapMemory0");
size_t sz = (size_t)size;
size_t esz = (size_t)elemSize;
if (srcObj == NULL && dstObj == NULL) {
// Both src & dst are in native memory
*** 669,686 ****
--- 603,616 ----
} UNSAFE_END
////// Random queries
// Unsafe.addressSize0: size in bytes of a native pointer on this platform.
UNSAFE_LEAF(jint, Unsafe_AddressSize0(JNIEnv *env, jobject unsafe)) {
UnsafeWrapper("Unsafe_AddressSize");
return sizeof(void*);
} UNSAFE_END
// Unsafe.pageSize: the OS virtual-memory page size in bytes.
UNSAFE_LEAF(jint, Unsafe_PageSize()) {
UnsafeWrapper("Unsafe_PageSize");
return os::vm_page_size();
} UNSAFE_END
static jint find_field_offset(jobject field, int must_be_static, TRAPS) {
assert(field != NULL, "field must not be NULL");
*** 701,724 ****
--- 631,648 ----
int offset = InstanceKlass::cast(k)->field_offset(slot);
return field_offset_from_byte_offset(offset);
}
// Unsafe.objectFieldOffset0: returns the offset cookie for an instance
// field, delegating to find_field_offset with must_be_static = 0.
UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
UnsafeWrapper("Unsafe_ObjectFieldOffset0");
return find_field_offset(field, 0, THREAD);
} UNSAFE_END
// Unsafe.staticFieldOffset0: returns the offset cookie for a static field,
// delegating to find_field_offset with must_be_static = 1.
UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
UnsafeWrapper("Unsafe_StaticFieldOffset0");
return find_field_offset(field, 1, THREAD);
} UNSAFE_END
UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
UnsafeWrapper("Unsafe_StaticFieldBase0");
assert(field != NULL, "field must not be NULL");
// Note: In this VM implementation, a field address is always a short
// offset from the base of a a klass metaobject. Thus, the full dynamic
// range of the return type is never used. However, some implementations
*** 737,748 ****
--- 661,670 ----
return JNIHandles::make_local(env, mirror);
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
UnsafeWrapper("Unsafe_EnsureClassInitialized0");
assert(clazz != NULL, "clazz must not be NULL");
oop mirror = JNIHandles::resolve_non_null(clazz);
Klass* klass = java_lang_Class::as_Klass(mirror);
*** 752,763 ****
--- 674,683 ----
}
}
UNSAFE_END
UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
UnsafeWrapper("Unsafe_ShouldBeInitialized0");
assert(clazz != NULL, "clazz must not be NULL");
oop mirror = JNIHandles::resolve_non_null(clazz);
Klass* klass = java_lang_Class::as_Klass(mirror);
*** 789,810 ****
--- 709,726 ----
ShouldNotReachHere();
}
}
// Unsafe.arrayBaseOffset0: offset of element 0 for the given array class,
// expressed in the same cookie units as field offsets. Any exception from
// getBaseAndScale is propagated via CHECK_0.
UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  UnsafeWrapper("Unsafe_ArrayBaseOffset0");
  int header_bytes = 0;
  int elem_scale = 0;
  getBaseAndScale(header_bytes, elem_scale, clazz, CHECK_0);
  return field_offset_from_byte_offset(header_bytes);
} UNSAFE_END
UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
UnsafeWrapper("Unsafe_ArrayIndexScale0");
int base = 0, scale = 0;
getBaseAndScale(base, scale, clazz, CHECK_0);
// This VM packs both fields and array elements down to the byte.
// But watch out: If this changes, so that array references for
*** 897,908 ****
--- 813,822 ----
return result;
}
// Unsafe.defineClass0: defines a class from a byte array. Transitions the
// thread out of the VM state (ThreadToNativeFromVM) before delegating to
// the implementation helper, which uses raw JNI.
UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
UnsafeWrapper("Unsafe_DefineClass");
ThreadToNativeFromVM ttnfv(thread);
return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END
*** 1032,1043 ****
--- 946,955 ----
return instanceKlassHandle(THREAD, anonk);
}
UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
UnsafeWrapper("Unsafe_DefineAnonymousClass0");
ResourceMark rm(THREAD);
instanceKlassHandle anon_klass;
jobject res_jh = NULL;
u1* temp_alloc = NULL;
*** 1063,1085 ****
--- 975,991 ----
return (jclass) res_jh;
} UNSAFE_END
// Unsafe.throwException: raises the given Throwable via JNI Throw. The
// inner scope transitions the thread out of the VM state
// (ThreadToNativeFromVM) for the duration of the raw JNI call.
UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
UnsafeWrapper("Unsafe_ThrowException");
{
ThreadToNativeFromVM ttnfv(thread);
env->Throw(thr);
}
} UNSAFE_END
// JSR166 ------------------------------------------------------------------
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
UnsafeWrapper("Unsafe_CompareAndSwapObject");
oop x = JNIHandles::resolve(x_h);
oop e = JNIHandles::resolve(e_h);
oop p = JNIHandles::resolve(obj);
HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
*** 1091,1111 ****
--- 997,1013 ----
return true;
} UNSAFE_END
// Unsafe.compareAndSwapInt: atomically replaces the jint at (obj, offset)
// with x if it currently holds e. Success is detected by comparing the
// value returned by Atomic::cmpxchg (the previous contents) against e.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
UnsafeWrapper("Unsafe_CompareAndSwapInt");
oop p = JNIHandles::resolve(obj);
jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
return (jint)(Atomic::cmpxchg(x, addr, e)) == e;
} UNSAFE_END
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
UnsafeWrapper("Unsafe_CompareAndSwapLong");
Handle p(THREAD, JNIHandles::resolve(obj));
jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
#ifdef SUPPORTS_NATIVE_CX8
return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
*** 1125,1136 ****
--- 1027,1036 ----
}
#endif
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
UnsafeWrapper("Unsafe_Park");
EventThreadPark event;
HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
JavaThreadParkedState jtps(thread, time != 0);
thread->parker()->park(isAbsolute != 0, time);
*** 1145,1156 ****
--- 1045,1054 ----
event.commit();
}
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
UnsafeWrapper("Unsafe_Unpark");
Parker* p = NULL;
if (jthread != NULL) {
oop java_thread = JNIHandles::resolve_non_null(jthread);
if (java_thread != NULL) {
*** 1183,1194 ****
--- 1081,1090 ----
p->unpark();
}
} UNSAFE_END
UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
UnsafeWrapper("Unsafe_Loadavg");
const int max_nelem = 3;
double la[max_nelem];
jint ret;
typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
*** 1332,1345 ****
--- 1228,1237 ----
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.
// Registers the native methods of jdk.internal.misc.Unsafe with JNI. The
// thread leaves the VM state (ThreadToNativeFromVM) before calling
// RegisterNatives; a non-zero result (registration failure) is fatal via
// guarantee().
JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
UnsafeWrapper("JVM_RegisterJDKInternalMiscUnsafeMethods");
{
ThreadToNativeFromVM ttnfv(thread);
int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
}
} JVM_END
src/share/vm/prims/unsafe.cpp
Index
Unified diffs
Context diffs
Sdiffs
Patch
New
Old
Previous File
Next File