src/hotspot/share/prims/unsafe.cpp

@@ -56,19 +56,30 @@
 
 #define MAX_OBJECT_SIZE \
   ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
     + ((julong)max_jint * sizeof(double)) )
 
-
 #define UNSAFE_ENTRY(result_type, header) \
   JVM_ENTRY(static result_type, header)
 
+#define UNSAFE_ACCESS_ENTRY(result_type, header) \
+  JVM_ENTRY(static result_type, header) \
+  thread->set_doing_unsafe_access(true);
+
 #define UNSAFE_LEAF(result_type, header) \
   JVM_LEAF(static result_type, header)
 
-#define UNSAFE_END JVM_END
+#define UNSAFE_ACCESS_LEAF(result_type, header) \
+  JVM_LEAF(static result_type, header) \
+  JavaThread* thread = JavaThread::thread_from_jni_environment(env); \
+  thread->set_doing_unsafe_access(true);
+
+#define UNSAFE_ACCESS_END \
+  thread->set_doing_unsafe_access(false); \
+  JVM_END
 
+#define UNSAFE_END JVM_END
 
 static inline void* addr_from_java(jlong addr) {
   // This assert fails in a variety of ways on 32-bit systems.
   // It is impossible to predict whether native code that converts
   // pointers to longs will sign-extend or zero-extend the addresses.

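The new UNSAFE_ACCESS_* macros bracket the raw memory operations below with the JavaThread's doing_unsafe_access flag, so the VM can tell that a hardware fault raised inside one of these copies (for example a SIGBUS when a memory-mapped file has been truncated) occurred during an Unsafe access rather than in arbitrary VM code. As a rough sketch (assuming the usual JVM_ENTRY expansion, with the native-to-VM thread-state transition and other bookkeeping elided), a function written as

  UNSAFE_ACCESS_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
    // body
  } UNSAFE_ACCESS_END

expands to approximately:

  static void Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj,
                                jlong offset, jlong size, jbyte value) {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    // ... JVM_ENTRY transition and checks elided ...
    thread->set_doing_unsafe_access(true);   // added by UNSAFE_ACCESS_ENTRY
    {
      // body
    }
    thread->set_doing_unsafe_access(false);  // added by UNSAFE_ACCESS_END
  }
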
@@ -523,36 +534,36 @@
   void* p = addr_from_java(addr);
 
   os::free(p);
 } UNSAFE_END
 
-UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
+UNSAFE_ACCESS_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
   size_t sz = (size_t)size;
 
   oop base = JNIHandles::resolve(obj);
   void* p = index_oop_from_field_offset_long(base, offset);
 
   Copy::fill_to_memory_atomic(p, sz, value);
-} UNSAFE_END
+} UNSAFE_ACCESS_END
 
-UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
+UNSAFE_ACCESS_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
   size_t sz = (size_t)size;
 
   oop srcp = JNIHandles::resolve(srcObj);
   oop dstp = JNIHandles::resolve(dstObj);
 
   void* src = index_oop_from_field_offset_long(srcp, srcOffset);
   void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
 
   Copy::conjoint_memory_atomic(src, dst, sz);
-} UNSAFE_END
+} UNSAFE_ACCESS_END
 
 // This function is a leaf since if the source and destination are both in native memory
 // the copy may potentially be very large, and we don't want to disable GC if we can avoid it.
 // If either source or destination (or both) are on the heap, the function will enter VM using
 // JVM_ENTRY_FROM_LEAF
-UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
+UNSAFE_ACCESS_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
   size_t sz = (size_t)size;
   size_t esz = (size_t)elemSize;
 
   if (srcObj == NULL && dstObj == NULL) {
     // Both src & dst are in native memory

@@ -571,11 +582,11 @@
       address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);
 
       Copy::conjoint_swap(src, dst, sz, esz);
     } JVM_END
   }
-} UNSAFE_END
+} UNSAFE_ACCESS_END
 
 ////// Random queries
 
 UNSAFE_LEAF(jint, Unsafe_AddressSize0(JNIEnv *env, jobject unsafe)) {
   return sizeof(void*);
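
For context on what consumes the flag: the per-platform signal handlers (under os_cpu/) check doing_unsafe_access() when a SIGBUS arrives and, if it is set, resume the thread at a stub that raises an InternalError in the Java caller instead of aborting the VM. A loosely sketched, illustrative version of that check (names are approximate, not the exact handler code):

  // inside the platform signal handler, after the faulting JavaThread is identified
  if (sig == SIGBUS && thread->doing_unsafe_access()) {
    // Divert execution to a stub that posts an InternalError to the Java
    // caller rather than treating the fault as a fatal VM error.
    stub = handle_unsafe_access(thread, pc);  // illustrative helper name
  }

With UNSAFE_ACCESS_LEAF, Unsafe_CopySwapMemory0 now sets the flag as well, including the purely native-to-native case that never enters the VM.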