104 }
105
106 inline jlong field_offset_from_byte_offset(jlong byte_offset) {
107 return byte_offset;
108 }
109
110 inline jint invocation_key_from_method_slot(jint slot) {
111 return slot;
112 }
113
114 inline jint invocation_key_to_method_slot(jint key) {
115 return key;
116 }
117
// Computes the raw address for an Unsafe access: base oop 'p' (may be NULL
// for absolute off-heap addressing) plus the decoded byte offset.
inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  jlong byte_offset = field_offset_to_byte_offset(field_offset);
#ifdef ASSERT
  if (p != NULL) {
    // With a non-NULL base, the offset must land inside the object.
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      // Cross-check raw pointer arithmetic against oop field addressing.
      void* ptr_plus_disp = (address)p + byte_offset;
      assert((void*)p->obj_field_addr<oop>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_base");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, err_msg("Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, byte_offset, p_size));
  }
#endif
  // On 32-bit platforms add a narrowed displacement; the sizeof comparison
  // constant-folds, so only one branch survives compilation.
  if (sizeof(char*) == sizeof(jint))    // (this constant folds!)
    return (address)p + (jint) byte_offset;
  else
    return (address)p + byte_offset;
}
137
138 // Externally callable versions:
139 // (Use these in compiler intrinsics which emulate unsafe primitives.)
140 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
141 return field_offset;
142 }
143 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
144 return byte_offset;
145 }
146 jint Unsafe_invocation_key_from_method_slot(jint slot) {
147 return invocation_key_from_method_slot(slot);
148 }
149 jint Unsafe_invocation_key_to_method_slot(jint key) {
150 return invocation_key_to_method_slot(key);
151 }
152
153
154 ///// Data in the Java heap.
155
// Reads a primitive field of 'type_name' at (obj, offset) into a new local 'v'.
// Expands to two statements; the caller supplies the enclosing scope.
#define GET_FIELD(obj, offset, type_name, v) \
  oop p = JNIHandles::resolve(obj); \
  type_name v = *(type_name*)index_oop_from_field_offset_long(p, offset)
159
// Writes 'x' into a primitive field of 'type_name' at (obj, offset).
#define SET_FIELD(obj, offset, type_name, x) \
  oop p = JNIHandles::resolve(obj); \
  *(type_name*)index_oop_from_field_offset_long(p, offset) = x
163
// Volatile read: load-acquire of the field into local 'v'.  On CPUs that are
// not multiple-copy atomic, a leading full fence is issued to support IRIW.
#define GET_FIELD_VOLATILE(obj, offset, type_name, v) \
  oop p = JNIHandles::resolve(obj); \
  if (support_IRIW_for_not_multiple_copy_atomic_cpu) { \
    OrderAccess::fence(); \
  } \
  volatile type_name v = OrderAccess::load_acquire((volatile type_name*)index_oop_from_field_offset_long(p, offset));
170
// Volatile write: release-store of 'x' followed by a full fence.
#define SET_FIELD_VOLATILE(obj, offset, type_name, x) \
  oop p = JNIHandles::resolve(obj); \
  OrderAccess::release_store_fence((volatile type_name*)index_oop_from_field_offset_long(p, offset), x);
174
175
176 // Get/SetObject must be special-cased, since it works with handles.
177
178 // These functions allow a null base pointer with an arbitrary address.
179 // But if the base pointer is non-null, the offset should make some sense.
180 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
// Unsafe.getObject: read an oop at (obj, offset) and return it as a local handle.
UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset))
  UnsafeWrapper("Unsafe_GetObject");
  oop p = JNIHandles::resolve(obj);
  oop v;
  if (UseCompressedOops) {
    // Compressed-oops heap: load the narrow oop and decode it.
    narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
    v = oopDesc::decode_heap_oop(n);
  } else {
    v = *(oop*)index_oop_from_field_offset_long(p, offset);
  }
  jobject ret = JNIHandles::make_local(env, v);
#if INCLUDE_ALL_GCS
  // We could be accessing the referent field in a reference
  // object. If G1 is enabled then we need to register non-null
  // referent with the SATB barrier.
  if (UseG1GC) {
    bool needs_barrier = false;

    if (ret != NULL) {
      if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) {
        // Only Reference subclasses (reference_type != REF_NONE) need the
        // SATB enqueue of the loaded referent.
        oop o = JNIHandles::resolve(obj);
        Klass* k = o->klass();
        if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
          assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
          needs_barrier = true;
        }
      }
    }

    if (needs_barrier) {
      oop referent = JNIHandles::resolve(ret);
      G1SATBCardTableModRefBS::enqueue(referent);
    }
  }
#endif // INCLUDE_ALL_GCS
  return ret;
UNSAFE_END
218
// Unsafe.putObject: store oop 'x_h' at (obj, offset) via oop_store, which
// applies the GC store barriers.
UNSAFE_ENTRY(void, Unsafe_SetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
  UnsafeWrapper("Unsafe_SetObject");
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  if (UseCompressedOops) {
    oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
  } else {
    oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
  }
UNSAFE_END
229
// Unsafe.getObjectVolatile: volatile oop read with trailing acquire.
UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset))
  UnsafeWrapper("Unsafe_GetObjectVolatile");
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);
  volatile oop v;
  if (UseCompressedOops) {
    volatile narrowOop n = *(volatile narrowOop*) addr;
    // NOTE(review): the const_cast dance assigns through the volatile local;
    // (void) discards the expression value — keep exactly as written.
    (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
  } else {
    (void)const_cast<oop&>(v = *(volatile oop*) addr);
  }
  // Acquire after the volatile load, pairing with a releasing store.
  OrderAccess::acquire();
  return JNIHandles::make_local(env, v);
UNSAFE_END
244
// Unsafe.putObjectVolatile: release; barriered oop store; full fence.
UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
  UnsafeWrapper("Unsafe_SetObjectVolatile");
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);
  OrderAccess::release();
  if (UseCompressedOops) {
    oop_store((narrowOop*)addr, x);
  } else {
    oop_store((oop*)addr, x);
  }
  OrderAccess::fence();
UNSAFE_END
258
// Reads an uncompressed (full-width) oop from an absolute native address.
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr))
  UnsafeWrapper("Unsafe_GetUncompressedObject");
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(env, v);
UNSAFE_END
264
265 UNSAFE_ENTRY(jclass, Unsafe_GetJavaMirror(JNIEnv *env, jobject unsafe, jlong metaspace_klass))
266 UnsafeWrapper("Unsafe_GetJavaMirror");
267 Klass* klass = (Klass*) (address) metaspace_klass;
268 return (jclass) JNIHandles::make_local(klass->java_mirror());
295 // the simplest and is used for the current implementation. Note that the Java object
296 // that contains the field, can not, in general, be used for locking. To do so can lead
297 // to deadlocks as we may introduce locking into what appears to the Java code to be a
298 // lock-free path.
299 //
300 // As all the locked-regions are very short and themselves non-blocking we can treat
301 // them as leaf routines and elide safepoint checks (ie we don't perform any thread
302 // state transitions even when blocking for the lock). Note that if we do choose to
303 // add safepoint checks and thread state transitions, we must ensure that we calculate
304 // the address of the field _after_ we have acquired the lock, else the object may have
305 // been moved by the GC
306
// Unsafe.getLongVolatile: on platforms with native 8-byte CAS use the
// volatile-load macro; otherwise emulate atomicity with UnsafeJlong_lock
// (see the locking discussion in the comment block above).
UNSAFE_ENTRY(jlong, Unsafe_GetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset))
  UnsafeWrapper("Unsafe_GetLongVolatile");
  {
    if (VM_Version::supports_cx8()) {
      GET_FIELD_VOLATILE(obj, offset, jlong, v);
      return v;
    }
    else {
      // Address is computed before taking the lock; the lock elides
      // safepoint checks so the object cannot move in between.
      Handle p (THREAD, JNIHandles::resolve(obj));
      jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
      MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
      jlong value = Atomic::load(addr);
      return value;
    }
  }
UNSAFE_END
323
// Unsafe.putLongVolatile: volatile-store macro when cx8 is available,
// otherwise a locked store under UnsafeJlong_lock.
UNSAFE_ENTRY(void, Unsafe_SetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x))
  UnsafeWrapper("Unsafe_SetLongVolatile");
  {
    if (VM_Version::supports_cx8()) {
      SET_FIELD_VOLATILE(obj, offset, jlong, x);
    }
    else {
      Handle p (THREAD, JNIHandles::resolve(obj));
      jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
      MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
      Atomic::store(x, addr);
    }
  }
UNSAFE_END
338
339 #endif // not SUPPORTS_NATIVE_CX8
340
// Reports platform endianness; resolved at compile time via VM_LITTLE_ENDIAN.
UNSAFE_ENTRY(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_IsBigEndian0");
  {
#ifdef VM_LITTLE_ENDIAN
    return false;
#else
    return true;
#endif
  }
UNSAFE_END
351
// Instantiate the volatile get/set entry points for the remaining primitive
// types; jlong only where the platform provides native 8-byte atomics.
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#ifdef SUPPORTS_NATIVE_CX8
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
#endif

#undef DEFINE_GETSETOOP_VOLATILE
426
427 // The non-intrinsified versions of setOrdered just use setVolatile
428
// Non-intrinsified putOrderedInt: implemented as a full volatile store
// (see comment above: setOrdered just uses setVolatile here).
UNSAFE_ENTRY(void, Unsafe_SetOrderedInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x))
  UnsafeWrapper("Unsafe_SetOrderedInt");
  SET_FIELD_VOLATILE(obj, offset, jint, x);
UNSAFE_END
433
// Non-intrinsified putOrderedObject: same sequence as putObjectVolatile
// (release; barriered store; fence).
UNSAFE_ENTRY(void, Unsafe_SetOrderedObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
  UnsafeWrapper("Unsafe_SetOrderedObject");
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);
  OrderAccess::release();
  if (UseCompressedOops) {
    oop_store((narrowOop*)addr, x);
  } else {
    oop_store((oop*)addr, x);
  }
  OrderAccess::fence();
UNSAFE_END
447
// Non-intrinsified putOrderedLong: volatile store when 8-byte atomics exist,
// else a locked store under UnsafeJlong_lock.
UNSAFE_ENTRY(void, Unsafe_SetOrderedLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x))
  UnsafeWrapper("Unsafe_SetOrderedLong");
#ifdef SUPPORTS_NATIVE_CX8
  SET_FIELD_VOLATILE(obj, offset, jlong, x);
#else
  // Keep old code for platforms which may not have atomic long (8 bytes) instructions
  {
    if (VM_Version::supports_cx8()) {
      SET_FIELD_VOLATILE(obj, offset, jlong, x);
    }
    else {
      Handle p (THREAD, JNIHandles::resolve(obj));
      jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
      MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
      Atomic::store(x, addr);
    }
  }
#endif
UNSAFE_END
467
// Unsafe.loadFence: acquire-only ordering barrier.
UNSAFE_ENTRY(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_LoadFence");
  OrderAccess::acquire();
UNSAFE_END
472
// Unsafe.storeFence: release-only ordering barrier.
UNSAFE_ENTRY(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_StoreFence");
  OrderAccess::release();
UNSAFE_END
477
478 UNSAFE_ENTRY(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe))
479 UnsafeWrapper("Unsafe_FullFence");
618 }
619 return addr_to_java(x);
620 UNSAFE_END
621
// Unsafe.freeMemory: release native memory; a zero address is a no-op.
UNSAFE_ENTRY(void, Unsafe_FreeMemory(JNIEnv *env, jobject unsafe, jlong addr))
  UnsafeWrapper("Unsafe_FreeMemory");
  void* p = addr_from_java(addr);
  if (p == NULL) {
    return;
  }
  os::free(p);
UNSAFE_END
630
// Unsafe.setMemory: fill 'size' bytes at (obj, offset) with 'value'.
// Throws IllegalArgumentException if the size is negative or does not
// fit in size_t on this platform.
UNSAFE_ENTRY(void, Unsafe_SetMemory(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value))
  UnsafeWrapper("Unsafe_SetMemory");
  size_t sz = (size_t)size;
  if (sz != (julong)size || size < 0) {
    THROW(vmSymbols::java_lang_IllegalArgumentException());
  }
  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);
  Copy::fill_to_memory_atomic(p, sz, value);
UNSAFE_END
641
// Unsafe.copyMemory: copy 'size' bytes between (srcObj, srcOffset) and
// (dstObj, dstOffset).  The destination must be NULL (off-heap) or a
// primitive (type) array — oop arrays would require card marking.
UNSAFE_ENTRY(void, Unsafe_CopyMemory(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size))
  UnsafeWrapper("Unsafe_CopyMemory");
  if (size == 0) {
    return;
  }
  // Reject negative sizes and values that don't fit in size_t.
  size_t sz = (size_t)size;
  if (sz != (julong)size || size < 0) {
    THROW(vmSymbols::java_lang_IllegalArgumentException());
  }
  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);
  if (dstp != NULL && !dstp->is_typeArray()) {
    // NYI: This works only for non-oop arrays at present.
    // Generalizing it would be reasonable, but requires card marking.
    // Also, autoboxing a Long from 0L in copyMemory(x,y, 0L,z, n) would be bad.
    THROW(vmSymbols::java_lang_IllegalArgumentException());
  }
  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  Copy::conjoint_memory_atomic(src, dst, sz);
UNSAFE_END
663
664
665 ////// Random queries
666
667 // See comment at file start about UNSAFE_LEAF
668 //UNSAFE_LEAF(jint, Unsafe_AddressSize())
// Unsafe.addressSize: the platform pointer width in bytes.
UNSAFE_ENTRY(jint, Unsafe_AddressSize(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_AddressSize");
  return sizeof(void*);
UNSAFE_END
1065 // let caller initialize it as needed...
1066
1067 return (jclass) res_jh;
1068 }
1069 UNSAFE_END
1070
1071
1072
// Unsafe.throwException: rethrow 'thr' via JNI.  Requires a transition
// to native state because env->Throw is a JNI call.
UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr))
  UnsafeWrapper("Unsafe_ThrowException");
  {
    ThreadToNativeFromVM ttnfv(thread);
    env->Throw(thr);
  }
UNSAFE_END
1080
1081 // JSR166 ------------------------------------------------------------------
1082
// Unsafe.compareAndSwapObject: atomically replace the oop at (obj, offset)
// with 'x_h' if it currently equals 'e_h'.  On success the card-table /
// post-write barrier is applied for the new value.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h))
  UnsafeWrapper("Unsafe_CompareAndSwapObject");
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
  oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
  jboolean success = (res == e);
  if (success)
    update_barrier_set((void*)addr, x);
  return success;
UNSAFE_END
1095
// Unsafe.compareAndSwapInt: plain atomic cmpxchg of a jint field.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x))
  UnsafeWrapper("Unsafe_CompareAndSwapInt");
  oop p = JNIHandles::resolve(obj);
  jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
  return (jint)(Atomic::cmpxchg(x, addr, e)) == e;
UNSAFE_END
1102
// Unsafe.compareAndSwapLong: native 8-byte cmpxchg when available; otherwise
// emulated under UnsafeJlong_lock with a load/compare/store sequence.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x))
  UnsafeWrapper("Unsafe_CompareAndSwapLong");
  Handle p (THREAD, JNIHandles::resolve(obj));
  jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
#ifdef SUPPORTS_NATIVE_CX8
  return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
#else
  if (VM_Version::supports_cx8())
    return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
  else {
    // Lock-based emulation; all jlong Unsafe ops share the same lock.
    jboolean success = false;
    MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
    jlong val = Atomic::load(addr);
    if (val == e) { Atomic::store(x, addr); success = true; }
    return success;
  }
#endif
UNSAFE_END
1121
1122 UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time))
1123 UnsafeWrapper("Unsafe_Park");
1124 EventThreadPark event;
1125 HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
|
104 }
105
106 inline jlong field_offset_from_byte_offset(jlong byte_offset) {
107 return byte_offset;
108 }
109
110 inline jint invocation_key_from_method_slot(jint slot) {
111 return slot;
112 }
113
114 inline jint invocation_key_to_method_slot(jint key) {
115 return key;
116 }
117
// Computes the raw address for an Unsafe access: base oop 'p' (may be NULL
// for absolute off-heap addressing) plus the decoded byte offset.
inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  jlong byte_offset = field_offset_to_byte_offset(field_offset);
#ifdef ASSERT
  if (p != NULL) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      // We need to preemptively evacuate the object here to make the comparison
      // in the assert below not give false negatives in case the object
      // gets moved by concurrent threads while executing this code.
      p = oopDesc::bs()->resolve_and_maybe_copy_oop(p);
      void* ptr_plus_disp = (address)p + byte_offset;
      assert((void*)p->obj_field_addr<oop>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_base");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, err_msg("Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, byte_offset, p_size));
  }
#endif
  // On 32-bit platforms add a narrowed displacement; the sizeof comparison
  // constant-folds, so only one branch survives compilation.
  if (sizeof(char*) == sizeof(jint))    // (this constant folds!)
    return (address)p + (jint) byte_offset;
  else
    return (address)p + byte_offset;
}
141
142 // Externally callable versions:
143 // (Use these in compiler intrinsics which emulate unsafe primitives.)
144 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
145 return field_offset;
146 }
147 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
148 return byte_offset;
149 }
150 jint Unsafe_invocation_key_from_method_slot(jint slot) {
151 return invocation_key_from_method_slot(slot);
152 }
153 jint Unsafe_invocation_key_to_method_slot(jint key) {
154 return invocation_key_to_method_slot(key);
155 }
156
157
158 ///// Data in the Java heap.
159
// Reads a primitive field at (obj, offset) into local 'v', first resolving
// the base oop through the GC read barrier (oopDesc::bs()->resolve_oop).
#define GET_FIELD(obj, offset, type_name, v) \
  oop p = JNIHandles::resolve(obj); \
  p = oopDesc::bs()->resolve_oop(p); \
  type_name v = *(type_name*)index_oop_from_field_offset_long(p, offset)
164
// Writes 'x' into a primitive field at (obj, offset); the base oop goes
// through the write barrier (resolve_and_maybe_copy_oop) before the store.
#define SET_FIELD(obj, offset, type_name, x) \
  oop p = JNIHandles::resolve(obj); \
  p = oopDesc::bs()->resolve_and_maybe_copy_oop(p); \
  *(type_name*)index_oop_from_field_offset_long(p, offset) = x
169
// Volatile read through the GC read barrier: load-acquire into local 'v'.
// On CPUs that are not multiple-copy atomic, a leading fence supports IRIW.
#define GET_FIELD_VOLATILE(obj, offset, type_name, v) \
  oop p = JNIHandles::resolve(obj); \
  p = oopDesc::bs()->resolve_oop(p); \
  if (support_IRIW_for_not_multiple_copy_atomic_cpu) { \
    OrderAccess::fence(); \
  } \
  volatile type_name v = OrderAccess::load_acquire((volatile type_name*)index_oop_from_field_offset_long(p, offset));
177
// Volatile write through the GC write barrier, then release-store + fence.
#define SET_FIELD_VOLATILE(obj, offset, type_name, x) \
  oop p = JNIHandles::resolve(obj); \
  p = oopDesc::bs()->resolve_and_maybe_copy_oop(p); \
  OrderAccess::release_store_fence((volatile type_name*)index_oop_from_field_offset_long(p, offset), x);
182
183
184 // Get/SetObject must be special-cased, since it works with handles.
185
186 // These functions allow a null base pointer with an arbitrary address.
187 // But if the base pointer is non-null, the offset should make some sense.
188 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
// Unsafe.getObject: read an oop at (obj, offset), resolving the base
// through the GC read barrier first.
UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset))
  UnsafeWrapper("Unsafe_GetObject");
  oop p = JNIHandles::resolve(obj);
  p = oopDesc::bs()->resolve_oop(p);
  oop v;
  if (UseCompressedOops) {
    narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
    v = oopDesc::decode_heap_oop(n);
  } else {
    v = *(oop*)index_oop_from_field_offset_long(p, offset);
  }
  jobject ret = JNIHandles::make_local(env, v);
#if INCLUDE_ALL_GCS
  // We could be accessing the referent field in a reference
  // object. If G1 is enabled then we need to register non-null
  // referent with the SATB barrier.
  // The same enqueue is performed for Shenandoah (see the flag test below).
  if (UseG1GC || UseShenandoahGC) {
    bool needs_barrier = false;

    if (ret != NULL) {
      if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) {
        oop o = JNIHandles::resolve(obj);
        Klass* k = o->klass();
        if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
          assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
          needs_barrier = true;
        }
      }
    }

    if (needs_barrier) {
      oop referent = JNIHandles::resolve(ret);
      G1SATBCardTableModRefBS::enqueue(referent);
    }
  }
#endif // INCLUDE_ALL_GCS
  return ret;
UNSAFE_END
227
// Unsafe.putObject: store oop 'x_h' at (obj, offset).  The value goes
// through the read barrier and the base through the write barrier before
// the barriered oop_store.
UNSAFE_ENTRY(void, Unsafe_SetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
  UnsafeWrapper("Unsafe_SetObject");
  oop x = oopDesc::bs()->resolve_oop(JNIHandles::resolve(x_h));
  oop p = oopDesc::bs()->resolve_and_maybe_copy_oop(JNIHandles::resolve(obj));
  if (UseCompressedOops) {
    oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
  } else {
    oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
  }
UNSAFE_END
238
// Unsafe.getObjectVolatile: volatile oop read (base resolved through the
// GC read barrier) with trailing acquire.
UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset))
  UnsafeWrapper("Unsafe_GetObjectVolatile");
  oop p = JNIHandles::resolve(obj);
  p = oopDesc::bs()->resolve_oop(p);
  void* addr = index_oop_from_field_offset_long(p, offset);
  volatile oop v;
  if (UseCompressedOops) {
    volatile narrowOop n = *(volatile narrowOop*) addr;
    // NOTE(review): the const_cast dance assigns through the volatile local;
    // (void) discards the expression value — keep exactly as written.
    (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
  } else {
    (void)const_cast<oop&>(v = *(volatile oop*) addr);
  }
  OrderAccess::acquire();
  return JNIHandles::make_local(env, v);
UNSAFE_END
254
// Unsafe.putObjectVolatile: barrier-resolve value and base, then
// release; barriered oop store; full fence.
UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
  UnsafeWrapper("Unsafe_SetObjectVolatile");
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  x = oopDesc::bs()->resolve_oop(x);
  p = oopDesc::bs()->resolve_and_maybe_copy_oop(p);
  void* addr = index_oop_from_field_offset_long(p, offset);
  OrderAccess::release();
  if (UseCompressedOops) {
    oop_store((narrowOop*)addr, x);
  } else {
    oop_store((oop*)addr, x);
  }
  OrderAccess::fence();
UNSAFE_END
270
// Reads an uncompressed (full-width) oop from an absolute native address.
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr))
  UnsafeWrapper("Unsafe_GetUncompressedObject");
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(env, v);
UNSAFE_END
276
277 UNSAFE_ENTRY(jclass, Unsafe_GetJavaMirror(JNIEnv *env, jobject unsafe, jlong metaspace_klass))
278 UnsafeWrapper("Unsafe_GetJavaMirror");
279 Klass* klass = (Klass*) (address) metaspace_klass;
280 return (jclass) JNIHandles::make_local(klass->java_mirror());
307 // the simplest and is used for the current implementation. Note that the Java object
308 // that contains the field, can not, in general, be used for locking. To do so can lead
309 // to deadlocks as we may introduce locking into what appears to the Java code to be a
310 // lock-free path.
311 //
312 // As all the locked-regions are very short and themselves non-blocking we can treat
313 // them as leaf routines and elide safepoint checks (ie we don't perform any thread
314 // state transitions even when blocking for the lock). Note that if we do choose to
315 // add safepoint checks and thread state transitions, we must ensure that we calculate
316 // the address of the field _after_ we have acquired the lock, else the object may have
317 // been moved by the GC
318
// Unsafe.getLongVolatile: volatile-load macro (with read barrier) when the
// platform has 8-byte CAS; otherwise a locked load under UnsafeJlong_lock.
UNSAFE_ENTRY(jlong, Unsafe_GetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset))
  UnsafeWrapper("Unsafe_GetLongVolatile");
  {
    if (VM_Version::supports_cx8()) {
      GET_FIELD_VOLATILE(obj, offset, jlong, v);
      return v;
    }
    else {
      // Read barrier applied before the address is computed.
      Handle p (THREAD, oopDesc::bs()->resolve_oop(JNIHandles::resolve(obj)));
      jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
      MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
      jlong value = Atomic::load(addr);
      return value;
    }
  }
UNSAFE_END
335
// Unsafe.putLongVolatile: volatile-store macro (with write barrier) when
// cx8 is available; otherwise a locked store under UnsafeJlong_lock.
UNSAFE_ENTRY(void, Unsafe_SetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x))
  UnsafeWrapper("Unsafe_SetLongVolatile");
  {
    if (VM_Version::supports_cx8()) {
      SET_FIELD_VOLATILE(obj, offset, jlong, x);
    }
    else {
      // Write barrier applied before the address is computed.
      Handle p (THREAD, oopDesc::bs()->resolve_and_maybe_copy_oop(JNIHandles::resolve(obj)));
      jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
      MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
      Atomic::store(x, addr);
    }
  }
UNSAFE_END
350
351 #endif // not SUPPORTS_NATIVE_CX8
352
// Reports platform endianness; resolved at compile time via VM_LITTLE_ENDIAN.
UNSAFE_ENTRY(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_IsBigEndian0");
  {
#ifdef VM_LITTLE_ENDIAN
    return false;
#else
    return true;
#endif
  }
UNSAFE_END
363
// Instantiate the volatile get/set entry points for the remaining primitive
// types; jlong only where the platform provides native 8-byte atomics.
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#ifdef SUPPORTS_NATIVE_CX8
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
#endif

#undef DEFINE_GETSETOOP_VOLATILE
438
439 // The non-intrinsified versions of setOrdered just use setVolatile
440
// Non-intrinsified putOrderedInt: implemented as a full volatile store
// (see comment above: setOrdered just uses setVolatile here).
UNSAFE_ENTRY(void, Unsafe_SetOrderedInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x))
  UnsafeWrapper("Unsafe_SetOrderedInt");
  SET_FIELD_VOLATILE(obj, offset, jint, x);
UNSAFE_END
445
// Non-intrinsified putOrderedObject: barrier-resolve value and base, then
// the same sequence as putObjectVolatile (release; store; fence).
UNSAFE_ENTRY(void, Unsafe_SetOrderedObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
  UnsafeWrapper("Unsafe_SetOrderedObject");
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  x = oopDesc::bs()->resolve_oop(x);
  p = oopDesc::bs()->resolve_and_maybe_copy_oop(p);
  void* addr = index_oop_from_field_offset_long(p, offset);
  OrderAccess::release();
  if (UseCompressedOops) {
    oop_store((narrowOop*)addr, x);
  } else {
    oop_store((oop*)addr, x);
  }
  OrderAccess::fence();
UNSAFE_END
461
// Non-intrinsified putOrderedLong: volatile store when 8-byte atomics exist,
// else a locked store under UnsafeJlong_lock (base write-barriered first).
UNSAFE_ENTRY(void, Unsafe_SetOrderedLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x))
  UnsafeWrapper("Unsafe_SetOrderedLong");
#ifdef SUPPORTS_NATIVE_CX8
  SET_FIELD_VOLATILE(obj, offset, jlong, x);
#else
  // Keep old code for platforms which may not have atomic long (8 bytes) instructions
  {
    if (VM_Version::supports_cx8()) {
      SET_FIELD_VOLATILE(obj, offset, jlong, x);
    }
    else {
      Handle p (THREAD, oopDesc::bs()->resolve_and_maybe_copy_oop(JNIHandles::resolve(obj)));
      jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
      MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
      Atomic::store(x, addr);
    }
  }
#endif
UNSAFE_END
481
// Unsafe.loadFence: acquire-only ordering barrier.
UNSAFE_ENTRY(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_LoadFence");
  OrderAccess::acquire();
UNSAFE_END
486
// Unsafe.storeFence: release-only ordering barrier.
UNSAFE_ENTRY(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_StoreFence");
  OrderAccess::release();
UNSAFE_END
491
492 UNSAFE_ENTRY(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe))
493 UnsafeWrapper("Unsafe_FullFence");
632 }
633 return addr_to_java(x);
634 UNSAFE_END
635
// Unsafe.freeMemory: release native memory; a zero address is a no-op.
UNSAFE_ENTRY(void, Unsafe_FreeMemory(JNIEnv *env, jobject unsafe, jlong addr))
  UnsafeWrapper("Unsafe_FreeMemory");
  void* p = addr_from_java(addr);
  if (p == NULL) {
    return;
  }
  os::free(p);
UNSAFE_END
644
// Unsafe.setMemory: fill 'size' bytes at (obj, offset) with 'value'.
// The base is write-barriered since the fill mutates it in place.
UNSAFE_ENTRY(void, Unsafe_SetMemory(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value))
  UnsafeWrapper("Unsafe_SetMemory");
  // Reject negative sizes and values that don't fit in size_t.
  size_t sz = (size_t)size;
  if (sz != (julong)size || size < 0) {
    THROW(vmSymbols::java_lang_IllegalArgumentException());
  }
  oop base = JNIHandles::resolve(obj);
  base = oopDesc::bs()->resolve_and_maybe_copy_oop(base);
  void* p = index_oop_from_field_offset_long(base, offset);
  Copy::fill_to_memory_atomic(p, sz, value);
UNSAFE_END
656
// Unsafe.copyMemory: copy 'size' bytes between (srcObj, srcOffset) and
// (dstObj, dstOffset).  Source is read-barriered, destination is
// write-barriered; destination must be NULL (off-heap) or a type array.
UNSAFE_ENTRY(void, Unsafe_CopyMemory(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size))
  UnsafeWrapper("Unsafe_CopyMemory");
  if (size == 0) {
    return;
  }
  // Reject negative sizes and values that don't fit in size_t.
  size_t sz = (size_t)size;
  if (sz != (julong)size || size < 0) {
    THROW(vmSymbols::java_lang_IllegalArgumentException());
  }
  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);
  srcp = oopDesc::bs()->resolve_oop(srcp);
  dstp = oopDesc::bs()->resolve_and_maybe_copy_oop(dstp);
  if (dstp != NULL && !dstp->is_typeArray()) {
    // NYI: This works only for non-oop arrays at present.
    // Generalizing it would be reasonable, but requires card marking.
    // Also, autoboxing a Long from 0L in copyMemory(x,y, 0L,z, n) would be bad.
    THROW(vmSymbols::java_lang_IllegalArgumentException());
  }
  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  Copy::conjoint_memory_atomic(src, dst, sz);
UNSAFE_END
680
681
682 ////// Random queries
683
684 // See comment at file start about UNSAFE_LEAF
685 //UNSAFE_LEAF(jint, Unsafe_AddressSize())
// Unsafe.addressSize: the platform pointer width in bytes.
UNSAFE_ENTRY(jint, Unsafe_AddressSize(JNIEnv *env, jobject unsafe))
  UnsafeWrapper("Unsafe_AddressSize");
  return sizeof(void*);
UNSAFE_END
1082 // let caller initialize it as needed...
1083
1084 return (jclass) res_jh;
1085 }
1086 UNSAFE_END
1087
1088
1089
// Unsafe.throwException: rethrow 'thr' via JNI.  Requires a transition
// to native state because env->Throw is a JNI call.
UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr))
  UnsafeWrapper("Unsafe_ThrowException");
  {
    ThreadToNativeFromVM ttnfv(thread);
    env->Throw(thr);
  }
UNSAFE_END
1097
1098 // JSR166 ------------------------------------------------------------------
1099
// Unsafe.compareAndSwapObject with Shenandoah support: under Shenandoah a
// CAS may fail spuriously because the field holds a different copy
// (from-space vs to-space pointer) of the expected object; in that case the
// loop retries with the value actually found, as long as both resolve to
// the same canonical object.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h))
  UnsafeWrapper("Unsafe_CompareAndSwapObject");
  // We are about to write to this entry so check to see if we need to copy it.
  oop p = oopDesc::bs()->resolve_and_maybe_copy_oop(JNIHandles::resolve(obj));
  HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
  oop x = JNIHandles::resolve(x_h);
  x = oopDesc::bs()->resolve_oop(x);
  oop old = JNIHandles::resolve(e_h);
  jboolean success;
  if (UseShenandoahGC) {
    oop expected;
    do {
      expected = old;
      old = oopDesc::atomic_compare_exchange_oop(x, addr, expected, true);
      success = (old == expected);
    } while ((! success) && oopDesc::bs()->resolve_oop(old) == oopDesc::bs()->resolve_oop(expected));
  } else {
    success = (old == oopDesc::atomic_compare_exchange_oop(x, addr, old, true));
  }
  // Post-write barrier for the newly stored value on success.
  if (success)
    update_barrier_set((void*)addr, x);
  return success;
UNSAFE_END
1123
// Unsafe.compareAndSwapInt: atomic cmpxchg of a jint field; the base is
// write-barriered first since the CAS mutates the object.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x))
  UnsafeWrapper("Unsafe_CompareAndSwapInt");
  // We are about to write to this entry so check to see if we need to copy it.
  oop p = oopDesc::bs()->resolve_and_maybe_copy_oop(JNIHandles::resolve(obj));
  jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
  return (jint)(Atomic::cmpxchg(x, addr, e)) == e;
UNSAFE_END
1131
// Unsafe.compareAndSwapLong: native 8-byte cmpxchg when available, else a
// locked emulation; the base is write-barriered before computing the address.
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x))
  UnsafeWrapper("Unsafe_CompareAndSwapLong");
  Handle p (THREAD, oopDesc::bs()->resolve_and_maybe_copy_oop(JNIHandles::resolve(obj)));
  jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
#ifdef SUPPORTS_NATIVE_CX8
  return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
#else
  if (VM_Version::supports_cx8())
    return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
  else {
    // Lock-based emulation; all jlong Unsafe ops share the same lock.
    jboolean success = false;
    MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
    jlong val = Atomic::load(addr);
    if (val == e) { Atomic::store(x, addr); success = true; }
    return success;
  }
#endif
UNSAFE_END
1150
1151 UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time))
1152 UnsafeWrapper("Unsafe_Park");
1153 EventThreadPark event;
1154 HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
|