255
256 jlong x = Atomic::load(p);
257
258 return x;
259 }
260
// Store x to the wrapped address while holding UnsafeJlong_lock, so the
// 64-bit store is atomic with respect to other locked jlong accesses.
// (Compiled conditionally — the enclosing #if is outside this view;
// presumably platforms without native atomic 64-bit access. TODO confirm.)
void put_jlong_locked(jlong x) {
  // Guard raw memory access against faults on the unsafe address.
  GuardUnsafeAccess guard(_thread, _obj);

  // No safepoint check: this lock may be taken in contexts where
  // safepoint checks are not allowed.
  MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);

  jlong* p = (jlong*)addr();

  // normalize_for_write presumably canonicalizes the value before the
  // store — see its definition elsewhere in this file.
  Atomic::store(normalize_for_write(x), p);
}
270 #endif
271 };
272
// Get/PutObject must be special-cased, since they work with handles.
274
275 // These functions allow a null base pointer with an arbitrary address.
276 // But if the base pointer is non-null, the offset should make some sense.
277 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
278 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
279 oop p = JNIHandles::resolve(obj);
280 oop v;
281
282 if (UseCompressedOops) {
283 narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
284 v = oopDesc::decode_heap_oop(n);
285 } else {
286 v = *(oop*)index_oop_from_field_offset_long(p, offset);
287 }
288
289 jobject ret = JNIHandles::make_local(env, v);
290
291 #if INCLUDE_ALL_GCS
292 // We could be accessing the referent field in a reference
293 // object. If G1 is enabled then we need to register non-null
294 // referent with the SATB barrier.
295 if (UseG1GC) {
296 bool needs_barrier = false;
297
298 if (ret != NULL) {
299 if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) {
300 oop o = JNIHandles::resolve(obj);
301 Klass* k = o->klass();
302 if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
303 assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
304 needs_barrier = true;
305 }
306 }
307 }
308
309 if (needs_barrier) {
310 oop referent = JNIHandles::resolve(ret);
311 G1SATBCardTableModRefBS::enqueue(referent);
312 }
313 }
314 #endif // INCLUDE_ALL_GCS
315
316 return ret;
317 } UNSAFE_END
318
319 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
320 oop x = JNIHandles::resolve(x_h);
321 oop p = JNIHandles::resolve(obj);
322
323 if (UseCompressedOops) {
324 oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
325 } else {
326 oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
327 }
328 } UNSAFE_END
329
330 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
331 oop p = JNIHandles::resolve(obj);
332 void* addr = index_oop_from_field_offset_long(p, offset);
333
334 volatile oop v;
335
336 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
337 OrderAccess::fence();
338 }
339
340 if (UseCompressedOops) {
341 volatile narrowOop n = *(volatile narrowOop*) addr;
342 (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
343 } else {
344 (void)const_cast<oop&>(v = *(volatile oop*) addr);
345 }
346
347 OrderAccess::acquire();
348 return JNIHandles::make_local(env, v);
349 } UNSAFE_END
350
// Volatile store of an oop field: release before the store orders all
// prior writes ahead of it; the trailing full fence completes the
// volatile (store-load ordered) semantics.
UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);
  OrderAccess::release();

  // oop_store() applies the GC store barriers for the (possibly
  // compressed) oop representation.
  if (UseCompressedOops) {
    oop_store((narrowOop*)addr, x);
  } else {
    oop_store((oop*)addr, x);
  }

  OrderAccess::fence();
} UNSAFE_END
|
255
256 jlong x = Atomic::load(p);
257
258 return x;
259 }
260
// Store x to the wrapped address while holding UnsafeJlong_lock, so the
// 64-bit store is atomic with respect to other locked jlong accesses.
// (Compiled conditionally — the enclosing #if is outside this view;
// presumably platforms without native atomic 64-bit access. TODO confirm.)
void put_jlong_locked(jlong x) {
  // Guard raw memory access against faults on the unsafe address.
  GuardUnsafeAccess guard(_thread, _obj);

  // No safepoint check: this lock may be taken in contexts where
  // safepoint checks are not allowed.
  MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);

  jlong* p = (jlong*)addr();

  // normalize_for_write presumably canonicalizes the value before the
  // store — see its definition elsewhere in this file.
  Atomic::store(normalize_for_write(x), p);
}
270 #endif
271 };
272
// Get/PutObject must be special-cased, since they work with handles.
274
275 // We could be accessing the referent field in a reference
276 // object. If G1 is enabled then we need to register non-null
277 // referent with the SATB barrier.
278
279 static bool is_java_lang_ref_Reference_access(oop o, jlong offset) {
280 if (offset == java_lang_ref_Reference::referent_offset && o != NULL) {
281 Klass* k = o->klass();
282 if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
283 assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
284 return true;
285 }
286 }
287 return false;
288 }
289
290 static void ensure_referent_alive(oop v) {
291 #if INCLUDE_ALL_GCS
292 if (UseG1GC && v != NULL) {
293 G1SATBCardTableModRefBS::enqueue(v);
294 }
295 #endif
296 }
297
298 // These functions allow a null base pointer with an arbitrary address.
299 // But if the base pointer is non-null, the offset should make some sense.
300 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
301 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
302 oop p = JNIHandles::resolve(obj);
303 oop v;
304
305 if (UseCompressedOops) {
306 narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
307 v = oopDesc::decode_heap_oop(n);
308 } else {
309 v = *(oop*)index_oop_from_field_offset_long(p, offset);
310 }
311
312 if (is_java_lang_ref_Reference_access(p, offset)) {
313 ensure_referent_alive(v);
314 }
315
316 return JNIHandles::make_local(env, v);
317 } UNSAFE_END
318
319 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
320 oop x = JNIHandles::resolve(x_h);
321 oop p = JNIHandles::resolve(obj);
322
323 if (UseCompressedOops) {
324 oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
325 } else {
326 oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
327 }
328 } UNSAFE_END
329
// Volatile read of an oop field with acquire semantics. Reads of
// Reference.referent are registered with the SATB barrier when G1
// requires it.
UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);

  volatile oop v;

  // On CPUs that are not multiple-copy-atomic, a leading full fence is
  // needed for IRIW (independent reads of independent writes)
  // consistency of volatile accesses.
  if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
    OrderAccess::fence();
  }

  if (UseCompressedOops) {
    volatile narrowOop n = *(volatile narrowOop*) addr;
    // The (void)const_cast<oop&>(...) wrapper appears to force the
    // compiler to materialize the volatile assignment and discard the
    // result — NOTE(review): idiom predates this change; confirm
    // against other volatile accessors in this file before altering.
    (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
  } else {
    (void)const_cast<oop&>(v = *(volatile oop*) addr);
  }

  // Keep a referent read through Unsafe visible to concurrent marking.
  if (is_java_lang_ref_Reference_access(p, offset)) {
    ensure_referent_alive(v);
  }

  // Acquire: no subsequent load/store may float above the volatile read.
  OrderAccess::acquire();
  return JNIHandles::make_local(env, v);
} UNSAFE_END
354
355 UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
356 oop x = JNIHandles::resolve(x_h);
357 oop p = JNIHandles::resolve(obj);
358 void* addr = index_oop_from_field_offset_long(p, offset);
359 OrderAccess::release();
360
361 if (UseCompressedOops) {
362 oop_store((narrowOop*)addr, x);
363 } else {
364 oop_store((oop*)addr, x);
365 }
366
367 OrderAccess::fence();
368 } UNSAFE_END
|