
src/share/vm/runtime/atomic.cpp

rev 10933 : 8154736: enhancement of cmpxchg and copy_to_survivor for ppc64
Reviewed-by:
Contributed-by: HORII@jp.ibm.com, mdoerr

@@ -55,14 +55,15 @@
   assert(sizeof(unsigned int) == sizeof(jint), "more work to do");
   return (unsigned int)Atomic::xchg((jint)exchange_value, (volatile jint*)dest);
 }
 
 unsigned Atomic::cmpxchg(unsigned int exchange_value,
-                         volatile unsigned int* dest, unsigned int compare_value) {
+                         volatile unsigned int* dest, unsigned int compare_value,
+                         memory_order order) {
   assert(sizeof(unsigned int) == sizeof(jint), "more work to do");
   return (unsigned int)Atomic::cmpxchg((jint)exchange_value, (volatile jint*)dest,
-                                       (jint)compare_value);
+                                       (jint)compare_value, order);
 }
 
 jlong Atomic::add(jlong    add_value, volatile jlong*    dest) {
   jlong old = load(dest);
   jlong new_value = old + add_value;
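Note (not part of the webrev): the hunk above adds an explicit memory-ordering argument to the unsigned overload of Atomic::cmpxchg and forwards it to the jint overload. The sketch below is a minimal, hypothetical usage example; the enum constants memory_order_relaxed and memory_order_conservative, and the conservative default for existing call sites, are assumptions about the companion atomic.hpp change and are not shown in this hunk.

#include "runtime/atomic.hpp"

// Hypothetical caller: claim a slot with a single CAS.
static volatile unsigned int _claimed = 0;

static bool try_claim() {
  // Pass an explicit order where weaker semantics are known to be safe,
  // e.g. on PPC64 hot paths such as copy_to_survivor (enum value assumed).
  unsigned int prev = Atomic::cmpxchg(1u, &_claimed, 0u, memory_order_relaxed);
  return prev == 0u;
}

// Existing call sites compile unchanged and keep the conservative,
// full-barrier behavior (assumed to be supplied as a default argument):
//   Atomic::cmpxchg(1u, &_claimed, 0u);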