/*
 * Copyright (c) 1999, 2009, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }


inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }

inline void Atomic::inc    (volatile jint*     dest) { (void)add    (1, dest); }
inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); }
inline void Atomic::inc_ptr(volatile void*     dest) { (void)add_ptr(1, dest); }

inline void Atomic::dec    (volatile jint*     dest) { (void)add    (-1, dest); }
inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
inline void Atomic::dec_ptr(volatile void*     dest) { (void)add_ptr(-1, dest); }
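
// Illustrative usage sketch (not part of this file's interface): a caller
// keeping a simple counter in a jint could write
//   volatile jint _counter = 0;
//   Atomic::inc(&_counter);   // atomically bump the counter
//   Atomic::dec(&_counter);   // atomically drop it again
// The (void) casts above simply discard the updated value returned by
// add()/add_ptr().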

// For Sun Studio - implementation is in solaris_x86_[32/64].il.
// For gcc - implementation is just below.

// The lock prefix can be omitted for certain instructions on uniprocessors; to
// facilitate this, os::is_MP() is passed as an additional argument.  64-bit
// processors are assumed to be multi-threaded and/or multi-core, so the extra
// argument is unnecessary.
#ifndef _LP64
#define IS_MP_DECL() , int is_mp
#define IS_MP_ARG()  , (int) os::is_MP()
#else
#define IS_MP_DECL()
#define IS_MP_ARG()
#endif // _LP64
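
// For reference, on 32-bit builds the macros above give the helpers below a
// trailing "int is_mp" parameter and pass (int) os::is_MP() at each call
// site, e.g. the first declaration expands to
//   jint _Atomic_add(jint add_value, volatile jint* dest, int is_mp);
// On _LP64 builds both macros expand to nothing and the extra argument
// disappears.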

extern "C" {
  jint _Atomic_add(jint add_value, volatile jint* dest IS_MP_DECL());
  jint _Atomic_xchg(jint exchange_value, volatile jint* dest);
  jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest,
                       jint compare_value IS_MP_DECL());
  jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest,
                             jlong compare_value IS_MP_DECL());
}

inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  return _Atomic_add(add_value, dest IS_MP_ARG());
}

inline jint     Atomic::xchg       (jint     exchange_value, volatile jint*     dest) {
  return _Atomic_xchg(exchange_value, dest);
}

inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value) {
  return _Atomic_cmpxchg(exchange_value, dest, compare_value IS_MP_ARG());
}

inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value IS_MP_ARG());
}
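
// Usage sketch (hypothetical caller): cmpxchg() returns the value observed in
// *dest, so the usual compare-and-swap retry loop checks that result against
// the expected value, e.g.
//   jint old;
//   do {
//     old = *dest;
//   } while (Atomic::cmpxchg(old + 1, dest, old) != old);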


#ifdef AMD64
inline void Atomic::store    (jlong    store_value, jlong*             dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
extern "C" jlong _Atomic_add_long(jlong add_value, volatile jlong* dest);
extern "C" jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest);

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
  return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline jlong Atomic::load(volatile jlong* src) { return *src; }
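
// On AMD64 a naturally aligned 64-bit load or store is a single atomic
// instruction, so the plain assignments above suffice; the 32-bit path below
// instead routes jlong loads through a helper.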

#else // !AMD64

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)add((jint)add_value, (volatile jint*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)add((jint)add_value, (volatile jint*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)xchg((jint)exchange_value, (volatile jint*)dest);
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}

extern "C" void _Atomic_load_long(volatile jlong* src, volatile jlong* dst);

inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_load_long(src, &dest);
  return dest;
}
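
// A 32-bit x86 core cannot read a jlong atomically with two ordinary 32-bit
// moves, so the load above is delegated to _Atomic_load_long, which is
// assumed to perform the transfer as a single 64-bit access (for example via
// the FPU or cmpxchg8b); the exact instruction is a detail of the .il/.s
// implementation.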

#endif // AMD64

#ifdef _GNU_SOURCE
// Add a lock prefix to an instruction on an MP machine
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
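
// For example, LOCK_IF_MP(%3) "xaddl %0,(%2)" expands to the single string
//   "cmp $0, %3; je 1f; lock; 1: xaddl %0,(%2)"
// so the lock prefix is skipped at runtime whenever the mp operand is zero.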

extern "C" {
  inline jint _Atomic_add(jint add_value, volatile jint* dest, int mp) {
    jint addend = add_value;
    __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return addend + add_value;
  }
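
  // Note: xaddl leaves the previous value of *dest in the output register, so
  // addend + add_value reconstructs the updated value that Atomic::add() is
  // expected to return.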

#ifdef AMD64
  inline jlong _Atomic_add_long(jlong add_value, volatile jlong* dest, int mp) {
    intptr_t addend = add_value;
    __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return addend + add_value;
  }

  inline jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest) {
    __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

#endif // AMD64

  inline jint _Atomic_xchg(jint exchange_value, volatile jint* dest) {
    __asm__ __volatile__ ("xchgl (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

  inline jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }
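
  // Note: cmpxchgl compares against eax implicitly, hence the "a" constraint
  // on compare_value; afterwards eax holds the value that was found in *dest,
  // which is what gets returned.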

  // This is the interface to the atomic instruction in solaris_i486.s.
  jlong _Atomic_cmpxchg_long_gcc(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp);

  inline jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp) {
#ifdef AMD64
    __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return exchange_value;
#else
    return _Atomic_cmpxchg_long_gcc(exchange_value, dest, compare_value, os::is_MP());

    #if 0
    // The code below does not work, presumably because of a bug in gcc.
    // The error message says:
    //   can't find a register in class BREG while reloading asm
    // It is kept here so that _Atomic_cmpxchg_long_gcc can later be replaced
    // with inline asm along these lines:

    volatile jlong_accessor evl, cvl, rv;
    evl.long_value = exchange_value;
    cvl.long_value = compare_value;
    int mp = os::is_MP();

    __asm__ volatile ("cmp $0, %%esi\n\t"
       "je 1f \n\t"
       "lock\n\t"
       "1: cmpxchg8b (%%edi)\n\t"
       : "=a"(cvl.words[0]),   "=d"(cvl.words[1])
       : "a"(cvl.words[0]), "d"(cvl.words[1]),
         "b"(evl.words[0]), "c"(evl.words[1]),
         "D"(dest), "S"(mp)
       :  "cc", "memory");
    return cvl.long_value;
    #endif // if 0
#endif // AMD64
  }
}
#undef LOCK_IF_MP

#endif // _GNU_SOURCE