/*
 * Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP
#define OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP

#include "runtime/atomic.hpp"
#include "runtime/os.hpp"

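// Plain stores.  On x86, naturally aligned 8-, 16- and 32-bit stores are
// atomic, so no special instructions are needed here.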
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }


inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }

inline void Atomic::inc    (volatile jint*     dest) { (void)add    (1, dest); }
inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); }
inline void Atomic::inc_ptr(volatile void*     dest) { (void)add_ptr(1, dest); }

inline void Atomic::dec    (volatile jint*     dest) { (void)add    (-1, dest); }
inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
inline void Atomic::dec_ptr(volatile void*     dest) { (void)add_ptr(-1, dest); }

// For Sun Studio - implementation is in solaris_x86_[32/64].il.
// For gcc - implementation is just below.

// The lock prefix can be omitted for certain instructions on uniprocessors; to
// facilitate this, os::is_MP() is passed as an additional argument.  64-bit
// processors are assumed to be multi-threaded and/or multi-core, so the extra
// argument is unnecessary.
#ifndef _LP64
#define IS_MP_DECL() , int is_mp
#define IS_MP_ARG()  , (int) os::is_MP()
#else
#define IS_MP_DECL()
#define IS_MP_ARG()
#endif // _LP64
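
// Illustrative expansion of the macros above (not part of the interface):
// on 32-bit builds a call such as
//   _Atomic_add(add_value, dest IS_MP_ARG());
// becomes
//   _Atomic_add(add_value, dest, (int) os::is_MP());
// while on _LP64 builds the extra argument disappears:
//   _Atomic_add(add_value, dest);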

extern "C" {
  jint _Atomic_add(jint add_value, volatile jint* dest IS_MP_DECL());
  jint _Atomic_xchg(jint exchange_value, volatile jint* dest);
  jbyte _Atomic_cmpxchg_byte(jbyte exchange_value, volatile jbyte* dest,
                       jbyte compare_value IS_MP_DECL());
  jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest,
                       jint compare_value IS_MP_DECL());
  jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest,
                             jlong compare_value IS_MP_DECL());
}

inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  return _Atomic_add(add_value, dest IS_MP_ARG());
}

inline jint     Atomic::xchg       (jint     exchange_value, volatile jint*     dest) {
  return _Atomic_xchg(exchange_value, dest);
}

inline jbyte    AtomicPlatform::cmpxchg(jbyte    exchange_value, volatile jbyte*    dest, jbyte    compare_value) {
  return _Atomic_cmpxchg_byte(exchange_value, dest, compare_value IS_MP_ARG());
}

inline jint     Atomic::cmpxchg        (jint     exchange_value, volatile jint*     dest, jint     compare_value) {
  return _Atomic_cmpxchg(exchange_value, dest, compare_value IS_MP_ARG());
}

inline jlong    Atomic::cmpxchg        (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value IS_MP_ARG());
}
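
// Illustrative caller-side pattern (not part of this file): cmpxchg returns
// the value that was in *dest before the operation, so a typical lock-free
// update loop looks like
//
//   jint old = *dest;
//   while (true) {
//     jint prev = Atomic::cmpxchg(old + 1, dest, old);
//     if (prev == old) break;   // update succeeded
//     old = prev;               // somebody else changed *dest; retry
//   }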


#ifdef AMD64
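// In 64-bit mode, naturally aligned 64-bit loads and stores are atomic on
// x86, so the plain jlong stores below and the jlong load further down need
// no special handling.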
inline void Atomic::store    (jlong    store_value, jlong*             dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
extern "C" jlong _Atomic_add_long(jlong add_value, volatile jlong* dest);
extern "C" jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest);

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
  return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline jlong Atomic::load(volatile jlong* src) { return *src; }

#else // !AMD64

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)add((jint)add_value, (volatile jint*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)add((jint)add_value, (volatile jint*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)xchg((jint)exchange_value, (volatile jint*)dest);
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}

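// On 32-bit x86 a jlong spans two machine words, so ordinary loads and stores
// are not atomic.  _Atomic_move_long (implemented outside this file, in the
// platform-specific .il/.s sources) copies the value with a single atomic
// 64-bit access.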
extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);

inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}

#endif // AMD64

#ifdef _GNU_SOURCE
// Add a lock prefix to an instruction on an MP machine
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
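// LOCK_IF_MP(mp) tests the is_MP flag at run time: if mp is zero the code
// jumps over the lock prefix, otherwise the following instruction executes
// with the lock prefix and is therefore atomic across processors.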

extern "C" {
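  // _Atomic_add: xaddl atomically adds addend to *dest and leaves the
  // previous value of *dest in addend, so the new value is addend + add_value.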
  inline jint _Atomic_add(jint add_value, volatile jint* dest, int mp) {
    jint addend = add_value;
    __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return addend + add_value;
  }

#ifdef AMD64
  inline jlong _Atomic_add_long(jlong add_value, volatile jlong* dest, int mp) {
    intptr_t addend = add_value;
    __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return addend + add_value;
  }

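  // xchg with a memory operand is implicitly locked on x86, so no
  // LOCK_IF_MP prefix is needed for _Atomic_xchg_long or _Atomic_xchg.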
  inline jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest) {
    __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

#endif // AMD64

  inline jint _Atomic_xchg(jint exchange_value, volatile jint* dest) {
    __asm__ __volatile__ ("xchgl (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

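  // cmpxchg compares *dest with compare_value (in eax); on a match it stores
  // exchange_value into *dest, otherwise it loads *dest into eax.  Either way
  // eax ends up holding the previous contents of *dest, which is returned.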
  inline jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }


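  // The "q" constraint restricts exchange_value to a byte-addressable
  // register (eax/ebx/ecx/edx on 32-bit), as required by cmpxchgb.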
  inline jbyte _Atomic_cmpxchg_byte(jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }

  // This is the interface to the atomic instruction in solaris_i486.s.
  jlong _Atomic_cmpxchg_long_gcc(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp);

  inline jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp) {
#ifdef AMD64
    __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return exchange_value;
#else
    return _Atomic_cmpxchg_long_gcc(exchange_value, dest, compare_value, os::is_MP());

    #if 0
    // The code below does not work, presumably because of a bug in gcc.
    // The error message says:
    //   can't find a register in class BREG while reloading asm
    // The code is kept here so that _Atomic_cmpxchg_long_gcc can eventually
    // be replaced with inline asm like the following:

    volatile jlong_accessor evl, cvl, rv;
    evl.long_value = exchange_value;
    cvl.long_value = compare_value;
    int mp = os::is_MP();

    __asm__ volatile ("cmp $0, %%esi\n\t"
       "je 1f \n\t"
       "lock\n\t"
       "1: cmpxchg8b (%%edi)\n\t"
       : "=a"(cvl.words[0]),   "=d"(cvl.words[1])
       : "a"(cvl.words[0]), "d"(cvl.words[1]),
         "b"(evl.words[0]), "c"(evl.words[1]),
         "D"(dest), "S"(mp)
       :  "cc", "memory");
    return cvl.long_value;
    #endif // if 0
#endif // AMD64
  }
}
#undef LOCK_IF_MP

#endif // _GNU_SOURCE

#endif // OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP