/*
 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_HPP
#define OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_HPP

#include "runtime/os.hpp"

// For Sun Studio - implementation is in solaris_x86_[32/64].il.

extern "C" {
  int32_t _Atomic_add(int32_t add_value, volatile int32_t* dest);
  int32_t _Atomic_xchg(int32_t exchange_value, volatile int32_t* dest);
  int8_t _Atomic_cmpxchg_byte(int8_t exchange_value, volatile int8_t* dest,
                              int8_t compare_value);
  int32_t _Atomic_cmpxchg(int32_t exchange_value, volatile int32_t* dest,
                          int32_t compare_value);
  int64_t _Atomic_cmpxchg_long(int64_t exchange_value, volatile int64_t* dest,
                               int64_t compare_value);
}

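// The Atomic::specialized_* templates below forward the shared Atomic
// operations to the hand-written Sun Studio .il stubs declared above.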
template <>
inline int32_t Atomic::specialized_add<int32_t>(int32_t add_value, volatile int32_t* dest) {
  return _Atomic_add(add_value, dest);
}

template <>
inline int32_t Atomic::specialized_xchg<int32_t>(int32_t exchange_value, volatile int32_t* dest) {
  return _Atomic_xchg(exchange_value, dest);
}

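// VM_HAS_SPECIALIZED_CMPXCHG_BYTE advertises that this platform supplies a
// native 1-byte compare-and-swap, so the shared atomic code does not have to
// emulate byte cmpxchg on top of the 4-byte form. The cmpxchg_memory_order
// argument is accepted for interface compatibility only and is ignored here:
// on x86 the locked cmpxchg performed by the stubs is already a full two-way
// fence (conservative ordering).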
#define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
template <>
inline int8_t Atomic::specialized_cmpxchg<int8_t>(int8_t exchange_value, volatile int8_t* dest, int8_t compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_byte(exchange_value, dest, compare_value);
}

template <>
inline int32_t Atomic::specialized_cmpxchg<int32_t>(int32_t exchange_value, volatile int32_t* dest, int32_t compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg(exchange_value, dest, compare_value);
}

template <>
inline int64_t Atomic::specialized_cmpxchg<int64_t>(int64_t exchange_value, volatile int64_t* dest, int64_t compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value);
}

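// Only the AMD64 .il file provides 64-bit add and xchg stubs; the 32-bit port
// instead supplies atomic 64-bit load/store via _Atomic_move_long below.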
#ifdef AMD64

extern "C" int64_t _Atomic_add_long(int64_t add_value, volatile int64_t* dest);
extern "C" int64_t _Atomic_xchg_long(int64_t exchange_value, volatile int64_t* dest);

template <>
inline int64_t Atomic::specialized_add<int64_t>(int64_t add_value, volatile int64_t* dest) {
  return _Atomic_add_long(add_value, dest);
}

template <>
inline int64_t Atomic::specialized_xchg<int64_t>(int64_t exchange_value, volatile int64_t* dest) {
  return _Atomic_xchg_long(exchange_value, dest);
}

#else // !AMD64

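// On 32-bit x86 an ordinary 64-bit load or store may be split into two 32-bit
// accesses, so 64-bit loads and stores are routed through _Atomic_move_long,
// which performs the move as one indivisible 8-byte transfer (the 32-bit .il
// stub typically implements this with an FPU fild/fistp pair).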
extern "C" void _Atomic_move_long(const volatile int64_t* src, volatile int64_t* dst);

template <>
inline int64_t Atomic::specialized_load<int64_t>(const volatile int64_t* src) {
  volatile int64_t dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

template <>
inline void Atomic::specialized_store<int64_t>(int64_t store_value, volatile int64_t* dest) {
  _Atomic_move_long((volatile int64_t*)&store_value, (volatile int64_t*)dest);
}

#endif // AMD64

#endif // OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_HPP