
src/share/vm/runtime/atomic.hpp

rev 13266 : imported patch Atomic_refactoring
rev 13268 : [mq]: Atomic_polishing_v2


   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_VM_RUNTIME_ATOMIC_HPP
  26 #define SHARE_VM_RUNTIME_ATOMIC_HPP
  27 
  28 #include "memory/allocation.hpp"
  29 #include "metaprogramming/conditional.hpp"
  30 #include "metaprogramming/enableIf.hpp"
  31 #include "metaprogramming/integerTypes.hpp"
  32 #include "metaprogramming/isIntegral.hpp"
  33 #include "metaprogramming/isPointer.hpp"
  34 #include "metaprogramming/isSame.hpp"
  35 #include "metaprogramming/removePointer.hpp"
  36 #include "utilities/align.hpp"
  37 #include "utilities/debug.hpp"
  38 #include "utilities/macros.hpp"
  39 
  40 enum cmpxchg_memory_order {
  41   memory_order_relaxed,
  42   // Use a value that does not collide with the C++11 memory_order enumerators; we need to be more conservative.
  43   memory_order_conservative = 8
  44 };
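
A minimal caller-side sketch (not part of this change; names are invented for illustration): the order argument of Atomic::cmpxchg, defined further down in this file, takes one of the enumerators above, with memory_order_conservative requesting full two-way fencing.

  // Sketch only: compare-and-set a claim flag from 0 to 1 with
  // conservative (full-fence) ordering.
  bool try_claim(volatile int32_t* claimed) {
    return Atomic::cmpxchg(1, claimed, 0, memory_order_conservative) == 0;
  }
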
  45 
  46 class Atomic : AllStatic {
  47   template<typename T> class Never: public FalseType {};
  48 
  49   template <typename T>
  50   inline static void specialized_store(T store_value, volatile T* dest) {
  51     STATIC_ASSERT(sizeof(T) <= size_t(BytesPerWord)); // Does the machine support atomic wide accesses?
  52     (void)const_cast<T&>(*dest = store_value);
  53   }
  54 
  55   template <typename T>


 230 inline T Atomic::load(volatile T* src) {
 231   typedef typename IntegerTypes::Signed<T>::type Raw;
 232   return IntegerTypes::cast<T>(specialized_load(reinterpret_cast<const volatile Raw*>(src)));
 233 }
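
A usage sketch for the templated load above (illustrative only): the source pointer's type selects the canonical signed representation, the platform-specialized load runs on that representation, and the result is cast back to T.

  // Sketch only: atomically read a 32-bit flag.
  int32_t read_flag(volatile int32_t* flag) {
    return Atomic::load(flag);
  }
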
 234 
 235 template <typename T, typename U>
 236 inline U Atomic::add(T add_value, volatile U* dst) {
 237   STATIC_ASSERT(IsIntegral<T>::value);
 238   STATIC_ASSERT(IsIntegral<U>::value);
 239   typedef typename IntegerTypes::Signed<U>::type Raw;
 240   // Allow -Wconversion or the like to complain about unsafe conversions.
 241   U value = add_value;
 242   Raw raw_value = IntegerTypes::cast_to_signed(value);
 243   Raw result = specialized_add(raw_value, reinterpret_cast<volatile Raw*>(dst));
 244   return IntegerTypes::cast<U>(result);
 245 }
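
A usage sketch for the integral add above (names invented): the delta is first converted to the destination type U so the compiler can diagnose narrowing, then routed through the canonical signed type; HotSpot's longstanding convention is that add returns the updated value.

  // Sketch only: advance a size_t counter and return its new value.
  size_t bump(volatile size_t* counter, size_t delta) {
    return Atomic::add(delta, counter);
  }
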
 246 
 247 template <typename T, typename U>
 248 inline U* Atomic::add(T add_value, U* volatile* dst) {
 249   STATIC_ASSERT(IsIntegral<T>::value);
 250   typedef typename IntegerTypes::Signed<intptr_t>::type Raw;
 251   ptrdiff_t value = add_value;
 252   Raw raw_value = IntegerTypes::cast_to_signed(value * sizeof(U));
 253   Raw result = specialized_add(raw_value, reinterpret_cast<volatile Raw*>(dst));
 254   return IntegerTypes::cast<U*>(result);
 255 }
 256 
 257 template <typename T>
 258 inline void Atomic::inc(volatile T* src) {
 259   STATIC_ASSERT(IsIntegral<T>::value);
 260   typedef typename IntegerTypes::Signed<T>::type Raw;
 261   specialized_inc(reinterpret_cast<volatile Raw*>(src));
 262 }
 263 
 264 template <typename T>
 265 inline void Atomic::inc(T* volatile* src) {
 266   if (sizeof(T) != 1) {
 267     add(1, src);
 268   } else {
 269     typedef typename IntegerTypes::Signed<intptr_t>::type Raw;
 270     specialized_inc(reinterpret_cast<volatile Raw*>(src));
 271   }
 272 }
 273 
 274 template <typename T>
 275 inline void Atomic::dec(volatile T* src) {
 276   STATIC_ASSERT(IsIntegral<T>::value);
 277   typedef typename IntegerTypes::Signed<T>::type Raw;
 278   specialized_dec(reinterpret_cast<volatile Raw*>(src));
 279 }
 280 
 281 template <typename T>
 282 inline void Atomic::dec(T* volatile* src) {
 283   if (sizeof(T) != 1) {
 284     add(-1, src);
 285   } else {
 286     typedef typename IntegerTypes::Signed<intptr_t>::type Raw;
 287     specialized_dec(reinterpret_cast<volatile Raw*>(src));
 288   }
 289 }
 290 
 291 template <typename T, typename U>
 292 inline U Atomic::xchg(T exchange_value, volatile U* dest) {
 293   typedef typename IntegerTypes::Signed<U>::type Raw;
 294   U exchange_value_cast = exchange_value;
 295   Raw result = specialized_xchg(IntegerTypes::cast_to_signed(exchange_value_cast),
 296                                 reinterpret_cast<volatile Raw*>(dest));
 297   return IntegerTypes::cast<U>(result);
 298 }
 299 
 300 template <typename T, typename U, typename V>
 301 inline U Atomic::cmpxchg(T exchange_value, volatile U* dest, V compare_value, cmpxchg_memory_order order) {
 302   typedef typename IntegerTypes::Signed<U>::type Raw;
 303   U exchange_value_cast = exchange_value;
 304   U compare_value_cast = compare_value;
 305   Raw result = specialized_cmpxchg(IntegerTypes::cast_to_signed(exchange_value_cast),
 306                                    reinterpret_cast<volatile Raw*>(dest),




   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_VM_RUNTIME_ATOMIC_HPP
  26 #define SHARE_VM_RUNTIME_ATOMIC_HPP
  27 
  28 #include "memory/allocation.hpp"


  29 #include "metaprogramming/integerTypes.hpp"
  30 #include "metaprogramming/isIntegral.hpp"
  31 #include "metaprogramming/isPointer.hpp"


  32 #include "utilities/align.hpp"
  33 #include "utilities/debug.hpp"
  34 #include "utilities/macros.hpp"
  35 
  36 enum cmpxchg_memory_order {
  37   memory_order_relaxed,
  38   // Use a value that does not collide with the C++11 memory_order enumerators; we need to be more conservative.
  39   memory_order_conservative = 8
  40 };
  41 
  42 class Atomic : AllStatic {
  43   template<typename T> class Never: public FalseType {};
  44 
  45   template <typename T>
  46   inline static void specialized_store(T store_value, volatile T* dest) {
  47     STATIC_ASSERT(sizeof(T) <= size_t(BytesPerWord)); // Does the machine support atomic wide accesses?
  48     (void)const_cast<T&>(*dest = store_value);
  49   }
  50 
  51   template <typename T>


 226 inline T Atomic::load(volatile T* src) {
 227   typedef typename IntegerTypes::Signed<T>::type Raw;
 228   return IntegerTypes::cast<T>(specialized_load(reinterpret_cast<const volatile Raw*>(src)));
 229 }
 230 
 231 template <typename T, typename U>
 232 inline U Atomic::add(T add_value, volatile U* dst) {
 233   STATIC_ASSERT(IsIntegral<T>::value);
 234   STATIC_ASSERT(IsIntegral<U>::value);
 235   typedef typename IntegerTypes::Signed<U>::type Raw;
 236   // Allow -Wconversion or the like to complain about unsafe conversions.
 237   U value = add_value;
 238   Raw raw_value = IntegerTypes::cast_to_signed(value);
 239   Raw result = specialized_add(raw_value, reinterpret_cast<volatile Raw*>(dst));
 240   return IntegerTypes::cast<U>(result);
 241 }
 242 
 243 template <typename T, typename U>
 244 inline U* Atomic::add(T add_value, U* volatile* dst) {
 245   STATIC_ASSERT(IsIntegral<T>::value);
 246   typedef typename IntegerTypes::Signed<U*>::type Raw;
 247   ptrdiff_t value = add_value;
 248   Raw raw_value = IntegerTypes::cast_to_signed(value * sizeof(U));
 249   Raw result = specialized_add(raw_value, reinterpret_cast<volatile Raw*>(dst));
 250   return IntegerTypes::cast<U*>(result);
 251 }
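
A usage sketch for the pointer overload above (types invented for illustration): the integral delta is scaled by sizeof(U), so the destination pointer advances by whole elements rather than bytes.

  struct Entry { void* key; void* value; };  // element type, for illustration only

  // Sketch only: atomically bump a cursor past n Entry elements and
  // return the updated pointer.
  Entry* skip(Entry* volatile* cursor, int n) {
    return Atomic::add(n, cursor);
  }
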
 252 
 253 template <typename T>
 254 inline void Atomic::inc(volatile T* src) {
 255   STATIC_ASSERT(IsIntegral<T>::value);
 256   typedef typename IntegerTypes::Signed<T>::type Raw;
 257   specialized_inc(reinterpret_cast<volatile Raw*>(src));
 258 }
 259 
 260 template <typename T>
 261 inline void Atomic::inc(T* volatile* src) {
 262   if (sizeof(T) != 1) {
 263     add(1, src);
 264   } else {
 265     typedef typename IntegerTypes::Signed<T*>::type Raw;
 266     specialized_inc(reinterpret_cast<volatile Raw*>(src));
 267   }
 268 }
 269 
 270 template <typename T>
 271 inline void Atomic::dec(volatile T* src) {
 272   STATIC_ASSERT(IsIntegral<T>::value);
 273   typedef typename IntegerTypes::Signed<T>::type Raw;
 274   specialized_dec(reinterpret_cast<volatile Raw*>(src));
 275 }
 276 
 277 template <typename T>
 278 inline void Atomic::dec(T* volatile* src) {
 279   if (sizeof(T) != 1) {
 280     add(-1, src);
 281   } else {
 282     typedef typename IntegerTypes::Signed<T*>::type Raw;
 283     specialized_dec(reinterpret_cast<volatile Raw*>(src));
 284   }
 285 }
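
A usage sketch for inc/dec above (names invented): the integral forms bump a counter by one, while the pointer forms step by one element; only element size 1 takes the direct specialized path instead of delegating to add().

  // Sketch only: simple reference-counting helpers.
  void retain(volatile intptr_t* refcount)  { Atomic::inc(refcount); }
  void release(volatile intptr_t* refcount) { Atomic::dec(refcount); }

  // Pointer form: advances the cursor by one char (element size 1).
  void next_byte(char* volatile* cursor)    { Atomic::inc(cursor); }
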
 286 
 287 template <typename T, typename U>
 288 inline U Atomic::xchg(T exchange_value, volatile U* dest) {
 289   typedef typename IntegerTypes::Signed<U>::type Raw;
 290   U exchange_value_cast = exchange_value;
 291   Raw result = specialized_xchg(IntegerTypes::cast_to_signed(exchange_value_cast),
 292                                 reinterpret_cast<volatile Raw*>(dest));
 293   return IntegerTypes::cast<U>(result);
 294 }
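
A usage sketch for xchg above (names invented): the new value is converted to the destination type U up front, swapped through the canonical signed type, and the previous contents are returned.

  // Sketch only: publish a new state word and return the old one.
  int32_t publish(volatile int32_t* state, int32_t new_state) {
    return Atomic::xchg(new_state, state);
  }
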
 295 
 296 template <typename T, typename U, typename V>
 297 inline U Atomic::cmpxchg(T exchange_value, volatile U* dest, V compare_value, cmpxchg_memory_order order) {
 298   typedef typename IntegerTypes::Signed<U>::type Raw;
 299   U exchange_value_cast = exchange_value;
 300   U compare_value_cast = compare_value;
 301   Raw result = specialized_cmpxchg(IntegerTypes::cast_to_signed(exchange_value_cast),
 302                                    reinterpret_cast<volatile Raw*>(dest),

