/*
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_RUNTIME_ACCESSBACKEND_HPP
#define SHARE_VM_RUNTIME_ACCESSBACKEND_HPP

#include "utilities/traits/enableIf.hpp"
#include "utilities/traits/typeIf.hpp"
#include "utilities/traits/integer.hpp"
#include "utilities/traits/isDerived.hpp"
#include "utilities/traits/isFloatLike.hpp"
#include "utilities/traits/isVolatile.hpp"

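// BarrierType enumerates the access primitives that can be dispatched through a
// barrier: store, load, compare-and-swap and swap, each in a raw-address flavor and a
// base+offset (*_AT) flavor, plus array copy and object clone.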
enum BarrierType {
  BARRIER_STORE,
  BARRIER_STORE_AT,
  BARRIER_LOAD,
  BARRIER_LOAD_AT,
  BARRIER_CAS,
  BARRIER_CAS_AT,
  BARRIER_SWAP,
  BARRIER_SWAP_AT,
  BARRIER_COPY,
  BARRIER_CLONE
};

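// RAW_DECORATOR_MASK is the subset of decorators the raw access layer understands
// (memory ordering and copy properties); BASIC_DECORATOR_MASK additionally covers the
// compressed oop and value decorators handled by the basic layer. A barrier filters
// the decorators it passes down through these masks, e.g. (a sketch, not a call site
// taken from this file):
//
//   // Only MO_SEQ_CST survives the intersection; VALUE_IS_OOP is handled above Raw.
//   typedef RawAccessBarrier<DecoratorIntersection<MO_SEQ_CST | VALUE_IS_OOP,
//                                                  RAW_DECORATOR_MASK>::value> Raw;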
enum {
  RAW_DECORATOR_MASK = MO_RELAXED | MO_VOLATILE | MO_ATOMIC | MO_ACQUIRE | MO_RELEASE |
                       MO_SEQ_CST | DEST_CONJOINT | DEST_DISJOINT | COPY_ARRAYOF |
                       ACCESS_ARRAYCOPY | ACCESS_ATOMIC | ACCESS_ALIGNED
};

enum {
  BASIC_DECORATOR_MASK = RAW_DECORATOR_MASK | GC_CONVERT_COMPRESSED_OOP |
                         RT_USE_COMPRESSED_OOPS | VALUE_NOT_NULL | VALUE_IS_OOP |
                         ACCESS_ON_ANONYMOUS
};

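// TestEncodable holds when the accessed value is an oop (VALUE_IS_OOP) and the access
// needs compressed oop conversion (NEEDS_OOP_COMPRESS); EncodedType then maps T to
// narrowOop and otherwise leaves T unchanged. For example, assuming DecoratorTest
// reports both properties for these decorators:
//
//   EncodedType<VALUE_IS_OOP | GC_CONVERT_COMPRESSED_OOP | RT_USE_COMPRESSED_OOPS, oop>::type // narrowOop
//   EncodedType<MO_RELAXED, jint>::type                                                       // jint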
template <DecoratorSet decorators, typename T>
struct TestEncodable {
  enum {
    value = DecoratorTest<decorators>::HAS_VALUE_IS_OOP && DecoratorTest<decorators>::NEEDS_OOP_COMPRESS
  };
};

template <bool compress, typename T>
struct EncodedTypeHelper {
  typedef T type;
};

template <typename T>
struct EncodedTypeHelper<true, T> {
  typedef narrowOop type;
};

template <DecoratorSet decorators, typename T>
struct EncodedType {
  typedef typename EncodedTypeHelper<TestEncodable<decorators, T>::value, T>::type type;
};

template <DecoratorSet decorators>
struct EncodedOopType {
  typedef typename EncodedTypeHelper<TestEncodable<decorators, oop>::value, oop>::type type;
};

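// PossiblyLockedAccess is true only for 64-bit atomic accesses on platforms that do not
// define SUPPORTS_NATIVE_CX8 (no native 8-byte compare-and-swap). Such accesses fall
// back to the *_maybe_locked slow paths further down, which are only declared here and
// are defined out of line.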
template <typename T, DecoratorSet ds>
struct PossiblyLockedAccess {
  enum {
#ifndef SUPPORTS_NATIVE_CX8
    value = (sizeof(T) == sizeof(int64_t)) && DecoratorTest<ds>::HAS_MO_ATOMIC
#else
    value = false
#endif
  };
};

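// BaseType selects the static type of the "base" argument taken by the *_at accesses:
// oop for on-heap accesses, nmethod* for accesses on nmethods, Klass* for accesses on
// klasses, and void* when none of these decorators apply.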
template <bool on_heap, bool on_nmethod, bool on_klass>
struct BaseTypeHelper {
  typedef void* type;
};

template <>
struct BaseTypeHelper<true, false, false> {
  typedef oop type;
};

template <>
struct BaseTypeHelper<false, true, false> {
  typedef nmethod* type;
};

template <>
struct BaseTypeHelper<false, false, true> {
  typedef Klass* type;
};

template <DecoratorSet decorators>
struct BaseType {
  typedef typename BaseTypeHelper<DecoratorTest<decorators>::HAS_ACCESS_ON_HEAP,
                                  DecoratorTest<decorators>::HAS_ACCESS_ON_NMETHOD,
                                  DecoratorTest<decorators>::HAS_ACCESS_ON_KLASS>::type type;
};

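// AccessInternal collects the function pointer types used to dispatch each BarrierType
// (AccessFunctionTypes), the lock-based fallbacks for wide atomics on platforms without
// native 8-byte compare-and-swap, and small helpers that forward to Copy:: and to field
// address computation so this header stays light on include dependencies.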
namespace AccessInternal {
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef void (*store_func_t)(void* addr, T value);
    typedef void (*store_at_func_t)(typename BaseType<decorators>::type base, ptrdiff_t offset, T value);
    typedef T (*load_func_t)(void* addr);
    typedef T (*load_at_func_t)(typename BaseType<decorators>::type, ptrdiff_t offset);
    typedef T (*cas_func_t)(T new_value, void* addr, T compare_value);
    typedef T (*cas_at_func_t)(T new_value, typename BaseType<decorators>::type, ptrdiff_t offset, T compare_value);
    typedef T (*swap_func_t)(T new_value, void* addr);
    typedef T (*swap_at_func_t)(T new_value, typename BaseType<decorators>::type, ptrdiff_t offset);
    typedef bool (*copy_func_t)(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length);
  };

  bool wide_atomic_needs_locking();
  int64_t load_locked(void* addr);
  void store_locked(void* addr, int64_t val);
  int64_t swap_locked(int64_t new_val, void* addr);
  int64_t cas_locked(int64_t new_val, void* addr, int64_t expected_val);

  void* field_addr(void* base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce include dependencies and allow
  // faster build times, given how frequently the access header is included.
  void copy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void copy_conjoint_oops(void* src, void* dst, size_t length);
  void copy_conjoint_memory_atomic(void* src, void* dst, size_t length);
  void copy_conjoint_jbytes(void* src, void* dst, size_t length);
  void copy_conjoint_jlongs_atomic(void* src, void* dst, size_t length);
  void copy_disjoint_words(void* src, void* dst, size_t length);
}

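// RawAccessBarrier performs accesses on raw addresses and honors only the memory
// ordering decorators (MO_*): depending on the decorator set, loads and stores map to
// plain, volatile, atomic, acquire/release or sequentially consistent primitives, and
// 64-bit atomic accesses may take a lock-based path on platforms without native wide
// atomics. A usage sketch (hypothetical call site, not taken from this file):
//
//   jint v = RawAccessBarrier<MO_RELAXED>::load<jint>(addr);
//   RawAccessBarrier<MO_RELAXED>::store<jint>(addr, v + 1);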
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, void>::type
  atomic_store(void* addr, T value);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, void>::type
  atomic_store(void* addr, T value);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, void>::type
  release_store(void* addr, T value);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, void>::type
  release_store(void* addr, T value);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, void>::type
  release_store_fence(void* addr, T value);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, void>::type
  release_store_fence(void* addr, T value);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, T>::type
  atomic_load(void* addr);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, T>::type
  atomic_load(void* addr);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, T>::type
  load_acquire(void* addr);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, T>::type
  load_acquire(void* addr);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, T>::type
  fence_load_acquire(void* addr);

  template <typename T>
  static inline typename EnableIf<!IsPointerSize<T>::value, T>::type
  fence_load_acquire(void* addr);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, T>::type
  cas_relaxed(T new_value, void* addr, T compare_value);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, T>::type
  cas_relaxed(T new_value, void* addr, T compare_value);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, T>::type
  cas_seq_cst(T new_value, void* addr, T compare_value);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, T>::type
  cas_seq_cst(T new_value, void* addr, T compare_value);

  template <typename T>
  static typename EnableIf<IsPointerSize<T>::value, T>::type
  swap_seq_cst(T new_value, void* addr);

  template <typename T>
  static typename EnableIf<!IsPointerSize<T>::value, T>::type
  swap_seq_cst(T new_value, void* addr);

  // The following *_locked mechanisms handle atomic operations on values wider than
  // the machine can operate on natively, possibly opting for a slower path that
  // performs the operation under a mutex.

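  // For example, on a 32-bit platform without SUPPORTS_NATIVE_CX8, an MO_ATOMIC access
  // to a 64-bit value selects the locked *_maybe_locked overloads below; those overloads
  // are only declared here and are defined out of line (presumably in terms of
  // AccessInternal::load_locked, store_locked, cas_locked and swap_locked).
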
  template <typename T>
  static inline T load_not_locked(void* addr) {
    if (DecoratorTest<decorators>::HAS_MO_SEQ_CST) {
      return fence_load_acquire<T>(addr);
    } else if (DecoratorTest<decorators>::HAS_MO_ACQUIRE) {
      return load_acquire<T>(addr);
    } else if (DecoratorTest<decorators>::HAS_MO_ATOMIC) {
      return atomic_load<T>(addr);
    } else if (DecoratorTest<decorators>::HAS_MO_VOLATILE) {
      return *(volatile T*)addr;
    } else {
      return *(T*)addr;
    }
  }

  template <typename T, DecoratorSet ds>
  static inline typename EnableIf<!PossiblyLockedAccess<T, ds>::value, T>::type load_maybe_locked(void* addr) {
    return load_not_locked<T>(addr);
  }

  template <typename T, DecoratorSet ds>
  static typename EnableIf<PossiblyLockedAccess<T, ds>::value, T>::type load_maybe_locked(void* addr);

  template <typename T>
  static inline void store_not_locked(void* addr, T value) {
    if (DecoratorTest<decorators>::HAS_MO_SEQ_CST) {
      release_store_fence<T>(addr, value);
    } else if (DecoratorTest<decorators>::HAS_MO_RELEASE) {
      release_store<T>(addr, value);
    } else if (DecoratorTest<decorators>::HAS_MO_ATOMIC) {
      atomic_store<T>(addr, value);
    } else if (DecoratorTest<decorators>::HAS_MO_VOLATILE) {
      (void)const_cast<T&>(*(volatile T*)addr = value);
    } else {
      *(T*)addr = value;
    }
  }

  template <typename T, DecoratorSet ds>
  static inline typename EnableIf<!PossiblyLockedAccess<T, ds>::value, void>::type store_maybe_locked(void* addr, T value) {
    store_not_locked<T>(addr, value);
  }

  template <typename T, DecoratorSet ds>
  static typename EnableIf<PossiblyLockedAccess<T, ds>::value, void>::type store_maybe_locked(void* addr, T value);

  template <typename T>
  static inline T cas_not_locked(T new_value, void* addr, T compare_value) {
    if (DecoratorTest<decorators>::HAS_MO_SEQ_CST) {
      return cas_seq_cst<T>(new_value, addr, compare_value);
    } else {
      return cas_relaxed<T>(new_value, addr, compare_value);
    }
  }

  template <typename T, DecoratorSet ds>
  static inline typename EnableIf<!PossiblyLockedAccess<T, ds>::value, T>::type cas_maybe_locked(T new_value, void* addr, T compare_value) {
    return cas_not_locked<T>(new_value, addr, compare_value);
  }

  template <typename T, DecoratorSet ds>
  static typename EnableIf<PossiblyLockedAccess<T, ds>::value, T>::type cas_maybe_locked(T new_value, void* addr, T compare_value);

  template <typename T>
  static inline T swap_not_locked(T new_value, void* addr) {
    return swap_seq_cst<T>(new_value, addr);
  }

  template <typename T, DecoratorSet ds>
  static inline typename EnableIf<!PossiblyLockedAccess<T, ds>::value, T>::type swap_maybe_locked(T new_value, void* addr) {
    return swap_not_locked<T>(new_value, addr);
  }

  template <typename T, DecoratorSet ds>
  static typename EnableIf<PossiblyLockedAccess<T, ds>::value, T>::type swap_maybe_locked(T new_value, void* addr);

public:
  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, void>::type store(void* addr, T value) {
    store_maybe_locked<T, decorators>(addr, value);
  }

  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, T>::type load(void* addr) {
    return load_maybe_locked<T, decorators>(addr);
  }

  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, T>::type cas(T new_value, void* addr, T compare_value) {
    return cas_maybe_locked<T, decorators>(new_value, addr, compare_value);
  }

  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, T>::type swap(T new_value, void* addr) {
    return swap_maybe_locked<T, decorators>(new_value, addr);
  }

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, bool>::type copy(T* src, T* dst, size_t length);

  static void clone(oop src, oop dst, size_t size);

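  // The overloads below handle float-like types by punning the value to the signed
  // integer type of the same size via IntegerType, so the integral paths above can be
  // reused for atomic and ordered accesses.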
  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, void>::type store(void* addr, T value) {
    typedef IntegerType<T> IntType;
    store<typename IntType::signed_type>(addr, IntType::cast_to_signed(value));
  }

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type load(void* addr) {
    typedef IntegerType<T> IntType;
    return IntType::cast_from_signed(load<typename IntType::signed_type>(addr));
  }

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type cas(T new_value, void* addr, T compare_value) {
    typedef IntegerType<T> IntType;
    return IntType::cast_from_signed(cas<typename IntType::signed_type>(IntType::cast_to_signed(new_value), addr, IntType::cast_to_signed(compare_value)));
  }

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type swap(T new_value, void* addr) {
    typedef IntegerType<T> IntType;
    return IntType::cast_from_signed(swap<typename IntType::signed_type>(IntType::cast_to_signed(new_value), addr));
  }

  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, bool>::type copy(T* src, T* dst, size_t length) {
    typedef typename IntegerType<T>::signed_type IntType;
    return copy<IntType>((IntType*)src, (IntType*)dst, length);
  }

};


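// BasicAccessBarrier adds base+offset addressing (the *_at variants), compressed oop
// encoding/decoding and the oop_* entry points on top of RawAccessBarrier. The Raw
// typedef filters the decorator set through RAW_DECORATOR_MASK so that only decorators
// understood by the raw layer are passed down.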
template <DecoratorSet decorators>
class BasicAccessBarrier: public AllStatic {
  typedef RawAccessBarrier<DecoratorIntersection<decorators, RAW_DECORATOR_MASK>::value> Raw;

protected:
  static inline void* field_addr(void* base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
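  // encode()/decode() convert between T and EncodedType<decorators, T>::type. The
  // encodable case (compressed oops) is only declared here and is defined out of line;
  // the non-encodable case is the identity conversion.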
  template <DecoratorSet internal_decorators, typename T>
  static inline typename EnableIf<TestEncodable<internal_decorators, T>::value,
                                  typename EncodedType<internal_decorators, T>::type>::type
  encode_internal(T value);

  template <DecoratorSet internal_decorators, typename T>
  static inline typename EnableIf<!TestEncodable<internal_decorators, T>::value,
                                  typename EncodedType<internal_decorators, T>::type>::type
  encode_internal(T value) {
    return value;
  }

  template <typename T>
  static inline typename EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  template <DecoratorSet internal_decorators, typename T>
  static inline typename EnableIf<TestEncodable<internal_decorators, T>::value, T>::type
  decode_internal(typename EncodedType<internal_decorators, T>::type value);

  template <DecoratorSet internal_decorators, typename T>
  static inline typename EnableIf<!TestEncodable<internal_decorators, T>::value, T>::type
  decode_internal(typename EncodedType<internal_decorators, T>::type value) {
    return value;
  }

  template <typename T>
  static inline T decode(typename EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

public:
  template <typename T>
  static void store(void* addr, T value) {
    Raw::template store<T>(addr, value);
  }

  template <typename T>
  static void store_at(void* base, ptrdiff_t offset, T value) {
    store<T>(field_addr(base, offset), value);
  }

  template <typename T>
  static T load(void* addr) {
    return Raw::template load<T>(addr);
  }

  template <typename T>
  static T load_at(void* base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T cas(T new_value, void* addr, T compare_value) {
    return Raw::template cas<T>(new_value, addr, compare_value);
  }

  template <typename T>
  static T cas_at(T new_value, void* base, ptrdiff_t offset, T compare_value) {
    return cas<T>(new_value, field_addr(base, offset), compare_value);
  }

  template <typename T>
  static T swap(T new_value, void* addr) {
    return Raw::template swap<T>(new_value, addr);
  }

  template <typename T>
  static T swap_at(T new_value, void* base, ptrdiff_t offset) {
    return swap<T>(new_value, field_addr(base, offset));
  }

  template <typename T>
  static bool copy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
    return Raw::template copy<T>(src, dst, length);
  }

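  // The oop_* variants below mirror the accessors above for oop values; they are only
  // declared here and are defined out of line, where they are expected to apply
  // encode()/decode() around the underlying accesses.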
  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(void* base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(void* base, ptrdiff_t offset);

  template <typename T>
  static T oop_cas(T new_value, void* addr, T compare_value);
  template <typename T>
  static T oop_cas_at(T new_value, void* base, ptrdiff_t offset, T compare_value);

  template <typename T>
  static T oop_swap(T new_value, void* addr);
  template <typename T>
  static T oop_swap_at(T new_value, void* base, ptrdiff_t offset);

  template <typename T>
  static bool oop_copy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length);

  static void clone(oop src, oop dst, size_t size);
};

#endif // SHARE_VM_RUNTIME_ACCESSBACKEND_HPP