1 /*
   2  * Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_OOPS_ACCESSBACKEND_HPP
  26 #define SHARE_OOPS_ACCESSBACKEND_HPP
  27 
  28 #include "gc/shared/barrierSetConfig.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "metaprogramming/conditional.hpp"
  31 #include "metaprogramming/decay.hpp"
  32 #include "metaprogramming/enableIf.hpp"
  33 #include "metaprogramming/integralConstant.hpp"
  34 #include "metaprogramming/isFloatingPoint.hpp"
  35 #include "metaprogramming/isIntegral.hpp"
  36 #include "metaprogramming/isPointer.hpp"
  37 #include "metaprogramming/isSame.hpp"
  38 #include "metaprogramming/isVolatile.hpp"
  39 #include "oops/accessDecorators.hpp"
  40 #include "oops/oopsHierarchy.hpp"
  41 #include "utilities/debug.hpp"
  42 #include "utilities/globalDefinitions.hpp"
  43 
  44 
  45 // This metafunction returns either oop or narrowOop depending on whether
  46 // an access needs to use compressed oops or not.
template <DecoratorSet decorators>
struct HeapOopType: AllStatic {
  // Oops are stored compressed only when the access asks for compressed-oop
  // conversion AND the runtime actually runs with compressed oops enabled.
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value &&
                                         HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  // The in-heap representation type for this decorator set.
  typedef typename Conditional<needs_oop_compress, narrowOop, oop>::type type;
};
  53 
namespace AccessInternal {
  // Enumerates the accessor kinds that can be runtime-dispatched. Each value
  // selects one function pointer slot in RuntimeDispatch further down.
  enum BarrierType {
    BARRIER_STORE,
    BARRIER_STORE_AT,
    BARRIER_LOAD,
    BARRIER_LOAD_AT,
    BARRIER_ATOMIC_CMPXCHG,
    BARRIER_ATOMIC_CMPXCHG_AT,
    BARRIER_ATOMIC_XCHG,
    BARRIER_ATOMIC_XCHG_AT,
    BARRIER_ARRAYCOPY,
    BARRIER_CLONE,
    BARRIER_RESOLVE,
    BARRIER_EQUALS
  };

  // True iff the access must convert between oop and narrowOop: the value is
  // an oop, the heap representation is narrowOop, and the passed-in type is oop.
  template <DecoratorSet decorators, typename T>
  struct MustConvertCompressedOop: public IntegralConstant<bool,
    HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value &&
    IsSame<typename HeapOopType<decorators>::type, narrowOop>::value &&
    IsSame<T, oop>::value> {};

  // This metafunction returns an appropriate oop type if the value is oop-like
  // and otherwise returns the same type T.
  template <DecoratorSet decorators, typename T>
  struct EncodedType: AllStatic {
    typedef typename Conditional<
      HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
      typename HeapOopType<decorators>::type, T>::type type;
  };

  // Computes the address of the oop field at base + byte_offset, typed as
  // oop* or narrowOop* according to the decorator set.
  template <DecoratorSet decorators>
  inline typename HeapOopType<decorators>::type*
  oop_field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<typename HeapOopType<decorators>::type*>(
             reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }

  // This metafunction returns whether it is possible for a type T to require
  // locking to support wide atomics or not.
  template <typename T>
#ifdef SUPPORTS_NATIVE_CX8
  struct PossiblyLockedAccess: public IntegralConstant<bool, false> {};
#else
  // Without native 8-byte compare-and-exchange, accesses wider than 4 bytes
  // may have to fall back to a mutex-protected slow path.
  struct PossiblyLockedAccess: public IntegralConstant<bool, (sizeof(T) > 4)> {};
#endif

  // Function pointer signatures for each dispatched accessor kind.
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef T (*load_at_func_t)(oop base, ptrdiff_t offset);
    typedef void (*store_at_func_t)(oop base, ptrdiff_t offset, T value);
    typedef T (*atomic_cmpxchg_at_func_t)(T new_value, oop base, ptrdiff_t offset, T compare_value);
    typedef T (*atomic_xchg_at_func_t)(T new_value, oop base, ptrdiff_t offset);

    typedef T (*load_func_t)(void* addr);
    typedef void (*store_func_t)(void* addr, T value);
    typedef T (*atomic_cmpxchg_func_t)(T new_value, void* addr, T compare_value);
    typedef T (*atomic_xchg_func_t)(T new_value, void* addr);

    typedef bool (*arraycopy_func_t)(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length);
    typedef void (*clone_func_t)(oop src, oop dst, size_t size);
    typedef oop (*resolve_func_t)(oop obj);
    typedef bool (*equals_func_t)(oop o1, oop o2);
  };

  // Specialization for T = void: only an untyped arraycopy is meaningful.
  template <DecoratorSet decorators>
  struct AccessFunctionTypes<decorators, void> {
    typedef bool (*arraycopy_func_t)(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const void* src, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, void* dst, size_t length);
  };

  // Maps a BarrierType to the matching function pointer type above; the
  // specializations are generated by the macro directly below.
  template <DecoratorSet decorators, typename T, BarrierType barrier> struct AccessFunction {};

#define ACCESS_GENERATE_ACCESS_FUNCTION(bt, func)                   \
  template <DecoratorSet decorators, typename T>                    \
  struct AccessFunction<decorators, T, bt>: AllStatic{              \
    typedef typename AccessFunctionTypes<decorators, T>::func type; \
  }
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE, store_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE_AT, store_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD, load_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD_AT, load_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG, atomic_cmpxchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG_AT, atomic_cmpxchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG, atomic_xchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG_AT, atomic_xchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ARRAYCOPY, arraycopy_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_CLONE, clone_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_RESOLVE, resolve_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_EQUALS, equals_func_t);
#undef ACCESS_GENERATE_ACCESS_FUNCTION

  // Resolvers that pick the accessor to install for a given barrier type;
  // defined out-of-line to keep this header light.
  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_barrier();

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_oop_barrier();

  // RAII-style lock used by the slow path for wide atomics on platforms
  // without native 8-byte compare-and-exchange.
  class AccessLocker {
  public:
    AccessLocker();
    ~AccessLocker();
  };
  bool wide_atomic_needs_locking();

  // Untyped address of the field at base + offset.
  void* field_addr(oop base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently included access is.
  void arraycopy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void arraycopy_conjoint_oops(oop* src, oop* dst, size_t length);
  void arraycopy_conjoint_oops(narrowOop* src, narrowOop* dst, size_t length);

  void arraycopy_disjoint_words(void* src, void* dst, size_t length);
  void arraycopy_disjoint_words_atomic(void* src, void* dst, size_t length);

  template<typename T>
  void arraycopy_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_arrayof_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_conjoint_atomic(T* src, T* dst, size_t length);
}
 176 
// This mask specifies what decorators are relevant for raw accesses. When passing
// accesses to the raw layer, irrelevant decorators are removed so that templates
// are not needlessly instantiated for decorator sets that behave identically.
const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
                                        ARRAYCOPY_DECORATOR_MASK | OOP_DECORATOR_MASK;
 181 
 182 // The RawAccessBarrier performs raw accesses with additional knowledge of
 183 // memory ordering, so that OrderAccess/Atomic is called when necessary.
 184 // It additionally handles compressed oops, and hence is not completely "raw"
 185 // strictly speaking.
// The RawAccessBarrier performs raw accesses with additional knowledge of
// memory ordering, so that OrderAccess/Atomic is called when necessary.
// It additionally handles compressed oops, and hence is not completely "raw"
// strictly speaking.
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  static inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  // Only encode if INTERNAL_VALUE_IS_OOP
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value,
    typename HeapOopType<idecorators>::type>::type
  encode_internal(T value);

  // No conversion required: pass the value through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  encode_internal(T value) {
    return value;
  }

  // Encode a value to its in-heap representation (possibly narrowOop).
  template <typename T>
  static inline typename AccessInternal::EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  // Only decode if INTERNAL_VALUE_IS_OOP
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(typename HeapOopType<idecorators>::type value);

  // No conversion required: pass the value through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(T value) {
    return value;
  }

  // Decode a value from its in-heap representation.
  template <typename T>
  static inline T decode(typename AccessInternal::EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

protected:
  // Loads dispatched on the memory ordering decorator. The MO_SEQ_CST,
  // MO_ACQUIRE and MO_RELAXED variants are defined out-of-line since they
  // rely on OrderAccess/Atomic.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_ACQUIRE>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  load_internal(void* addr);

  // MO_VOLATILE: volatile read; constrains compiler reordering only.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_VOLATILE>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<const volatile T*>(addr);
  }

  // MO_UNORDERED: plain load with no ordering guarantees.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<const T*>(addr);
  }

  // Stores dispatched on the memory ordering decorator, mirroring the loads.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELEASE>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value>::type
  store_internal(void* addr, T value);

  // MO_VOLATILE: volatile write; the cast-and-discard silences warnings
  // about reading back the volatile lvalue the assignment yields.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_VOLATILE>::value>::type
  store_internal(void* addr, T value) {
    (void)const_cast<T&>(*reinterpret_cast<volatile T*>(addr) = value);
  }

  // MO_UNORDERED: plain store with no ordering guarantees.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value>::type
  store_internal(void* addr, T value) {
    *reinterpret_cast<T*>(addr) = value;
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_cmpxchg_internal(T new_value, void* addr, T compare_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  atomic_cmpxchg_internal(T new_value, void* addr, T compare_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_xchg_internal(T new_value, void* addr);

  // The following *_locked mechanisms serve the purpose of handling atomic operations
  // that are larger than a machine can handle, and then possibly opt for using
  // a slower path using a mutex to perform the operation.

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(T new_value, void* addr, T compare_value) {
    return atomic_cmpxchg_internal<ds>(new_value, addr, compare_value);
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(T new_value, void* addr, T compare_value);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(T new_value, void* addr) {
    return atomic_xchg_internal<ds>(new_value, addr);
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(T new_value, void* addr);

public:
  // Public entry points; each forwards to the ordering-dispatched internal
  // variant selected by the class' decorator set.
  template <typename T>
  static inline void store(void* addr, T value) {
    store_internal<decorators>(addr, value);
  }

  template <typename T>
  static inline T load(void* addr) {
    return load_internal<decorators, T>(addr);
  }

  template <typename T>
  static inline T atomic_cmpxchg(T new_value, void* addr, T compare_value) {
    return atomic_cmpxchg_maybe_locked<decorators>(new_value, addr, compare_value);
  }

  template <typename T>
  static inline T atomic_xchg(T new_value, void* addr) {
    return atomic_xchg_maybe_locked<decorators>(new_value, addr);
  }

  template <typename T>
  static bool arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length);

  // oop_* variants additionally encode/decode compressed oops as needed;
  // defined out-of-line.
  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(oop base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(oop base, ptrdiff_t offset);

  template <typename T>
  static T oop_atomic_cmpxchg(T new_value, void* addr, T compare_value);
  template <typename T>
  static T oop_atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value);

  template <typename T>
  static T oop_atomic_xchg(T new_value, void* addr);
  template <typename T>
  static T oop_atomic_xchg_at(T new_value, oop base, ptrdiff_t offset);

  // *_at variants compute the field address from base + offset first.
  template <typename T>
  static void store_at(oop base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load_at(oop base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return atomic_cmpxchg(new_value, field_addr(base, offset), compare_value);
  }

  template <typename T>
  static T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    return atomic_xchg(new_value, field_addr(base, offset));
  }

  template <typename T>
  static bool oop_arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length);

  static void clone(oop src, oop dst, size_t size);

  // Raw accesses need no GC-specific object resolution or comparison.
  static oop resolve(oop obj) { return obj; }

  static bool equals(oop o1, oop o2) { return o1 == o2; }
};
 407 
 408 // Below is the implementation of the first 4 steps of the template pipeline:
 409 // * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
 410 //           and sets default decorators to sensible values.
 411 // * Step 2: Reduce types. This step makes sure there is only a single T type and not
 412 //           multiple types. The P type of the address and T type of the value must
 413 //           match.
 414 // * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
 415 //           avoided, and in that case avoids it (calling raw accesses or
 416 //           primitive accesses in a build that does not require primitive GC barriers)
 417 // * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
 418 //           BarrierSet::AccessBarrier accessor that attaches GC-required barriers
 419 //           to the access.
 420 
 421 namespace AccessInternal {
  // Helper: every oop-like type canonicalizes to oop, except narrowOop,
  // which stays narrowOop (see the specialization below).
  template <typename T>
  struct OopOrNarrowOopInternal: AllStatic {
    typedef oop type;
  };

  template <>
  struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
    typedef narrowOop type;
  };

  // This metafunction returns a canonicalized oop/narrowOop type for a passed
  // in oop-like types passed in from oop_* overloads where the user has sworn
  // that the passed in values should be oop-like (e.g. oop, oopDesc*, arrayOop,
  // narrowOop, instanceOopDesc*, and random other things).
  // In the oop_* overloads, it must hold that if the passed in type T is not
  // narrowOop, then it by contract has to be one of many oop-like types implicitly
  // convertible to oop, and hence returns oop as the canonical oop type.
  // If it turns out it was not, then the implicit conversion to oop will fail
  // to compile, as desired.
  template <typename T>
  struct OopOrNarrowOop: AllStatic {
    typedef typename OopOrNarrowOopInternal<typename Decay<T>::type>::type type;
  };
 445 
 446   inline void* field_addr(oop base, ptrdiff_t byte_offset) {
 447     return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
 448   }
 449   // Step 4: Runtime dispatch
 450   // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
 451   // accessor. This is required when the access either depends on whether compressed oops
 452   // is being used, or it depends on which GC implementation was chosen (e.g. requires GC
 453   // barriers). The way it works is that a function pointer initially pointing to an
 454   // accessor resolution function gets called for each access. Upon first invocation,
 455   // it resolves which accessor to be used in future invocations and patches the
 456   // function pointer to this new accessor.
 457 
  // Primary template; only the per-BarrierType specializations below are used.
  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};

  // Each specialization holds one function pointer plus a *_init resolver.
  // The pointer starts out at the resolver (see the definitions further down);
  // the first call resolves the proper accessor and patches the pointer so
  // subsequent calls go straight to the resolved accessor.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    static func_t _store_func;

    static void store_init(void* addr, T value);

    static inline void store(void* addr, T value) {
      _store_func(addr, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    static func_t _store_at_func;

    static void store_at_init(oop base, ptrdiff_t offset, T value);

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      _store_at_func(base, offset, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    static func_t _load_func;

    static T load_init(void* addr);

    static inline T load(void* addr) {
      return _load_func(addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    static func_t _load_at_func;

    static T load_at_init(oop base, ptrdiff_t offset);

    static inline T load_at(oop base, ptrdiff_t offset) {
      return _load_at_func(base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    static func_t _atomic_cmpxchg_func;

    static T atomic_cmpxchg_init(T new_value, void* addr, T compare_value);

    static inline T atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      return _atomic_cmpxchg_func(new_value, addr, compare_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    static func_t _atomic_cmpxchg_at_func;

    static T atomic_cmpxchg_at_init(T new_value, oop base, ptrdiff_t offset, T compare_value);

    static inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return _atomic_cmpxchg_at_func(new_value, base, offset, compare_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    static func_t _atomic_xchg_func;

    static T atomic_xchg_init(T new_value, void* addr);

    static inline T atomic_xchg(T new_value, void* addr) {
      return _atomic_xchg_func(new_value, addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    static func_t _atomic_xchg_at_func;

    static T atomic_xchg_at_init(T new_value, oop base, ptrdiff_t offset);

    static inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      return _atomic_xchg_at_func(new_value, base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    static func_t _arraycopy_func;

    static bool arraycopy_init(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length);

    static inline bool arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length) {
      return _arraycopy_func(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    static func_t _clone_func;

    static void clone_init(oop src, oop dst, size_t size);

    static inline void clone(oop src, oop dst, size_t size) {
      _clone_func(src, dst, size);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_RESOLVE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type func_t;
    static func_t _resolve_func;

    static oop resolve_init(oop obj);

    static inline oop resolve(oop obj) {
      return _resolve_func(obj);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_EQUALS>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_EQUALS>::type func_t;
    static func_t _equals_func;

    static bool equals_init(oop o1, oop o2);

    static inline bool equals(oop o1, oop o2) {
      return _equals_func(o1, o2);
    }
  };
 604 
  // Initialize the function pointers to point to the resolving function.
  // These are template static data member definitions: each instantiation's
  // pointer starts out at its *_init resolver and is patched on first use.
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type
  RuntimeDispatch<decorators, T, BARRIER_RESOLVE>::_resolve_func = &resolve_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_EQUALS>::type
  RuntimeDispatch<decorators, T, BARRIER_EQUALS>::_equals_func = &equals_init;
 653 
 654   // Step 3: Pre-runtime dispatching.
 655   // The PreRuntimeDispatch class is responsible for filtering the barrier strength
 656   // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
 657   // dispatch point. Otherwise it goes through a runtime check if hardwiring was
 658   // not possible.
 659   struct PreRuntimeDispatch: AllStatic {
    // A raw access can be hardwired (no runtime dispatch) unless it is an
    // oop access that needs compressed-oop conversion but the decorator set
    // does not yet say whether the runtime uses compressed oops.
    template<DecoratorSet decorators>
    struct CanHardwireRaw: public IntegralConstant<
      bool,
      !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
      !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
      HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
    {};

    // Decorator bits toggled once the UseCompressedOops runtime check is made.
    static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;
 669 
 670     template<DecoratorSet decorators>
 671     static bool is_hardwired_primitive() {
 672       return !HasDecorator<decorators, INTERNAL_BT_BARRIER_ON_PRIMITIVES>::value &&
 673              !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
 674     }
 675 
 676     template <DecoratorSet decorators, typename T>
 677     inline static typename EnableIf<
 678       HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
 679     store(void* addr, T value) {
 680       typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
 681       if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
 682         Raw::oop_store(addr, value);
 683       } else {
 684         Raw::store(addr, value);
 685       }
 686     }
 687 
 688     template <DecoratorSet decorators, typename T>
 689     inline static typename EnableIf<
 690       HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
 691     store(void* addr, T value) {
 692       if (UseCompressedOops) {
 693         const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
 694         PreRuntimeDispatch::store<expanded_decorators>(addr, value);
 695       } else {
 696         const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
 697         PreRuntimeDispatch::store<expanded_decorators>(addr, value);
 698       }
 699     }
 700 
    // Non-raw store: hardwire to a raw access if this is a primitive access
    // with no primitive barriers requested; otherwise dispatch through the
    // runtime barrier for the GC in use.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store(void* addr, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
      }
    }
 712 
    // Raw store_at: compute the field address and forward to the raw store.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      store<decorators>(field_addr(base, offset), value);
    }
 719 
    // Non-raw store_at: hardwire primitive accesses to raw, otherwise
    // dispatch through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
      }
    }
 731 
    // Raw load that can be hardwired: select the oop or primitive flavour of
    // the raw barrier at compile time.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::template oop_load<T>(addr);
      } else {
        return Raw::template load<T>(addr);
      }
    }
 743 
    // Raw oop load through an address whose compression is not statically
    // known: resolve UseCompressedOops at runtime and re-dispatch with the
    // compression encoded statically.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      }
    }
 756 
    // Non-raw load: hardwire primitive accesses to raw, otherwise dispatch
    // through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load(void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
      }
    }
 768 
    // Raw load_at: compute the field address and forward to the raw load.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }
 775 
    // Non-raw load_at: hardwire primitive accesses to raw, otherwise dispatch
    // through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }
 787 
    // Raw CAS that can be hardwired: select the oop or primitive flavour of
    // the raw barrier at compile time.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(new_value, addr, compare_value);
      } else {
        return Raw::atomic_cmpxchg(new_value, addr, compare_value);
      }
    }
 799 
    // Raw oop CAS through an address whose compression is not statically
    // known: resolve UseCompressedOops at runtime and re-dispatch with the
    // compression encoded statically.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      }
    }
 812 
    // Non-raw CAS: hardwire primitive accesses to raw, otherwise dispatch
    // through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(new_value, addr, compare_value);
      }
    }
 824 
    // Raw atomic_cmpxchg_at: compute the field address and forward to the raw CAS.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return atomic_cmpxchg<decorators>(new_value, field_addr(base, offset), compare_value);
    }
 831 
    // Non-raw atomic_cmpxchg_at: hardwire primitive accesses to raw, otherwise
    // dispatch through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(new_value, base, offset, compare_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(new_value, base, offset, compare_value);
      }
    }
 843 
    // Raw swap that can be hardwired: select the oop or primitive flavour of
    // the raw barrier at compile time.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(new_value, addr);
      } else {
        return Raw::atomic_xchg(new_value, addr);
      }
    }
 855 
    // Raw oop swap through an address whose compression is not statically
    // known: resolve UseCompressedOops at runtime and re-dispatch with the
    // compression encoded statically.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      }
    }
 868 
    // Non-raw swap: hardwire primitive accesses to raw, otherwise dispatch
    // through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(new_value, addr);
      }
    }
 880 
    // Raw atomic_xchg_at: compute the field address and forward to the raw swap.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      return atomic_xchg<decorators>(new_value, field_addr(base, offset));
    }
 887 
 888     template <DecoratorSet decorators, typename T>
 889     inline static typename EnableIf<
 890       !HasDecorator<decorators, AS_RAW>::value, T>::type
 891     atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
 892       if (is_hardwired_primitive<decorators>()) {
 893         const DecoratorSet expanded_decorators = decorators | AS_RAW;
 894         return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, base, offset);
 895       } else {
 896         return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(new_value, base, offset);
 897       }
 898     }
 899 
    // Raw arraycopy that can be hardwired: select the oop or primitive flavour
    // of the raw barrier at compile time.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_arraycopy(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
      } else {
        return Raw::arraycopy(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
      }
    }
 911 
    // Raw oop arraycopy where the element compression is not statically known:
    // resolve UseCompressedOops at runtime and re-dispatch with the compression
    // encoded statically.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
      }
    }
 924 
    // Non-raw arraycopy: hardwire primitive accesses to raw, otherwise
    // dispatch through the runtime barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, bool>::type
    arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
      }
    }
 936 
    // Raw clone: no runtime barrier needed, go straight to the raw barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }
 944 
    // Non-raw clone: dispatch through the runtime barrier for the GC in use.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
    }
 951 
    // resolve with the to-space invariant statically guaranteed: the raw
    // barrier suffices, no runtime dispatch needed.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
    resolve(oop obj) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      return Raw::resolve(obj);
    }
 959 
    // resolve without the to-space invariant: dispatch through the runtime
    // barrier, which may need to forward the object.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
    resolve(oop obj) {
      return RuntimeDispatch<decorators, oop, BARRIER_RESOLVE>::resolve(obj);
    }
 966 
    // equals with the to-space invariant statically guaranteed: plain raw
    // comparison is sufficient.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, bool>::type
    equals(oop o1, oop o2) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      return Raw::equals(o1, o2);
    }
 974 
    // equals without the to-space invariant: dispatch through the runtime
    // barrier, which may need to resolve forwarded objects first.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, bool>::type
    equals(oop o1, oop o2) {
      return RuntimeDispatch<decorators, oop, BARRIER_EQUALS>::equals(o1, o2);
    }
 981   };
 982 
 983   // This class adds implied decorators that follow according to decorator rules.
 984   // For example adding default reference strength and default memory ordering
 985   // semantics.
  template <DecoratorSet input_decorators>
  struct DecoratorFixup: AllStatic {
    // If no reference strength has been picked, then strong will be picked
    static const DecoratorSet ref_strength_default = input_decorators |
      (((ON_DECORATOR_MASK & input_decorators) == 0 && (INTERNAL_VALUE_IS_OOP & input_decorators) != 0) ?
       ON_STRONG_OOP_REF : INTERNAL_EMPTY);
    // If no memory ordering has been picked, unordered will be picked
    static const DecoratorSet memory_ordering_default = ref_strength_default |
      ((MO_DECORATOR_MASK & ref_strength_default) == 0 ? MO_UNORDERED : INTERNAL_EMPTY);
    // If no barrier strength has been picked, normal will be used
    static const DecoratorSet barrier_strength_default = memory_ordering_default |
      ((AS_DECORATOR_MASK & memory_ordering_default) == 0 ? AS_NORMAL : INTERNAL_EMPTY);
    // Heap array accesses imply it is a heap access
    static const DecoratorSet heap_array_is_in_heap = barrier_strength_default |
      ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
    // A concurrent-root access is also a root access
    static const DecoratorSet conc_root_is_root = heap_array_is_in_heap |
      ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
    // An archive-root access is also a root access
    static const DecoratorSet archive_root_is_root = conc_root_is_root |
      ((IN_ARCHIVE_ROOT & conc_root_is_root) != 0 ? IN_ROOT : INTERNAL_EMPTY);
    // The final expanded decorator set, including the build-time decorators
    static const DecoratorSet value = archive_root_is_root | BT_BUILDTIME_DECORATORS;
  };
1007 
1008   // Step 2: Reduce types.
1009   // Enforce that for non-oop types, T and P have to be strictly the same.
1010   // P is the type of the address and T is the type of the values.
  // As for oop types, it is allowed to pass T in {narrowOop, oop} and
1012   // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
1013   // the subsequent table. (columns are P, rows are T)
1014   // |           | HeapWord  |   oop   | narrowOop |
1015   // |   oop     |  rt-comp  | hw-none |  hw-comp  |
1016   // | narrowOop |     x     |    x    |  hw-none  |
1017   //
1018   // x means not allowed
1019   // rt-comp means it must be checked at runtime whether the oop is compressed.
1020   // hw-none means it is statically known the oop will not be compressed.
1021   // hw-comp means it is statically known the oop will be compressed.
1022 
  // Generic case of the reduction table: T == P (primitive access, or an oop
  // stored at an oop* address) — no compression conversion needed.
  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }
1027 
  // oop value at a narrowOop* address: the compression is statically known
  // (hw-comp in the table above).
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1034 
  // narrowOop value at a narrowOop* address: also statically compressed.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1041 
  // oop value at a HeapWord* address: compression must be resolved at
  // runtime (rt-comp) — only the CONVERT decorator is set, not RT_USE.
  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1047 
  // Generic case: T == P, no compression conversion needed.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T new_value, T* addr, T compare_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(new_value, addr, compare_value);
  }
1052 
  // oop CAS at a narrowOop* address: statically known to be compressed.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(oop new_value, narrowOop* addr, oop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }
1059 
  // narrowOop CAS at a narrowOop* address: also statically compressed.
  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop new_value, narrowOop* addr, narrowOop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }
1066 
  // oop CAS at a HeapWord* address: compression resolved at runtime.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(oop new_value,
                                         HeapWord* addr,
                                         oop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }
1074 
1075   template <DecoratorSet decorators, typename T>
1076   inline T atomic_xchg_reduce_types(T new_value, T* addr) {
1077     const DecoratorSet expanded_decorators = decorators;
1078     return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
1079   }
1080 
  // oop swap at a narrowOop* address: statically known to be compressed.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(oop new_value, narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }
1087 
  // narrowOop swap at a narrowOop* address: also statically compressed.
  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop new_value, narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }
1094 
  // oop swap at a HeapWord* address: compression resolved at runtime.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(oop new_value, HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }
1100 
  // Generic case: load a T from a T* address, no compression conversion.
  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }
1105 
  // Load from a narrowOop* address: statically compressed; the result type is
  // T itself if T is narrowOop, otherwise oop (see OopOrNarrowOop).
  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }
1112 
  // Load from a HeapWord* address: compression resolved at runtime; always
  // yields an uncompressed oop.
  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }
1118 
  // Generic case: element type known, no compression conversion needed.
  template <DecoratorSet decorators, typename T>
  inline bool arraycopy_reduce_types(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length) {
    return PreRuntimeDispatch::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
  }
1123 
  // HeapWord* elements: oop compression must be resolved at runtime.
  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const HeapWord* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, HeapWord* dst_raw, size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
  }
1129 
  // narrowOop* elements: statically known to be compressed.
  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const narrowOop* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, narrowOop* dst_raw, size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
  }
1136 
1137   // Step 1: Set default decorators. This step remembers if a type was volatile
1138   // and then sets the MO_VOLATILE decorator by default. Otherwise, a default
1139   // memory ordering is set for the access, and the implied decorator rules
1140   // are applied to select sensible defaults for decorators that have not been
1141   // explicitly set. For example, default object referent strength is set to strong.
1142   // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types). This step also performs some type verification
1144   // that the passed in types make sense.
1145 
  // Compile-time sanity check that T is a valid value type for a primitive
  // access (oop accesses are validated earlier in the chain).
  template <DecoratorSet decorators, typename T>
  static void verify_types(){
    // If this fails to compile, then you have sent in something that is
    // not recognized as a valid primitive type to a primitive Access function.
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
                   (IsPointer<T>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // not allowed primitive type
  }
1154 
  // Step 1 entry point for address-based stores: verify types, decay the
  // value/address types, pick default decorators, then reduce types.
  template <DecoratorSet decorators, typename P, typename T>
  inline void store(P* addr, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_VOLATILE by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_VOLATILE | decorators) : decorators>::value;
    store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
  }
1168 
  // Step 1 entry point for offset-based stores into a heap object.
  template <DecoratorSet decorators, typename T>
  inline void store_at(oop base, ptrdiff_t offset, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    // Oop values may need compressed-oop conversion; remember that now.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
    PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
  }
1179 
  // Step 1 entry point for address-based loads: for oop accesses the decayed
  // value type is inferred from T via OopOrNarrowOop; primitives just decay.
  template <DecoratorSet decorators, typename P, typename T>
  inline T load(P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_VOLATILE by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_VOLATILE | decorators) : decorators>::value;
    return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
  }
1194 
  // Step 1 entry point for offset-based loads from a heap object.
  template <DecoratorSet decorators, typename T>
  inline T load_at(oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // Expand the decorators (figure out sensible defaults)
    // Potentially remember if we need compressed oop awareness
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
    return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
  }
1208 
  // Step 1 entry point for address-based CAS; defaults to MO_SEQ_CST when no
  // memory ordering decorator was supplied.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(T new_value, P* addr, T compare_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(new_decayed_value,
                                                            const_cast<DecayedP*>(addr),
                                                            compare_decayed_value);
  }
1223 
  // Step 1 entry point for offset-based CAS into a heap object; defaults to
  // MO_SEQ_CST and records compressed-oop awareness for oop values.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(new_decayed_value, base,
                                                                   offset, compare_decayed_value);
  }
1241 
  // Step 1 entry point for address-based atomic swap.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(T new_value, P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(new_decayed_value,
                                                         const_cast<DecayedP*>(addr));
  }
1253 
  // Step 1 entry point for offset-based atomic swap into a heap object.
  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(new_decayed_value, base, offset);
  }
1265 
  // Step 1 entry point for arraycopy; array accesses are always heap accesses.
  template <DecoratorSet decorators, typename T>
  inline bool arraycopy(arrayOop src_obj, ptrdiff_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, ptrdiff_t dst_offset_in_bytes, T* dst_raw, size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (IsSame<T, void>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
    typedef typename Decay<T>::type DecayedT;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IN_HEAP_ARRAY | IN_HEAP>::value;
    return arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<const DecayedT*>(src_raw),
                                                       dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                       length);
  }
1277 
1278   template <DecoratorSet decorators>
1279   inline void clone(oop src, oop dst, size_t size) {
1280     const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1281     PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
1282   }
1283 
1284   template <DecoratorSet decorators>
1285   inline oop resolve(oop obj) {
1286     const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1287     return PreRuntimeDispatch::resolve<expanded_decorators>(obj);
1288   }
1289 
1290   template <DecoratorSet decorators>
1291   inline bool equals(oop o1, oop o2) {
1292     const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1293     return PreRuntimeDispatch::equals<expanded_decorators>(o1, o2);
1294   }
1295 
1296   // Infer the type that should be returned from an Access::oop_load.
1297   template <typename P, DecoratorSet decorators>
1298   class OopLoadProxy: public StackObj {
1299   private:
1300     P *const _addr;
1301   public:
1302     OopLoadProxy(P* addr) : _addr(addr) {}
1303 
1304     inline operator oop() {
1305       return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr);
1306     }
1307 
1308     inline operator narrowOop() {
1309       return load<decorators | INTERNAL_VALUE_IS_OOP, P, narrowOop>(_addr);
1310     }
1311 
1312     template <typename T>
1313     inline bool operator ==(const T& other) const {
1314       return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) == other;
1315     }
1316 
1317     template <typename T>
1318     inline bool operator !=(const T& other) const {
1319       return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) != other;
1320     }
1321   };
1322 
1323   // Infer the type that should be returned from an Access::load_at.
1324   template <DecoratorSet decorators>
1325   class LoadAtProxy: public StackObj {
1326   private:
1327     const oop _base;
1328     const ptrdiff_t _offset;
1329   public:
1330     LoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}
1331 
1332     template <typename T>
1333     inline operator T() const {
1334       return load_at<decorators, T>(_base, _offset);
1335     }
1336 
1337     template <typename T>
1338     inline bool operator ==(const T& other) const { return load_at<decorators, T>(_base, _offset) == other; }
1339 
1340     template <typename T>
1341     inline bool operator !=(const T& other) const { return load_at<decorators, T>(_base, _offset) != other; }
1342   };
1343 
1344   // Infer the type that should be returned from an Access::oop_load_at.
1345   template <DecoratorSet decorators>
1346   class OopLoadAtProxy: public StackObj {
1347   private:
1348     const oop _base;
1349     const ptrdiff_t _offset;
1350   public:
1351     OopLoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}
1352 
1353     inline operator oop() const {
1354       return load_at<decorators | INTERNAL_VALUE_IS_OOP, oop>(_base, _offset);
1355     }
1356 
1357     inline operator narrowOop() const {
1358       return load_at<decorators | INTERNAL_VALUE_IS_OOP, narrowOop>(_base, _offset);
1359     }
1360 
1361     template <typename T>
1362     inline bool operator ==(const T& other) const {
1363       return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) == other;
1364     }
1365 
1366     template <typename T>
1367     inline bool operator !=(const T& other) const {
1368       return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) != other;
1369     }
1370   };
1371 }
1372 
1373 #endif // SHARE_OOPS_ACCESSBACKEND_HPP