/*
 * Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_RUNTIME_ACCESS_INLINE_HPP
#define SHARE_VM_RUNTIME_ACCESS_INLINE_HPP

#include "gc/shared/barrierSetConfig.inline.hpp"
#include "metaprogramming/conditional.hpp"
#include "metaprogramming/isFloatingPoint.hpp"
#include "metaprogramming/isIntegral.hpp"
#include "metaprogramming/isPointer.hpp"
#include "metaprogramming/isVolatile.hpp"
#include "oops/access.hpp"
#include "oops/accessBackend.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.inline.hpp"

// This file outlines the template pipeline of accesses going through the Access
// API. There are essentially 5 steps for each access.
// * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
//           and sets default decorators to sensible values.
// * Step 2: Reduce types. This step makes sure there is only a single T type and not
//           multiple types. The P type of the address and the T type of the value must
//           match.
// * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
//           avoided, and in that case avoids it (calling raw accesses, or primitive
//           accesses in a build that does not require primitive GC barriers).
// * Step 4: Runtime dispatch. This step performs a runtime dispatch to the corresponding
//           BarrierSet::AccessBarrier accessor that attaches GC-required barriers
//           to the access.
// * Step 5: Post-runtime dispatch. This step casts previously unknown types, such
//           as the address type of an oop on the heap (is it oop* or narrowOop*), to
//           the appropriate type. It also splits sufficiently orthogonal accesses into
//           different functions, such as whether the access involves oops or primitives
//           and whether the access is performed on the heap or outside of it. Then the
//           appropriate BarrierSet::AccessBarrier is called to perform the access.
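//
// As an illustrative (not normative) example of how a single access walks through the
// five steps above, consider a hypothetical call site:
//
//   HeapAccess<ON_WEAK_OOP_REF>::oop_store_at(obj, offset, value);
//
// Step 1 decays the argument types and fills in default decorators (e.g. MO_UNORDERED,
// AS_NORMAL), Step 2 checks that the value and address types are compatible, Step 3
// notices that GC barriers may be required so the access cannot be hardwired to a raw
// access, Step 4 jumps through a lazily patched function pointer to the accessor of the
// selected BarrierSet, and Step 5 finally figures out whether the destination field is
// an oop* or a narrowOop* before calling, e.g., oop_store_in_heap_at.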

namespace AccessInternal {

  // Step 5: Post-runtime dispatch.
  // This class is the last step before calling the BarrierSet::AccessBarrier.
  // Here we make sure to figure out types that were not known prior to the
  // runtime dispatch, such as whether an oop on the heap is an oop or a narrowOop.
  // We also split orthogonal barriers, such as handling primitives vs oops
  // and on-heap vs off-heap accesses, into different calls to the barrier set.
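  //
  // For example (illustrative), for an IN_HEAP oop store with compressed oops enabled,
  // the BARRIER_STORE specialization below ends up calling
  //   GCBarrierType::oop_store_in_heap(reinterpret_cast<narrowOop*>(addr), value);
  // while a root access without IN_HEAP takes the oop_store_not_in_heap path instead.
  // The concrete GCBarrierType is supplied by the BarrierSet selected at runtime.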
  template <class GCBarrierType, BarrierType type, DecoratorSet decorators>
  struct PostRuntimeDispatch: public AllStatic { };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_STORE, decorators>: public AllStatic {
    template <typename T>
    static void access_barrier(void* addr, T value) {
      GCBarrierType::store_in_heap(reinterpret_cast<T*>(addr), value);
    }

    static void oop_access_barrier(void* addr, oop value) {
      typedef typename HeapOopType<decorators>::type OopType;
      if (HasDecorator<decorators, IN_HEAP>::value) {
        GCBarrierType::oop_store_in_heap(reinterpret_cast<OopType*>(addr), value);
      } else {
        GCBarrierType::oop_store_not_in_heap(reinterpret_cast<OopType*>(addr), value);
      }
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_LOAD, decorators>: public AllStatic {
    template <typename T>
    static T access_barrier(void* addr) {
      return GCBarrierType::load_in_heap(reinterpret_cast<T*>(addr));
    }

    static oop oop_access_barrier(void* addr) {
      typedef typename HeapOopType<decorators>::type OopType;
      if (HasDecorator<decorators, IN_HEAP>::value) {
        return GCBarrierType::oop_load_in_heap(reinterpret_cast<OopType*>(addr));
      } else {
        return GCBarrierType::oop_load_not_in_heap(reinterpret_cast<OopType*>(addr));
      }
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_ATOMIC_XCHG, decorators>: public AllStatic {
    template <typename T>
    static T access_barrier(T new_value, void* addr) {
      return GCBarrierType::atomic_xchg_in_heap(new_value, reinterpret_cast<T*>(addr));
    }

    static oop oop_access_barrier(oop new_value, void* addr) {
      typedef typename HeapOopType<decorators>::type OopType;
      if (HasDecorator<decorators, IN_HEAP>::value) {
        return GCBarrierType::oop_atomic_xchg_in_heap(new_value, reinterpret_cast<OopType*>(addr));
      } else {
        return GCBarrierType::oop_atomic_xchg_not_in_heap(new_value, reinterpret_cast<OopType*>(addr));
      }
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_ATOMIC_CMPXCHG, decorators>: public AllStatic {
    template <typename T>
    static T access_barrier(T new_value, void* addr, T compare_value) {
      return GCBarrierType::atomic_cmpxchg_in_heap(new_value, reinterpret_cast<T*>(addr), compare_value);
    }

    static oop oop_access_barrier(oop new_value, void* addr, oop compare_value) {
      typedef typename HeapOopType<decorators>::type OopType;
      if (HasDecorator<decorators, IN_HEAP>::value) {
        return GCBarrierType::oop_atomic_cmpxchg_in_heap(new_value, reinterpret_cast<OopType*>(addr), compare_value);
      } else {
        return GCBarrierType::oop_atomic_cmpxchg_not_in_heap(new_value, reinterpret_cast<OopType*>(addr), compare_value);
      }
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_ARRAYCOPY, decorators>: public AllStatic {
    template <typename T>
    static bool access_barrier(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      GCBarrierType::arraycopy_in_heap(src_obj, dst_obj, src, dst, length);
      return true;
    }

    template <typename T>
    static bool oop_access_barrier(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      typedef typename HeapOopType<decorators>::type OopType;
      return GCBarrierType::oop_arraycopy_in_heap(src_obj, dst_obj,
                                                  reinterpret_cast<OopType*>(src),
                                                  reinterpret_cast<OopType*>(dst), length);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_STORE_AT, decorators>: public AllStatic {
    template <typename T>
    static void access_barrier(oop base, ptrdiff_t offset, T value) {
      GCBarrierType::store_in_heap_at(base, offset, value);
    }

    static void oop_access_barrier(oop base, ptrdiff_t offset, oop value) {
      GCBarrierType::oop_store_in_heap_at(base, offset, value);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_LOAD_AT, decorators>: public AllStatic {
    template <typename T>
    static T access_barrier(oop base, ptrdiff_t offset) {
      return GCBarrierType::template load_in_heap_at<T>(base, offset);
    }

    static oop oop_access_barrier(oop base, ptrdiff_t offset) {
      return GCBarrierType::oop_load_in_heap_at(base, offset);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_ATOMIC_XCHG_AT, decorators>: public AllStatic {
    template <typename T>
    static T access_barrier(T new_value, oop base, ptrdiff_t offset) {
      return GCBarrierType::atomic_xchg_in_heap_at(new_value, base, offset);
    }

    static oop oop_access_barrier(oop new_value, oop base, ptrdiff_t offset) {
      return GCBarrierType::oop_atomic_xchg_in_heap_at(new_value, base, offset);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_ATOMIC_CMPXCHG_AT, decorators>: public AllStatic {
    template <typename T>
    static T access_barrier(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return GCBarrierType::atomic_cmpxchg_in_heap_at(new_value, base, offset, compare_value);
    }

    static oop oop_access_barrier(oop new_value, oop base, ptrdiff_t offset, oop compare_value) {
      return GCBarrierType::oop_atomic_cmpxchg_in_heap_at(new_value, base, offset, compare_value);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_CLONE, decorators>: public AllStatic {
    static void access_barrier(oop src, oop dst, size_t size) {
      GCBarrierType::clone_in_heap(src, dst, size);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_RESOLVE, decorators>: public AllStatic {
    static oop access_barrier(oop obj) {
      return GCBarrierType::resolve(obj);
    }
  };

  template <class GCBarrierType, DecoratorSet decorators>
  struct PostRuntimeDispatch<GCBarrierType, BARRIER_EQUALS, decorators>: public AllStatic {
    static bool access_barrier(oop o1, oop o2) {
      return GCBarrierType::equals(o1, o2);
    }
  };

  // Resolving accessors with barriers from the barrier set happens in two steps.
  // 1. Expand paths with runtime decorators, e.g. whether UseCompressedOops is on or off.
  // 2. Expand paths for each BarrierSet available in the system.
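  //
  // Illustrative sketch, assuming UseCompressedOops is enabled and a card-table based
  // barrier set has been selected: resolve_barrier_rt() first adds
  // INTERNAL_RT_USE_COMPRESSED_OOPS to the decorators, and resolve_barrier_gc() then
  // switches over bs->kind() and returns something along the lines of
  //   &PostRuntimeDispatch<CardTableBS::AccessBarrier<ds>, barrier_type, ds>::oop_access_barrier
  // where "CardTableBS" stands in for whatever concrete type the macro expansion below selects.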
  template <DecoratorSet decorators, typename FunctionPointerT, BarrierType barrier_type>
  struct BarrierResolver: public AllStatic {
    template <DecoratorSet ds>
    static typename EnableIf<
      HasDecorator<ds, INTERNAL_VALUE_IS_OOP>::value,
      FunctionPointerT>::type
    resolve_barrier_gc() {
      BarrierSet* bs = BarrierSet::barrier_set();
      assert(bs != NULL, "GC barriers invoked before BarrierSet is set");
      switch (bs->kind()) {
#define BARRIER_SET_RESOLVE_BARRIER_CLOSURE(bs_name)                    \
        case BarrierSet::bs_name: {                                     \
          return PostRuntimeDispatch<typename BarrierSet::GetType<BarrierSet::bs_name>::type:: \
            AccessBarrier<ds>, barrier_type, ds>::oop_access_barrier; \
        }                                                               \
        break;
        FOR_EACH_CONCRETE_BARRIER_SET_DO(BARRIER_SET_RESOLVE_BARRIER_CLOSURE)
#undef BARRIER_SET_RESOLVE_BARRIER_CLOSURE

      default:
        fatal("BarrierSet AccessBarrier resolving not implemented");
        return NULL;
      };
    }

    template <DecoratorSet ds>
    static typename EnableIf<
      !HasDecorator<ds, INTERNAL_VALUE_IS_OOP>::value,
      FunctionPointerT>::type
    resolve_barrier_gc() {
      BarrierSet* bs = BarrierSet::barrier_set();
      assert(bs != NULL, "GC barriers invoked before BarrierSet is set");
      switch (bs->kind()) {
#define BARRIER_SET_RESOLVE_BARRIER_CLOSURE(bs_name)                    \
        case BarrierSet::bs_name: {                                     \
          return PostRuntimeDispatch<typename BarrierSet::GetType<BarrierSet::bs_name>::type:: \
            AccessBarrier<ds>, barrier_type, ds>::access_barrier; \
        }                                                               \
        break;
        FOR_EACH_CONCRETE_BARRIER_SET_DO(BARRIER_SET_RESOLVE_BARRIER_CLOSURE)
#undef BARRIER_SET_RESOLVE_BARRIER_CLOSURE

      default:
        fatal("BarrierSet AccessBarrier resolving not implemented");
        return NULL;
      };
    }

    static FunctionPointerT resolve_barrier_rt() {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | INTERNAL_RT_USE_COMPRESSED_OOPS;
        return resolve_barrier_gc<expanded_decorators>();
      } else {
        return resolve_barrier_gc<decorators>();
      }
    }

    static FunctionPointerT resolve_barrier() {
      return resolve_barrier_rt();
    }
  };

  // Step 4: Runtime dispatch
  // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
  // accessor. This is required when the access either depends on whether compressed oops
  // are being used, or on which GC implementation was chosen (e.g. one that requires GC
  // barriers). The way it works is that a function pointer initially pointing to an
  // accessor resolution function gets called for each access. Upon first invocation,
  // it resolves which accessor to use for future invocations and patches the
  // function pointer to this new accessor.
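  //
  // A minimal sketch of the self-patching scheme (illustrative, not the actual
  // declarations below):
  //
  //   static func_t _store_func;                    // initially points at store_init
  //
  //   static void store_init(void* addr, T value) {
  //     _store_func = BarrierResolver<...>::resolve_barrier(); // resolve exactly once
  //     _store_func(addr, value);                              // forward this access
  //   }
  //
  // After the first call for a given specialization, RuntimeDispatch<...>::store() goes
  // straight to the resolved accessor without any further resolution cost.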

  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    static func_t _store_func;

    static void store_init(void* addr, T value) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_STORE>::resolve_barrier();
      _store_func = function;
      function(addr, value);
    }

    static inline void store(void* addr, T value) {
      _store_func(addr, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    static func_t _store_at_func;

    static void store_at_init(oop base, ptrdiff_t offset, T value) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_STORE_AT>::resolve_barrier();
      _store_at_func = function;
      function(base, offset, value);
    }

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      _store_at_func(base, offset, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    static func_t _load_func;

    static T load_init(void* addr) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_LOAD>::resolve_barrier();
      _load_func = function;
      return function(addr);
    }

    static inline T load(void* addr) {
      return _load_func(addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    static func_t _load_at_func;

    static T load_at_init(oop base, ptrdiff_t offset) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_LOAD_AT>::resolve_barrier();
      _load_at_func = function;
      return function(base, offset);
    }

    static inline T load_at(oop base, ptrdiff_t offset) {
      return _load_at_func(base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    static func_t _atomic_cmpxchg_func;

    static T atomic_cmpxchg_init(T new_value, void* addr, T compare_value) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_ATOMIC_CMPXCHG>::resolve_barrier();
      _atomic_cmpxchg_func = function;
      return function(new_value, addr, compare_value);
    }

    static inline T atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      return _atomic_cmpxchg_func(new_value, addr, compare_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    static func_t _atomic_cmpxchg_at_func;

    static T atomic_cmpxchg_at_init(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_ATOMIC_CMPXCHG_AT>::resolve_barrier();
      _atomic_cmpxchg_at_func = function;
      return function(new_value, base, offset, compare_value);
    }

    static inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return _atomic_cmpxchg_at_func(new_value, base, offset, compare_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    static func_t _atomic_xchg_func;

    static T atomic_xchg_init(T new_value, void* addr) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_ATOMIC_XCHG>::resolve_barrier();
      _atomic_xchg_func = function;
      return function(new_value, addr);
    }

    static inline T atomic_xchg(T new_value, void* addr) {
      return _atomic_xchg_func(new_value, addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    static func_t _atomic_xchg_at_func;

    static T atomic_xchg_at_init(T new_value, oop base, ptrdiff_t offset) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_ATOMIC_XCHG_AT>::resolve_barrier();
      _atomic_xchg_at_func = function;
      return function(new_value, base, offset);
    }

    static inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      return _atomic_xchg_at_func(new_value, base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    static func_t _arraycopy_func;

    static bool arraycopy_init(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_ARRAYCOPY>::resolve_barrier();
      _arraycopy_func = function;
      return function(src_obj, dst_obj, src, dst, length);
    }

    static inline bool arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      return _arraycopy_func(src_obj, dst_obj, src, dst, length);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    static func_t _clone_func;

    static void clone_init(oop src, oop dst, size_t size) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_CLONE>::resolve_barrier();
      _clone_func = function;
      function(src, dst, size);
    }

    static inline void clone(oop src, oop dst, size_t size) {
      _clone_func(src, dst, size);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_RESOLVE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type func_t;
    static func_t _resolve_func;

    static oop resolve_init(oop obj) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_RESOLVE>::resolve_barrier();
      _resolve_func = function;
      return function(obj);
    }

    static inline oop resolve(oop obj) {
      return _resolve_func(obj);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_EQUALS>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_EQUALS>::type func_t;
    static func_t _equals_func;

    static bool equals_init(oop o1, oop o2) {
      func_t function = BarrierResolver<decorators, func_t, BARRIER_EQUALS>::resolve_barrier();
      _equals_func = function;
      return function(o1, o2);
    }

    static inline bool equals(oop o1, oop o2) {
      return _equals_func(o1, o2);
    }
  };

  // Initialize the function pointers to point to the resolving function.
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type
  RuntimeDispatch<decorators, T, BARRIER_RESOLVE>::_resolve_func = &resolve_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_EQUALS>::type
  RuntimeDispatch<decorators, T, BARRIER_EQUALS>::_equals_func = &equals_init;

  // Step 3: Pre-runtime dispatching.
  // The PreRuntimeDispatch class is responsible for filtering the barrier strength
  // decorators. That is, for AS_RAW accesses it hardwires the access without a runtime
  // dispatch point. Otherwise, if hardwiring is not possible, the access goes through
  // a runtime dispatch.
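  //
  // For instance (illustrative): a primitive access through RawAccess<> carries AS_RAW
  // and is hardwired directly to RawAccessBarrier with no runtime dispatch, whereas an
  // oop access through HeapAccess<> lacks AS_RAW and is routed to RuntimeDispatch,
  // because the GC barriers it needs are only known at runtime.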
  struct PreRuntimeDispatch: AllStatic {
    template<DecoratorSet decorators>
    struct CanHardwireRaw: public IntegralConstant<
      bool,
      !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
      !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
      HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
    {};
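    // For example, a raw primitive access, or a raw oop access through a narrowOop*
    // (where the use of compressed oops is statically known), can be hardwired; a raw
    // oop access through a HeapWord* cannot, because whether the value needs to be
    // compressed depends on UseCompressedOops at runtime.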

    static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;

    template<DecoratorSet decorators>
    static bool is_hardwired_primitive() {
      return !HasDecorator<decorators, INTERNAL_BT_BARRIER_ON_PRIMITIVES>::value &&
             !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_store(addr, value);
      } else {
        Raw::store(addr, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store(void* addr, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      store<decorators>(field_addr(base, offset), value);
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::template oop_load<T>(addr);
      } else {
        return Raw::template load<T>(addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load(void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(new_value, addr, compare_value);
      } else {
        return Raw::atomic_cmpxchg(new_value, addr, compare_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(new_value, addr, compare_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return atomic_cmpxchg<decorators>(new_value, field_addr(base, offset), compare_value);
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(new_value, base, offset, compare_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(new_value, base, offset, compare_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(new_value, addr);
      } else {
        return Raw::atomic_xchg(new_value, addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(new_value, addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      return atomic_xchg<decorators>(new_value, field_addr(base, offset));
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(new_value, base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(new_value, base, offset);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_arraycopy(src_obj, dst_obj, src, dst, length);
      } else {
        return Raw::arraycopy(src_obj, dst_obj, src, dst, length);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, bool>::type
    arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, dst_obj, src, dst, length);
      }
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
    resolve(oop obj) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      return Raw::resolve(obj);
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
    resolve(oop obj) {
      return RuntimeDispatch<decorators, oop, BARRIER_RESOLVE>::resolve(obj);
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, bool>::type
    equals(oop o1, oop o2) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      return Raw::equals(o1, o2);
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, bool>::type
    equals(oop o1, oop o2) {
      return RuntimeDispatch<decorators, oop, BARRIER_EQUALS>::equals(o1, o2);
    }
  };

  // This class adds implied decorators that follow according to decorator rules.
  // For example, it adds a default reference strength and default memory ordering
  // semantics when none have been specified.
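  //
  // For example (illustrative): an oop access created with only IN_HEAP is expanded by
  // DecoratorFixup to IN_HEAP | ON_STRONG_OOP_REF | MO_UNORDERED | AS_NORMAL (plus the
  // build-time decorators), while an access that already carries, say, ON_WEAK_OOP_REF
  // or an explicit memory ordering keeps those choices as-is.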
  template <DecoratorSet input_decorators>
  struct DecoratorFixup: AllStatic {
    // If no reference strength has been picked, then strong will be picked
    static const DecoratorSet ref_strength_default = input_decorators |
      (((ON_DECORATOR_MASK & input_decorators) == 0 && (INTERNAL_VALUE_IS_OOP & input_decorators) != 0) ?
       ON_STRONG_OOP_REF : INTERNAL_EMPTY);
    // If no memory ordering has been picked, unordered will be picked
    static const DecoratorSet memory_ordering_default = ref_strength_default |
      ((MO_DECORATOR_MASK & ref_strength_default) == 0 ? MO_UNORDERED : INTERNAL_EMPTY);
    // If no barrier strength has been picked, normal will be used
    static const DecoratorSet barrier_strength_default = memory_ordering_default |
      ((AS_DECORATOR_MASK & memory_ordering_default) == 0 ? AS_NORMAL : INTERNAL_EMPTY);
    // Heap array accesses imply it is a heap access
    static const DecoratorSet heap_array_is_in_heap = barrier_strength_default |
      ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
    static const DecoratorSet conc_root_is_root = heap_array_is_in_heap |
      ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
    static const DecoratorSet archive_root_is_root = conc_root_is_root |
      ((IN_ARCHIVE_ROOT & conc_root_is_root) != 0 ? IN_ROOT : INTERNAL_EMPTY);
    static const DecoratorSet value = archive_root_is_root | BT_BUILDTIME_DECORATORS;
  };

  // Step 2: Reduce types.
  // Enforce that for non-oop types, T and P have to be strictly the same.
  // P is the type of the address and T is the type of the value.
  // As for oop types, it is allowed to send T in {narrowOop, oop} and
  // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
  // the subsequent table. (columns are P, rows are T)
  // |           | HeapWord  |   oop   | narrowOop |
  // |   oop     |  rt-comp  | hw-none |  hw-comp  |
  // | narrowOop |     x     |    x    |  hw-none  |
  //
  // x means not allowed
  // rt-comp means it must be checked at runtime whether the oop is compressed.
  // hw-none means it is statically known the oop will not be compressed.
  // hw-comp means it is statically known the oop will be compressed.
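  //
  // For example (illustrative): store_reduce_types(narrowOop* addr, oop value) below is
  // the hw-comp case, since a narrowOop* address statically proves that compressed oops
  // are in use, so both INTERNAL_CONVERT_COMPRESSED_OOP and INTERNAL_RT_USE_COMPRESSED_OOPS
  // are added; store_reduce_types(HeapWord* addr, oop value) is the rt-comp case, which
  // only records that a runtime UseCompressedOops check will be needed.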

  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }

  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T new_value, T* addr, T compare_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(new_value, addr, compare_value);
  }

  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(oop new_value, narrowOop* addr, oop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }

  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop new_value, narrowOop* addr, narrowOop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }

  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(oop new_value,
                                         HeapWord* addr,
                                         oop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }

  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_reduce_types(T new_value, T* addr) {
    const DecoratorSet expanded_decorators = decorators;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }

  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(oop new_value, narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }

  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop new_value, narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }

  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(oop new_value, HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }

  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }

  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }

  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }

  template <DecoratorSet decorators, typename T>
  inline bool arraycopy_reduce_types(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
    return PreRuntimeDispatch::arraycopy<decorators>(src_obj, dst_obj, src, dst, length);
  }

  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, arrayOop dst_obj, HeapWord* src, HeapWord* dst, size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
  }

  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, arrayOop dst_obj, narrowOop* src, narrowOop* dst, size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
  }

  // Step 1: Set default decorators. This step remembers if a type was volatile
  // and then sets the MO_VOLATILE decorator by default. Otherwise, a default
  // memory ordering is set for the access, and the implied decorator rules
  // are applied to select sensible defaults for decorators that have not been
  // explicitly set. For example, default object referent strength is set to strong.
  // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types). It also performs some type verification
  // that the passed in types make sense.
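  //
  // For example (illustrative): calling store(addr, value) with a 'volatile jint*'
  // address and no explicit memory ordering decorator results in MO_VOLATILE being
  // added before DecoratorFixup runs, and the Decay<...> typedefs strip CV qualifiers
  // and references so that a single canonical T reaches the later steps.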

  template <DecoratorSet decorators, typename T>
  static void verify_types() {
    // If this fails to compile, then you have sent in something that is
    // not recognized as a valid primitive type to a primitive Access function.
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
                   (IsPointer<T>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // not allowed primitive type
  }

  template <DecoratorSet decorators, typename P, typename T>
  inline void store(P* addr, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_VOLATILE by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_VOLATILE | decorators) : decorators>::value;
    store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
  }

  template <DecoratorSet decorators, typename T>
  inline void store_at(oop base, ptrdiff_t offset, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
    PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
  }

  template <DecoratorSet decorators, typename P, typename T>
  inline T load(P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_VOLATILE by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_VOLATILE | decorators) : decorators>::value;
    return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
  }

  template <DecoratorSet decorators, typename T>
  inline T load_at(oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // Expand the decorators (figure out sensible defaults)
    // Potentially remember if we need compressed oop awareness
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
    return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
  }

  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(T new_value, P* addr, T compare_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(new_decayed_value,
                                                            const_cast<DecayedP*>(addr),
                                                            compare_decayed_value);
  }

  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(new_decayed_value, base,
                                                                   offset, compare_decayed_value);
  }

  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(T new_value, P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(new_decayed_value,
                                                         const_cast<DecayedP*>(addr));
  }

  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(new_decayed_value, base, offset);
  }

  template <DecoratorSet decorators, typename T>
  inline bool arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (IsSame<T, void>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
    typedef typename Decay<T>::type DecayedT;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IN_HEAP_ARRAY | IN_HEAP>::value;
    return arraycopy_reduce_types<expanded_decorators>(src_obj, dst_obj,
                                                       const_cast<DecayedT*>(src),
                                                       const_cast<DecayedT*>(dst),
                                                       length);
  }

  template <DecoratorSet decorators>
  inline void clone(oop src, oop dst, size_t size) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
  }

  template <DecoratorSet decorators>
  inline oop resolve(oop obj) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    return PreRuntimeDispatch::resolve<expanded_decorators>(obj);
  }

  template <DecoratorSet decorators>
  inline bool equals(oop o1, oop o2) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    return PreRuntimeDispatch::equals<expanded_decorators>(o1, o2);
  }
}

template <DecoratorSet decorators>
template <DecoratorSet expected_decorators>
void Access<decorators>::verify_decorators() {
  STATIC_ASSERT((~expected_decorators & decorators) == 0); // unexpected decorator used
  const DecoratorSet barrier_strength_decorators = decorators & AS_DECORATOR_MASK;
  STATIC_ASSERT(barrier_strength_decorators == 0 || ( // make sure barrier strength decorators are disjoint if set
    (barrier_strength_decorators ^ AS_NO_KEEPALIVE) == 0 ||
    (barrier_strength_decorators ^ AS_DEST_NOT_INITIALIZED) == 0 ||
    (barrier_strength_decorators ^ AS_RAW) == 0 ||
    (barrier_strength_decorators ^ AS_NORMAL) == 0
  ));
  const DecoratorSet ref_strength_decorators = decorators & ON_DECORATOR_MASK;
  STATIC_ASSERT(ref_strength_decorators == 0 || ( // make sure ref strength decorators are disjoint if set
    (ref_strength_decorators ^ ON_STRONG_OOP_REF) == 0 ||
    (ref_strength_decorators ^ ON_WEAK_OOP_REF) == 0 ||
    (ref_strength_decorators ^ ON_PHANTOM_OOP_REF) == 0 ||
    (ref_strength_decorators ^ ON_UNKNOWN_OOP_REF) == 0
  ));
  const DecoratorSet memory_ordering_decorators = decorators & MO_DECORATOR_MASK;
  STATIC_ASSERT(memory_ordering_decorators == 0 || ( // make sure memory ordering decorators are disjoint if set
    (memory_ordering_decorators ^ MO_UNORDERED) == 0 ||
    (memory_ordering_decorators ^ MO_VOLATILE) == 0 ||
    (memory_ordering_decorators ^ MO_RELAXED) == 0 ||
    (memory_ordering_decorators ^ MO_ACQUIRE) == 0 ||
    (memory_ordering_decorators ^ MO_RELEASE) == 0 ||
    (memory_ordering_decorators ^ MO_SEQ_CST) == 0
  ));
  const DecoratorSet location_decorators = decorators & IN_DECORATOR_MASK;
  STATIC_ASSERT(location_decorators == 0 || ( // make sure location decorators are disjoint if set
    (location_decorators ^ IN_ROOT) == 0 ||
    (location_decorators ^ IN_HEAP) == 0 ||
    (location_decorators ^ (IN_HEAP | IN_HEAP_ARRAY)) == 0 ||
    (location_decorators ^ (IN_ROOT | IN_CONCURRENT_ROOT)) == 0 ||
    (location_decorators ^ (IN_ROOT | IN_ARCHIVE_ROOT)) == 0
  ));
}

#endif // SHARE_VM_RUNTIME_ACCESS_INLINE_HPP