/*
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_RUNTIME_ACCESSBACKEND_HPP
#define SHARE_VM_RUNTIME_ACCESSBACKEND_HPP

#include "utilities/traits/enableIf.hpp"
#include "utilities/traits/typeIf.hpp"
#include "utilities/traits/integer.hpp"
#include "utilities/traits/isDerived.hpp"
#include "utilities/traits/isFloatLike.hpp"
#include "utilities/traits/isVolatile.hpp"

enum BarrierType {
  BARRIER_STORE,
  BARRIER_STORE_AT,
  BARRIER_LOAD,
  BARRIER_LOAD_AT,
  BARRIER_CAS,
  BARRIER_CAS_AT,
  BARRIER_SWAP,
  BARRIER_SWAP_AT,
  BARRIER_COPY,
  BARRIER_CLONE
};

enum {
  RAW_DECORATOR_MASK = MO_RELAXED | MO_VOLATILE | MO_ATOMIC | MO_ACQUIRE | MO_RELEASE |
                       MO_SEQ_CST | DEST_CONJOINT | DEST_DISJOINT | COPY_ARRAYOF |
                       ACCESS_ARRAYCOPY | ACCESS_ATOMIC | ACCESS_ALIGNED
};

enum {
  BASIC_DECORATOR_MASK = RAW_DECORATOR_MASK | GC_CONVERT_COMPRESSED_OOP | RT_USE_COMPRESSED_OOPS |
                         VALUE_NOT_NULL | VALUE_IS_OOP | ACCESS_ON_ANONYMOUS
};

template <DecoratorSet decorators>
struct TestEncodable {
  enum {
    value = DecoratorTest<decorators>::HAS_VALUE_IS_OOP && DecoratorTest<decorators>::NEEDS_OOP_COMPRESS
  };
};

template <bool encodable, typename T>
struct EncodedTypeHelper {
  typedef T type;
};

template <typename T>
struct EncodedTypeHelper<true, T> {
  typedef narrowOop type;
};

template <DecoratorSet decorators, typename T>
struct EncodedType {
  typedef typename EncodedTypeHelper<TestEncodable<decorators>::value, T>::type type;
};

template <DecoratorSet decorators>
struct EncodedOopType {
  typedef typename EncodedTypeHelper<TestEncodable<decorators>::value, oop>::type type;
};

template <DecoratorSet decorators, typename T>
struct PossiblyLockedAccess {
  enum {
#ifndef SUPPORTS_NATIVE_CX8
    value = (sizeof(T) == sizeof(int64_t)) && DecoratorTest<decorators>::HAS_MO_ATOMIC
#else
    value = false
#endif
  };
};

template <bool on_heap, bool on_nmethod, bool on_klass>
struct BaseTypeHelper {
  typedef void* type;
};

template <>
struct BaseTypeHelper<true, false, false> {
  typedef oop type;
};

template <>
struct BaseTypeHelper<false, true, false> {
  typedef nmethod* type;
};

template <>
struct BaseTypeHelper<false, false, true> {
  typedef Klass* type;
};

template <DecoratorSet decorators>
struct BaseType {
  typedef typename BaseTypeHelper<DecoratorTest<decorators>::HAS_ACCESS_ON_HEAP,
                                  DecoratorTest<decorators>::HAS_ACCESS_ON_NMETHOD,
                                  DecoratorTest<decorators>::HAS_ACCESS_ON_KLASS>::type type;
};
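// AccessInternal collects the backend plumbing shared by all barrier
// implementations: function pointer typedefs matching the barriers in
// BarrierType (except BARRIER_CLONE), the mutex-based *_locked helpers used
// to emulate wide atomic accesses on platforms without a native 64-bit
// compare-and-swap (see PossiblyLockedAccess above), and out-of-line
// forwarders to Copy::.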
namespace AccessInternal {
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef void (*store_func_t)(void* addr, T value);
    typedef void (*store_at_func_t)(typename BaseType<decorators>::type base, ptrdiff_t offset, T value);
    typedef T (*load_func_t)(void* addr);
    typedef T (*load_at_func_t)(typename BaseType<decorators>::type, ptrdiff_t offset);
    typedef T (*cas_func_t)(T new_value, void* addr, T compare_value);
    typedef T (*cas_at_func_t)(T new_value, typename BaseType<decorators>::type, ptrdiff_t offset, T compare_value);
    typedef T (*swap_func_t)(T new_value, void* addr);
    typedef T (*swap_at_func_t)(T new_value, typename BaseType<decorators>::type, ptrdiff_t offset);
    typedef bool (*copy_func_t)(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length);
  };

  bool wide_atomic_needs_locking();

  int64_t load_locked(void* addr);
  void store_locked(void* addr, int64_t val);
  int64_t swap_locked(int64_t new_val, void* addr);
  int64_t cas_locked(int64_t new_val, void* addr, int64_t expected_val);

  void* field_addr(void* base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently the access header is included.
  void copy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void copy_conjoint_oops(void* src, void* dst, size_t length);
  void copy_conjoint_memory_atomic(void* src, void* dst, size_t length);
  void copy_conjoint_jbytes(void* src, void* dst, size_t length);
  void copy_conjoint_jlongs_atomic(void* src, void* dst, size_t length);
  void copy_disjoint_words(void* src, void* dst, size_t length);
}

template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, void>::type atomic_store(void* addr, T value);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, void>::type atomic_store(void* addr, T value);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, void>::type release_store(void* addr, T value);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, void>::type release_store(void* addr, T value);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, void>::type release_store_fence(void* addr, T value);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, void>::type release_store_fence(void* addr, T value);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, T>::type atomic_load(void* addr);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, T>::type atomic_load(void* addr);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, T>::type load_acquire(void* addr);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, T>::type load_acquire(void* addr);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, T>::type fence_load_acquire(void* addr);
  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type fence_load_acquire(void* addr);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, T>::type cas_relaxed(T new_value, void* addr, T compare_value);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, T>::type cas_relaxed(T new_value, void* addr, T compare_value);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, T>::type cas_seq_cst(T new_value, void* addr, T compare_value);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, T>::type cas_seq_cst(T new_value, void* addr, T compare_value);

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, T>::type swap_seq_cst(T new_value, void* addr);
  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, T>::type swap_seq_cst(T new_value, void* addr);

  // The following *_locked mechanisms serve the purpose of handling atomic operations
  // that are larger than the machine can handle natively, possibly opting for a slower
  // path that takes a mutex to perform the operation.
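  // For example, when SUPPORTS_NATIVE_CX8 is not defined, an MO_ATOMIC access to a
  // 64-bit value satisfies PossiblyLockedAccess and is routed to the locked
  // *_maybe_locked overloads below; their out-of-line definitions (not in this
  // header) are expected to consult AccessInternal::wide_atomic_needs_locking()
  // and, if required, perform the access under a lock using the
  // AccessInternal::*_locked() helpers.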
  template <typename T>
  static inline T load_not_locked(void* addr) {
    if (DecoratorTest<decorators>::HAS_MO_SEQ_CST) {
      return fence_load_acquire<T>(addr);
    } else if (DecoratorTest<decorators>::HAS_MO_ACQUIRE) {
      return load_acquire<T>(addr);
    } else if (DecoratorTest<decorators>::HAS_MO_ATOMIC) {
      return atomic_load<T>(addr);
    } else if (DecoratorTest<decorators>::HAS_MO_VOLATILE) {
      return *(volatile T*)addr;
    } else {
      return *(T*)addr;
    }
  }

  template <typename T>
  static inline typename EnableIf<!PossiblyLockedAccess<decorators, T>::value, T>::type
  load_maybe_locked(void* addr) {
    return load_not_locked<T>(addr);
  }

  template <typename T>
  static typename EnableIf<PossiblyLockedAccess<decorators, T>::value, T>::type
  load_maybe_locked(void* addr);

  template <typename T>
  static inline void store_not_locked(void* addr, T value) {
    if (DecoratorTest<decorators>::HAS_MO_SEQ_CST) {
      release_store_fence(addr, value);
    } else if (DecoratorTest<decorators>::HAS_MO_RELEASE) {
      release_store(addr, value);
    } else if (DecoratorTest<decorators>::HAS_MO_ATOMIC) {
      atomic_store(addr, value);
    } else if (DecoratorTest<decorators>::HAS_MO_VOLATILE) {
      (void)const_cast<T&>(*(volatile T*)addr = value);
    } else {
      *(T*)addr = value;
    }
  }

  template <typename T>
  static inline typename EnableIf<!PossiblyLockedAccess<decorators, T>::value, void>::type
  store_maybe_locked(void* addr, T value) {
    store_not_locked(addr, value);
  }

  template <typename T>
  static typename EnableIf<PossiblyLockedAccess<decorators, T>::value, void>::type
  store_maybe_locked(void* addr, T value);

  template <typename T>
  static inline T cas_not_locked(T new_value, void* addr, T compare_value) {
    if (DecoratorTest<decorators>::HAS_MO_SEQ_CST) {
      return cas_seq_cst(new_value, addr, compare_value);
    } else {
      return cas_relaxed(new_value, addr, compare_value);
    }
  }

  template <typename T>
  static inline typename EnableIf<!PossiblyLockedAccess<decorators, T>::value, T>::type
  cas_maybe_locked(T new_value, void* addr, T compare_value) {
    return cas_not_locked(new_value, addr, compare_value);
  }

  template <typename T>
  static typename EnableIf<PossiblyLockedAccess<decorators, T>::value, T>::type
  cas_maybe_locked(T new_value, void* addr, T compare_value);

  template <typename T>
  static inline T swap_not_locked(T new_value, void* addr) {
    return swap_seq_cst(new_value, addr);
  }

  template <typename T>
  static inline typename EnableIf<!PossiblyLockedAccess<decorators, T>::value, T>::type
  swap_maybe_locked(T new_value, void* addr) {
    return swap_not_locked(new_value, addr);
  }

  template <typename T>
  static typename EnableIf<PossiblyLockedAccess<decorators, T>::value, T>::type
  swap_maybe_locked(T new_value, void* addr);

public:
  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, void>::type store(void* addr, T value) {
    store_maybe_locked(addr, value);
  }

  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, T>::type load(void* addr) {
    return load_maybe_locked<T>(addr);
  }

  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, T>::type cas(T new_value, void* addr, T compare_value) {
    return cas_maybe_locked(new_value, addr, compare_value);
  }

  template <typename T>
  static inline typename EnableIf<!IsFloatLike<T>::value, T>::type swap(T new_value, void* addr) {
    return swap_maybe_locked(new_value, addr);
  }

  template <typename T>
  static typename EnableIf<!IsFloatLike<T>::value, bool>::type copy(T* src, T* dst, size_t length);

  static void clone(oop src, oop dst, size_t size);

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, void>::type store(void* addr, T value) {
    typedef IntegerType<T> IntType;
    store(addr, IntType::cast_to_signed(value));
  }

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type load(void* addr) {
    typedef IntegerType<T> IntType;
    return IntType::cast_from_signed(load<typename IntType::signed_type>(addr));
  }

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type cas(T new_value, void* addr, T compare_value) {
    typedef IntegerType<T> IntType;
    return IntType::cast_from_signed(cas(IntType::cast_to_signed(new_value),
                                         addr,
                                         IntType::cast_to_signed(compare_value)));
  }

  template <typename T>
  static inline typename EnableIf<IsFloatLike<T>::value, T>::type swap(T new_value, void* addr) {
    typedef IntegerType<T> IntType;
    return IntType::cast_from_signed(swap(IntType::cast_to_signed(new_value), addr));
  }

  template <typename T>
  static typename EnableIf<IsFloatLike<T>::value, bool>::type copy(T* src, T* dst, size_t length) {
    typedef typename IntegerType<T>::signed_type IntType;
    return copy((IntType*)src, (IntType*)dst, length);
  }
};
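// BasicAccessBarrier layers compressed oop handling on top of RawAccessBarrier:
// the primitive store/load/cas/swap/copy members forward directly to Raw, while
// the oop_* variants encode oop values (to narrowOop via EncodedType) before the
// raw access and decode them again afterwards, whenever the decorators request
// compressed oop conversion. Illustrative only (a hypothetical call site, not
// something defined by this header; the decorator combination is just an example):
//
//   BasicAccessBarrier<VALUE_IS_OOP | MO_RELAXED>::oop_store_at(base, offset, value);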
template <DecoratorSet decorators>
class BasicAccessBarrier: public AllStatic {
  typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;

protected:
  static inline void* field_addr(void* base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  template <typename T>
  static inline typename EnableIf<TestEncodable<decorators>::value, typename EncodedType<decorators, T>::type>::type
  encode_internal(T value);

  template <typename T>
  static inline typename EnableIf<!TestEncodable<decorators>::value, typename EncodedType<decorators, T>::type>::type
  encode_internal(T value) {
    return value;
  }

  template <typename T>
  static inline typename EncodedType<decorators, T>::type encode(T value) {
    return encode_internal(value);
  }

  template <typename T>
  static inline typename EnableIf<TestEncodable<decorators>::value, T>::type
  decode_internal(typename EncodedType<decorators, T>::type value);

  template <typename T>
  static inline typename EnableIf<!TestEncodable<decorators>::value, T>::type
  decode_internal(typename EncodedType<decorators, T>::type value) {
    return value;
  }

  template <typename T>
  static inline T decode(typename EncodedType<decorators, T>::type value) {
    return decode_internal<T>(value);
  }

public:
  template <typename T>
  static void store(void* addr, T value) {
    Raw::template store<T>(addr, value);
  }

  template <typename T>
  static void store_at(void* base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load(void* addr) {
    return Raw::template load<T>(addr);
  }

  template <typename T>
  static T load_at(void* base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T cas(T new_value, void* addr, T compare_value) {
    return Raw::template cas<T>(new_value, addr, compare_value);
  }

  template <typename T>
  static T cas_at(T new_value, void* base, ptrdiff_t offset, T compare_value) {
    return cas(new_value, field_addr(base, offset), compare_value);
  }

  template <typename T>
  static T swap(T new_value, void* addr) {
    return Raw::template swap<T>(new_value, addr);
  }

  template <typename T>
  static T swap_at(T new_value, void* base, ptrdiff_t offset) {
    return swap(new_value, field_addr(base, offset));
  }

  template <typename T>
  static bool copy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
    return Raw::template copy<T>(src, dst, length);
  }

  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(void* base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(void* base, ptrdiff_t offset);

  template <typename T>
  static T oop_cas(T new_value, void* addr, T compare_value);
  template <typename T>
  static T oop_cas_at(T new_value, void* base, ptrdiff_t offset, T compare_value);

  template <typename T>
  static T oop_swap(T new_value, void* addr);
  template <typename T>
  static T oop_swap_at(T new_value, void* base, ptrdiff_t offset);

  template <typename T>
  static bool oop_copy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length);

  static void clone(oop src, oop dst, size_t size);
};

#endif // SHARE_VM_RUNTIME_ACCESSBACKEND_HPP