--- old/src/share/vm/gc/shared/barrierSet.inline.hpp	2017-04-25 16:45:01.903174256 +0200
+++ new/src/share/vm/gc/shared/barrierSet.inline.hpp	2017-04-25 16:45:01.751174261 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2001, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2001, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -26,68 +26,291 @@
 #define SHARE_VM_GC_SHARED_BARRIERSET_INLINE_HPP
 
 #include "gc/shared/barrierSet.hpp"
-#include "gc/shared/cardTableModRefBS.inline.hpp"
+#include "gc/shared/barrierSetConfig.inline.hpp"
+#include "runtime/access.hpp"
 
-// Inline functions of BarrierSet, which de-virtualize certain
-// performance-critical calls when the barrier is the most common
-// card-table kind.
+namespace AccessInternal {
 
-inline bool BarrierSet::devirtualize_reference_writes() const {
+  template
+  struct HasOverloadHelper {};
+
+  template
+  struct HasOopStoreAtOverload {
+    typedef jint yes;
+    typedef jbyte no;
+
+    template
+    static yes& test(HasOverloadHelper*);
+    template
+    static no& test(...);
+
+    enum {
+      value = sizeof(test(0)) == sizeof(yes)
+    };
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic { };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template store;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_store;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+  public:
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template store_at;
+    }
+
+    // resolve_barrier_type_base searches the AccessBarrier class hierarchy for a
+    // matching overload of oop_store_at. The different overloads represent
+    // different base pointer types, such as nmethod*, oop or Klass*.
+    template
+    static typename EnableIf::store_at_func_t>::value, void*>::type resolve_barrier_type_base() {
+      typename AccessFunctionTypes::store_at_func_t resolved = &BarrierType::oop_store_at;
+      return (void*)resolved;
+    }
+
+    template
+    static typename EnableIf::store_at_func_t>::value, void*>::type resolve_barrier_type_base() {
+      return AccessBarrierResolverProxy::template resolve_barrier_type_base();
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return resolve_barrier_type_base();
+    }
+
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template load;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_load;
+    }
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template load_at;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_load_at;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template cas;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_cas;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template cas_at;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_cas_at;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template swap;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_swap;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template swap_at;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::oop_swap_at;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+  template
+  class AccessBarrierResolverProxy: public AllStatic {
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template copy;
+    }
+
+    template
+    static typename EnableIf::HAS_VALUE_IS_OOP, void*>::type resolve_barrier_type_internal() {
+      return (void*)&GCBarrierType::template oop_copy;
+    }
+
+  public:
+    static void* resolve_barrier_type() {
+      return resolve_barrier_type_internal();
+    }
+  };
+
+}
+
+template
+void* BarrierSet::resolve_barrier() {
   switch (kind()) {
-  case CardTableForRS:
-  case CardTableExtension:
-    return true;
+#define BARRIER_SET_RESOLVE_BARRIER_CLOSURE(bs_name) \
+  case bs_name : { \
+    return AccessInternal::AccessBarrierResolverProxy::type::AccessBarrier, barrier_type, decorators, T>::resolve_barrier_type(); \
+  } \
+  break;
+  FOR_EACH_CONCRETE_BARRIER_SET_DO(BARRIER_SET_RESOLVE_BARRIER_CLOSURE)
+#undef BARRIER_SET_RESOLVE_BARRIER_CLOSURE
+
   default:
-    return false;
-  }
+    fatal("BarrierSet AccessBarrier resolving not implemented");
+    return NULL;
+  };
+}
+
+template
+void* BarrierSet::resolve_clone_barrier() {
+  switch (kind()) {
+#define BARRIER_SET_RESOLVE_BARRIER_CLOSURE(bs_name) \
+  case bs_name : { \
+    return (void*)&BSNameToType< bs_name >::type::AccessBarrier::clone; \
+  } \
+  break;
+  FOR_EACH_CONCRETE_BARRIER_SET_DO(BARRIER_SET_RESOLVE_BARRIER_CLOSURE)
+#undef BARRIER_SET_RESOLVE_BARRIER_CLOSURE
+
+  default:
+    fatal("BarrierSet AccessBarrier resolving not implemented");
+    return NULL;
+  };
+}
+
+template
+inline void BarrierSet::AccessBarrier::oop_store(void* addr, oop value) {
+  Basic::template oop_store(addr, value);
+}
+
+template
+inline void BarrierSet::AccessBarrier::oop_store_at(oop base, ptrdiff_t offset, oop value) {
+  Basic::template oop_store_at((void*)base, offset, value);
+}
+
+template
+inline void BarrierSet::AccessBarrier::oop_store_at(nmethod* base, ptrdiff_t offset, oop value) {
+  Basic::template oop_store_at((void*)base, offset, value);
+}
+
+template
+inline void BarrierSet::AccessBarrier::oop_store_at(Klass* base, ptrdiff_t offset, oop value) {
+  Basic::template oop_store_at((void*)base, offset, value);
 }
-template <class T> void BarrierSet::write_ref_field_pre(T* field, oop new_val) {
-  if (devirtualize_reference_writes()) {
-    barrier_set_cast<CardTableModRefBS>(this)->inline_write_ref_field_pre(field, new_val);
-  } else {
-    write_ref_field_pre_work(field, new_val);
-  }
-}
-
-void BarrierSet::write_ref_field(void* field, oop new_val, bool release) {
-  if (devirtualize_reference_writes()) {
-    barrier_set_cast<CardTableModRefBS>(this)->inline_write_ref_field(field, new_val, release);
-  } else {
-    write_ref_field_work(field, new_val, release);
-  }
-}
-
-// count is number of array elements being written
-void BarrierSet::write_ref_array(HeapWord* start, size_t count) {
-  assert(count <= (size_t)max_intx, "count too large");
-  HeapWord* end = (HeapWord*)((char*)start + (count*heapOopSize));
-  // In the case of compressed oops, start and end may potentially be misaligned;
-  // so we need to conservatively align the first downward (this is not
-  // strictly necessary for current uses, but a case of good hygiene and,
-  // if you will, aesthetics) and the second upward (this is essential for
-  // current uses) to a HeapWord boundary, so we mark all cards overlapping
-  // this write. If this evolves in the future to calling a
-  // logging barrier of narrow oop granularity, like the pre-barrier for G1
-  // (mentioned here merely by way of example), we will need to change this
-  // interface, so it is "exactly precise" (if i may be allowed the adverbial
-  // redundancy for emphasis) and does not include narrow oop slots not
-  // included in the original write interval.
-  HeapWord* aligned_start = (HeapWord*)align_size_down((uintptr_t)start, HeapWordSize);
-  HeapWord* aligned_end   = (HeapWord*)align_size_up  ((uintptr_t)end, HeapWordSize);
-  // If compressed oops were not being used, these should already be aligned
-  assert(UseCompressedOops || (aligned_start == start && aligned_end == end),
-         "Expected heap word alignment of start and end");
-  write_ref_array_work(MemRegion(aligned_start, aligned_end));
+template
+inline oop BarrierSet::AccessBarrier::oop_load(void* addr) {
+  return Basic::template oop_load(addr);
 }
+template
+inline oop BarrierSet::AccessBarrier::oop_load_at(oop base, ptrdiff_t offset) {
+  return Basic::template oop_load_at((void*)base, offset);
+}
+
+template
+inline oop BarrierSet::AccessBarrier::oop_cas(oop new_value, void* addr, oop compare_value) {
+  return Basic::template oop_cas(new_value, addr, compare_value);
+}
+
+template
+inline oop BarrierSet::AccessBarrier::oop_cas_at(oop new_value, oop base, ptrdiff_t offset, oop compare_value) {
+  return Basic::template oop_cas_at(new_value, (void*)base, offset, compare_value);
+}
+
+template
+inline oop BarrierSet::AccessBarrier::oop_swap(oop new_value, void* addr) {
+  return Basic::template oop_swap(new_value, addr);
}
-inline void BarrierSet::write_region(MemRegion mr) {
-  if (devirtualize_reference_writes()) {
-    barrier_set_cast<CardTableModRefBS>(this)->inline_write_region(mr);
-  } else {
-    write_region_work(mr);
-  }
+template
+inline oop BarrierSet::AccessBarrier::oop_swap_at(oop new_value, oop base, ptrdiff_t offset) {
+  return Basic::template oop_swap_at(new_value, (void*)base, offset);
 }
 
 #endif // SHARE_VM_GC_SHARED_BARRIERSET_INLINE_HPP
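A note on the overload-detection idiom (not part of the patch itself): HasOopStoreAtOverload above is an instance of the classic sizeof/SFINAE member-detection technique, which the comment in the store-at resolver proxy describes as searching a class hierarchy for a matching oop_store_at overload. Below is a minimal, self-contained sketch of that idiom in standard C++ only; the names OverloadHelper, HasOopStoreAt, WithOverload, WithoutOverload and store_at_func_t are illustrative stand-ins and do not come from the patch.

#include <cstddef>
#include <iostream>

class oop {};                          // stand-in for HotSpot's oop type

// The unnamed non-type parameter is matched against &U::oop_store_at below;
// that match is what performs the overload check at compile time.
template <typename T, T>
struct OverloadHelper {};

template <typename T, typename FuncT>
struct HasOopStoreAt {
  typedef char yes;
  typedef long no;

  template <typename U>
  static yes& test(OverloadHelper<FuncT, &U::oop_store_at>*); // chosen when U has a matching overload
  template <typename U>
  static no& test(...);                                       // SFINAE fallback otherwise

  enum { value = sizeof(test<T>(0)) == sizeof(yes) };
};

struct WithOverload {
  static void oop_store_at(oop base, std::ptrdiff_t offset, oop value) {}
};
struct WithoutOverload {
  static void oop_store_at(void* addr, oop value) {}
};

// The probed signature: an oop base, an offset, and an oop value.
typedef void (*store_at_func_t)(oop, std::ptrdiff_t, oop);

int main() {
  std::cout << HasOopStoreAt<WithOverload, store_at_func_t>::value     // prints 1
            << HasOopStoreAt<WithoutOverload, store_at_func_t>::value  // prints 0
            << std::endl;
  return 0;
}

Taking &U::oop_store_at inside the probing overload runs ordinary overload resolution against the probed function-pointer type, so a class "has" the overload only when one of its oop_store_at signatures matches exactly; otherwise the ellipsis fallback wins and value is false. That is the property the patch's resolve_barrier_type_base appears to exploit when it walks the AccessBarrier hierarchy looking for an overload for a particular base pointer type.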