/*
 * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_OOPS_INSTANCEREFKLASS_INLINE_HPP
#define SHARE_VM_OOPS_INSTANCEREFKLASS_INLINE_HPP

#include "classfile/javaClasses.hpp"
#include "gc/shared/referenceProcessor.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"

// Iterates the three java.lang.ref.Reference oop fields (discovered, referent,
// next) of a Reference instance, giving the closure's ReferenceProcessor a
// chance to discover the reference before the referent is treated as a normal
// strong oop.  T is either narrowOop or oop depending on UseCompressedOops;
// Contains is a predicate restricting processing to addresses inside a region.
//
// NOTE(review): this function names ShenandoahBarrierSet, but no Shenandoah
// header is #included above — presumably it arrives transitively via one of
// the includes; verify in this source tree.
template <bool nv, typename T, class OopClosureType, class Contains>
void InstanceRefKlass::oop_oop_iterate_ref_processing_specialized(oop obj, OopClosureType* closure, Contains& contains) {
  // Some closures (e.g. reference-processing ones) want to see the
  // discovered field as an ordinary oop slot up front.
  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  if (closure->apply_to_weak_ref_discovered_field()) {
    Devirtualizer<nv>::do_oop(closure, disc_addr);
  }

  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  ReferenceProcessor* rp = closure->_ref_processor;
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (UseShenandoahGC) {
      // Resolve the referent through the Shenandoah barrier (and update the
      // slot) before deciding whether to discover it.
      referent = ShenandoahBarrierSet::resolve_and_update_oop_static(referent_addr, referent);
    }
    // Try to hand the reference over to the ReferenceProcessor.  Under
    // Shenandoah the is_gc_marked() shortcut is skipped and discovery is
    // always attempted.  If discovery succeeds, the referent is NOT pushed
    // through the closure here — the reference processor owns it now.
    if ((UseShenandoahGC || !referent->is_gc_marked()) && (rp != NULL) &&
        rp->discover_reference(obj, reference_type())) {
      return;
    } else if (contains(referent_addr)) {
      // treat referent as normal oop
      Devirtualizer<nv>::do_oop(closure, referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  T next_oop = oopDesc::load_heap_oop(next_addr);
  // Treat discovered as normal oop, if ref is not "active" (next non-NULL)
  if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {
    // i.e. ref is not "active"
    debug_only(
      if (TraceReferenceGC && PrintGCDetails) {
        gclog_or_tty->print_cr("   Process discovered as normal "
                               PTR_FORMAT, p2i(disc_addr));
      }
    )
    Devirtualizer<nv>::do_oop(closure, disc_addr);
  }
  // treat next as normal oop
  if (contains(next_addr)) {
    Devirtualizer<nv>::do_oop(closure, next_addr);
  }
}

// Contains-predicate that accepts every address: used for unbounded iteration.
class AlwaysContains {
 public:
  template <typename T> bool operator()(T* p) const { return true; }
};

// Unbounded reference-field iteration: dispatches on UseCompressedOops to
// pick the field width (narrowOop vs oop) for the specialized worker above.
template <bool nv, class OopClosureType>
void InstanceRefKlass::oop_oop_iterate_ref_processing(oop obj, OopClosureType* closure) {
  AlwaysContains always_contains;
  if (UseCompressedOops) {
    oop_oop_iterate_ref_processing_specialized<nv, narrowOop>(obj, closure, always_contains);
  } else {
    oop_oop_iterate_ref_processing_specialized<nv, oop>(obj, closure, always_contains);
  }
}

// Contains-predicate that accepts only addresses within a given MemRegion:
// used for the bounded iteration variants.
class MrContains {
  const MemRegion _mr;
 public:
  MrContains(MemRegion mr) : _mr(mr) {}
  template <typename T> bool operator()(T* p) const { return _mr.contains(p); }
};

// Bounded reference-field iteration: like oop_oop_iterate_ref_processing but
// only applies the closure to reference fields that lie inside mr.
template <bool nv, class OopClosureType>
void InstanceRefKlass::oop_oop_iterate_ref_processing_bounded(oop obj, OopClosureType* closure, MemRegion mr) {
  const MrContains contains(mr);
  if (UseCompressedOops) {
    oop_oop_iterate_ref_processing_specialized<nv, narrowOop>(obj, closure, contains);
  } else {
    oop_oop_iterate_ref_processing_specialized<nv, oop>(obj, closure, contains);
  }
}

// Full iteration of a Reference instance: the ordinary instance fields first
// (via InstanceKlass), then the special Reference fields.
template <bool nv, class OopClosureType>
void InstanceRefKlass::oop_oop_iterate(oop obj, OopClosureType* closure) {
  InstanceKlass::oop_oop_iterate<nv>(obj, closure);

  oop_oop_iterate_ref_processing<nv>(obj, closure);
}

#if INCLUDE_ALL_GCS
// Reverse-order variant of oop_oop_iterate; the Reference-specific fields are
// still processed after the ordinary instance fields.
template <bool nv, class OopClosureType>
void InstanceRefKlass::oop_oop_iterate_reverse(oop obj, OopClosureType* closure) {
  InstanceKlass::oop_oop_iterate_reverse<nv>(obj, closure);

  oop_oop_iterate_ref_processing<nv>(obj, closure);
}
#endif // INCLUDE_ALL_GCS


// Bounded variant of oop_oop_iterate: only fields inside mr are visited.
template <bool nv, class OopClosureType>
void InstanceRefKlass::oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr) {
  InstanceKlass::oop_oop_iterate_bounded<nv>(obj, closure, mr);

  oop_oop_iterate_ref_processing_bounded<nv>(obj, closure, mr);
}

// Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.
#define ALL_INSTANCE_REF_KLASS_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)  \
  OOP_OOP_ITERATE_DEFN(          InstanceRefKlass, OopClosureType, nv_suffix)   \
  OOP_OOP_ITERATE_DEFN_BOUNDED(  InstanceRefKlass, OopClosureType, nv_suffix)   \
  OOP_OOP_ITERATE_DEFN_BACKWARDS(InstanceRefKlass, OopClosureType, nv_suffix)

#endif // SHARE_VM_OOPS_INSTANCEREFKLASS_INLINE_HPP