1 /*
   2  * Copyright (c) 2017, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "classfile/classLoaderData.hpp"
  26 #include "classfile/classLoaderDataGraph.hpp"
  27 #include "gc/z/zAddress.inline.hpp"
  28 #include "gc/z/zBarrier.inline.hpp"
  29 #include "gc/z/zGlobals.hpp"
  30 #include "gc/z/zGranuleMap.inline.hpp"
  31 #include "gc/z/zHeapIterator.hpp"
  32 #include "gc/z/zOop.inline.hpp"
  33 #include "gc/z/zRootsIterator.hpp"
  34 #include "gc/z/zStat.hpp"
  35 #include "memory/iterator.inline.hpp"
  36 #include "utilities/bitMap.inline.hpp"
  37 #include "utilities/stack.inline.hpp"
  38 
  39 class ZHeapIteratorBitMap : public CHeapObj<mtGC> {
  40 private:
  41   CHeapBitMap _map;
  42 
  43 public:
  44   ZHeapIteratorBitMap(size_t size_in_bits) :
  45       _map(size_in_bits) {}
  46 
  47   bool try_set_bit(size_t index) {
  48     if (_map.at(index)) {
  49       return false;
  50     }
  51 
  52     _map.set_bit(index);
  53     return true;
  54   }
  55 };
  56 
  57 template <bool Concurrent, bool Weak>
  58 class ZHeapIteratorRootOopClosure : public ZRootsIteratorClosure {
  59 private:
  60   ZHeapIterator* const _iter;
  61 
  62   oop load_oop(oop* p) {
  63     if (Weak) {
  64       return NativeAccess<AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF>::oop_load(p);
  65     }
  66 
  67     if (Concurrent) {
  68       return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
  69     }
  70 
  71     return RawAccess<>::oop_load(p);
  72   }
  73 
  74 public:
  75   ZHeapIteratorRootOopClosure(ZHeapIterator* iter) :
  76       _iter(iter) {}
  77 
  78   virtual void do_oop(oop* p) {
  79     const oop obj = load_oop(p);
  80     _iter->push(obj);
  81   }
  82 
  83   virtual void do_oop(narrowOop* p) {
  84     ShouldNotReachHere();
  85   }
  86 };
  87 
  88 template <bool VisitReferents>
  89 class ZHeapIteratorOopClosure : public ClaimMetadataVisitingOopIterateClosure {
  90 private:
  91   ZHeapIterator* const _iter;
  92   const oop            _base;
  93 
  94   oop load_oop(oop* p) {
  95     if (VisitReferents) {
  96       return HeapAccess<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>::oop_load_at(_base, _base->field_offset(p));
  97     }
  98 
  99     return HeapAccess<AS_NO_KEEPALIVE>::oop_load(p);
 100   }
 101 
 102 public:
 103   ZHeapIteratorOopClosure(ZHeapIterator* iter, oop base) :
 104       ClaimMetadataVisitingOopIterateClosure(ClassLoaderData::_claim_other),
 105       _iter(iter),
 106       _base(base) {}
 107 
 108   virtual ReferenceIterationMode reference_iteration_mode() {
 109     return VisitReferents ? DO_FIELDS : DO_FIELDS_EXCEPT_REFERENT;
 110   }
 111 
 112   virtual void do_oop(oop* p) {
 113     const oop obj = load_oop(p);
 114     _iter->push(obj);
 115   }
 116 
 117   virtual void do_oop(narrowOop* p) {
 118     ShouldNotReachHere();
 119   }
 120 
 121 #ifdef ASSERT
 122   virtual bool should_verify_oops() {
 123     return false;
 124   }
 125 #endif
 126 };
 127 
// Creates an iterator with an empty visit stack and a granule-indexed
// map (spanning the maximum address offset range) of lazily-allocated
// visited bitmaps.
ZHeapIterator::ZHeapIterator() :
    _visit_stack(),
    _visit_map(ZAddressOffsetMax) {}
 131 
 132 ZHeapIterator::~ZHeapIterator() {
 133   ZVisitMapIterator iter(&_visit_map);
 134   for (ZHeapIteratorBitMap* map; iter.next(&map);) {
 135     delete map;
 136   }
 137   ClassLoaderDataGraph::clear_claimed_marks(ClassLoaderData::_claim_other);
 138 }
 139 
 140 static size_t object_index_max() {
 141   return ZGranuleSize >> ZObjectAlignmentSmallShift;
 142 }
 143 
 144 static size_t object_index(oop obj) {
 145   const uintptr_t addr = ZOop::to_address(obj);
 146   const uintptr_t offset = ZAddress::offset(addr);
 147   const uintptr_t mask = ZGranuleSize - 1;
 148   return (offset & mask) >> ZObjectAlignmentSmallShift;
 149 }
 150 
 151 ZHeapIteratorBitMap* ZHeapIterator::object_map(oop obj) {
 152   const uintptr_t offset = ZAddress::offset(ZOop::to_address(obj));
 153   ZHeapIteratorBitMap* map = _visit_map.get(offset);
 154   if (map == NULL) {
 155     map = new ZHeapIteratorBitMap(object_index_max());
 156     _visit_map.put(offset, map);
 157   }
 158 
 159   return map;
 160 }
 161 
 162 void ZHeapIterator::push(oop obj) {
 163   if (obj == NULL) {
 164     // Ignore
 165     return;
 166   }
 167 
 168   ZHeapIteratorBitMap* const map = object_map(obj);
 169   const size_t index = object_index(obj);
 170   if (!map->try_set_bit(index)) {
 171     // Already pushed
 172     return;
 173   }
 174 
 175   // Push
 176   _visit_stack.push(obj);
 177 }
 178 
 179 template <typename RootsIterator, bool Concurrent, bool Weak>
 180 void ZHeapIterator::push_roots() {
 181   ZHeapIteratorRootOopClosure<Concurrent, Weak> cl(this);
 182   RootsIterator roots;
 183   roots.oops_do(&cl);
 184 }
 185 
 186 template <bool VisitReferents>
 187 void ZHeapIterator::push_fields(oop obj) {
 188   ZHeapIteratorOopClosure<VisitReferents> cl(this, obj);
 189   obj->oop_iterate(&cl);
 190 }
 191 
 192 template <bool VisitWeaks>
 193 void ZHeapIterator::objects_do(ObjectClosure* cl) {
 194   ZStatTimerDisable disable;
 195 
 196   // Push roots to visit
 197   push_roots<ZRootsIterator,                     false /* Concurrent */, false /* Weak */>();
 198   push_roots<ZConcurrentRootsIteratorClaimOther, true  /* Concurrent */, false /* Weak */>();
 199   if (VisitWeaks) {
 200     push_roots<ZWeakRootsIterator,           false /* Concurrent */, true  /* Weak */>();
 201     push_roots<ZConcurrentWeakRootsIterator, true  /* Concurrent */, true  /* Weak */>();
 202   }
 203 
 204   // Drain stack
 205   while (!_visit_stack.is_empty()) {
 206     const oop obj = _visit_stack.pop();
 207 
 208     // Visit object
 209     cl->do_object(obj);
 210 
 211     // Push fields to visit
 212     push_fields<VisitWeaks>(obj);
 213   }
 214 }
 215 
 216 void ZHeapIterator::objects_do(ObjectClosure* cl, bool visit_weaks) {
 217   if (visit_weaks) {
 218     objects_do<true /* VisitWeaks */>(cl);
 219   } else {
 220     objects_do<false /* VisitWeaks */>(cl);
 221   }
 222 }