1 /*
   2  * Copyright (c) 2001, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_GC_G1_G1OOPCLOSURES_INLINE_HPP
  26 #define SHARE_GC_G1_G1OOPCLOSURES_INLINE_HPP
  27 
  28 #include "gc/g1/g1CollectedHeap.hpp"
  29 #include "gc/g1/g1ConcurrentMark.inline.hpp"
  30 #include "gc/g1/g1OopClosures.hpp"
  31 #include "gc/g1/g1ParScanThreadState.inline.hpp"
  32 #include "gc/g1/g1RemSet.hpp"
  33 #include "gc/g1/heapRegion.inline.hpp"
  34 #include "gc/g1/heapRegionRemSet.hpp"
  35 #include "memory/iterator.inline.hpp"
  36 #include "oops/access.inline.hpp"
  37 #include "oops/compressedOops.inline.hpp"
  38 #include "oops/oopsHierarchy.hpp"
  39 #include "oops/oop.inline.hpp"
  40 #include "runtime/prefetch.inline.hpp"
  41 #include "utilities/align.hpp"
  42 
template <class T>
inline void G1ScanClosureBase::prefetch_and_push(T* p, const oop obj) {
  // Defer processing of the collection-set object obj: prefetch its mark
  // word and push the reference location p on this thread's task queue.
  //
  // We're not going to even bother checking whether the object is
  // already forwarded or not, as this usually causes an immediate
  // stall. We'll try to prefetch the object (for write, given that
  // we might need to install the forwarding reference) and we'll
  // get back to it when we pop it from the queue.
  Prefetch::write(obj->mark_addr_raw(), 0);
  Prefetch::read(obj->mark_addr_raw(), (HeapWordSize*2));

  // slightly paranoid test; I'm trying to catch potential
  // problems before we go into push_on_queue to know where the
  // problem is coming from
  assert((obj == RawAccess<>::oop_load(p)) ||
         (obj->is_forwarded() &&
         obj->forwardee() == RawAccess<>::oop_load(p)),
         "p should still be pointing to obj or to its forwardee");

  _par_scan_state->push_on_queue(p);
}
  63 
  64 template <class T>
  65 inline void G1ScanClosureBase::handle_non_cset_obj_common(G1HeapRegionAttr const region_attr, T* p, oop const obj) {
  66   if (region_attr.is_humongous()) {
  67     _g1h->set_humongous_is_live(obj);
  68   } else if (region_attr.is_optional()) {
  69     _par_scan_state->remember_reference_into_optional_region(p);
  70   }
  71 }
  72 
inline void G1ScanClosureBase::trim_queue_partially() {
  // Delegate to the per-thread scan state, which drains part of its task
  // queue to keep queue length bounded while scanning.
  _par_scan_state->trim_queue_partially();
}
  76 
template <class T>
inline void G1ScanEvacuatedObjClosure::do_oop_work(T* p) {
  // Process one reference field p of an already-evacuated object.
  T heap_oop = RawAccess<>::oop_load(p);

  if (CompressedOops::is_null(heap_oop)) {
    return;
  }
  oop obj = CompressedOops::decode_not_null(heap_oop);
  assert_object_is_in_heap(p, obj);

  const G1HeapRegionAttr region_attr = _g1h->region_attr(obj);
  if (region_attr.is_in_cset()) {
    // Target is in the collection set: queue it for evacuation.
    prefetch_and_push(p, obj);
  } else if (!HeapRegion::is_in_same_region(p, obj)) {
    // Cross-region reference outside the collection set.
    handle_non_cset_obj_common(region_attr, p, obj);
    assert(_scanning_in_young != Uninitialized, "Scan location has not been initialized.");
    // Skip card enqueueing when the reference originates from a young
    // region (such references need no remembered set entry).
    if (_scanning_in_young == True) {
      return;
    }
    _par_scan_state->enqueue_card_if_tracked(region_attr, p, obj);
  }
}
  99 
template <class T>
inline void G1CMOopClosure::do_oop_work(T* p) {
  // Hand the reference location straight to the concurrent mark task.
  _task->deal_with_reference(p);
}
 104 
template <class T>
inline void G1RootRegionScanClosure::do_oop_work(T* p) {
  // Volatile load: mutators may update this reference concurrently with
  // root region scanning.
  T heap_oop = RawAccess<MO_VOLATILE>::oop_load(p);
  if (CompressedOops::is_null(heap_oop)) {
    return;
  }
  oop obj = CompressedOops::decode_not_null(heap_oop);
  assert_object_is_in_heap(p, obj);

  // Mark the referenced object in the next bitmap on behalf of this worker.
  _cm->mark_in_next_bitmap(_worker_id, obj);
}
 116 
template <class T>
// Debug-only sanity checks for a reference p -> obj encountered during
// refinement; compiles to nothing in product builds.
inline static void check_obj_during_refinement(T* p, oop const obj) {
#ifdef ASSERT
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  // can't do because of races
  // assert(oopDesc::is_oop_or_null(obj), "expected an oop");
  assert(is_object_aligned(obj), "oop must be aligned");
  assert(g1h->is_in_reserved(obj), "oop must be in reserved");
  assert_object_is_in_heap(p, obj);

  HeapRegion* from = g1h->heap_region_containing(p);

  assert(from != NULL, "from region must be non-NULL");
  // p must lie within its containing region, or within the same humongous
  // object (whose pieces span multiple regions sharing one start region).
  assert(from->is_in_reserved(p) ||
         (from->is_humongous() &&
          g1h->heap_region_containing(p)->is_humongous() &&
          from->humongous_start_region() == g1h->heap_region_containing(p)->humongous_start_region()),
         "p " PTR_FORMAT " is not in the same region %u or part of the correct humongous object starting at region %u.",
         p2i(p), from->hrm_index(), from->humongous_start_region()->hrm_index());
#endif // ASSERT
}
 138 
template <class T>
inline void G1ConcurrentRefineOopClosure::do_oop_work(T* p) {
  // Refine one reference location: add a remembered set entry for a
  // cross-region reference into a tracked region.
  T o = RawAccess<MO_VOLATILE>::oop_load(p);
  if (CompressedOops::is_null(o)) {
    return;
  }
  oop obj = CompressedOops::decode_not_null(o);

  check_obj_during_refinement(p, obj);

  if (HeapRegion::is_in_same_region(p, obj)) {
    // Normally this closure should only be called with cross-region references.
    // But since Java threads are manipulating the references concurrently and we
    // reload the values things may have changed.
    // Also this check lets slip through references from a humongous continuation
    // region to its humongous start region, as they are in different regions, and
    // adds a remembered set entry. This is benign (apart from memory usage), as we
    // never try to either evacuate or eager reclaim humongous arrays of j.l.O.
    return;
  }

  HeapRegionRemSet* to_rem_set = _g1h->heap_region_containing(obj)->rem_set();

  assert(to_rem_set != NULL, "Need per-region 'into' remsets.");
  // Only record the reference if the target region's remembered set is
  // being maintained.
  if (to_rem_set->is_tracked()) {
    to_rem_set->add_reference(p, _worker_id);
  }
}
 167 
template <class T>
inline void G1ScanCardClosure::do_oop_work(T* p) {
  // Process one reference location found while scanning a card.
  T o = RawAccess<>::oop_load(p);
  if (CompressedOops::is_null(o)) {
    return;
  }
  oop obj = CompressedOops::decode_not_null(o);

  check_obj_during_refinement(p, obj);

  assert(!_g1h->is_in_cset((HeapWord*)p),
         "Oop originates from " PTR_FORMAT " (region: %u) which is in the collection set.",
         p2i(p), _g1h->addr_to_region((HeapWord*)p));

  const G1HeapRegionAttr region_attr = _g1h->region_attr(obj);
  if (region_attr.is_in_cset()) {
    // Since the source is always from outside the collection set, here we implicitly know
    // that this is a cross-region reference too.
    prefetch_and_push(p, obj);
  } else if (!HeapRegion::is_in_same_region(p, obj)) {
    // Cross-region reference outside the collection set: update humongous /
    // optional bookkeeping and enqueue a card if the target is tracked.
    handle_non_cset_obj_common(region_attr, p, obj);
    _par_scan_state->enqueue_card_if_tracked(region_attr, p, obj);
  }
}
 192 
 193 template <class T>
 194 inline void G1ScanRSForOptionalClosure::do_oop_work(T* p) {
 195   const G1HeapRegionAttr region_attr = _g1h->region_attr(p);
 196   // Entries in the optional collection set may start to originate from the collection
 197   // set after one or more increments. In this case, previously optional regions
 198   // became actual collection set regions. Filter them out here.
 199   if (region_attr.is_in_cset()) {
 200     return;
 201   }
 202   _scan_cl->do_oop_work(p);
 203   _scan_cl->trim_queue_partially();
 204 }
 205 
 206 void G1ParCopyHelper::do_cld_barrier(oop new_obj) {
 207   if (_g1h->heap_region_containing(new_obj)->is_young()) {
 208     _scanned_cld->record_modified_oops();
 209   }
 210 }
 211 
// Mark obj in the next marking bitmap; obj must be outside the collection set.
void G1ParCopyHelper::mark_object(oop obj) {
  assert(!_g1h->heap_region_containing(obj)->in_collection_set(), "should not mark objects in the CSet");

  // We know that the object is not moving so it's safe to read its size.
  _cm->mark_in_next_bitmap(_worker_id, obj);
}
 218 
// Drain part of the per-thread task queue to bound its length.
void G1ParCopyHelper::trim_queue_partially() {
  _par_scan_state->trim_queue_partially();
}
 222 
template <G1Barrier barrier, G1Mark do_mark_object>
template <class T>
void G1ParCopyClosure<barrier, do_mark_object>::do_oop_work(T* p) {
  // Evacuate or account for the object referenced by root location p,
  // updating p to point at the forwardee when the target is evacuated.
  T heap_oop = RawAccess<>::oop_load(p);

  if (CompressedOops::is_null(heap_oop)) {
    return;
  }

  oop obj = CompressedOops::decode_not_null(heap_oop);

  assert(_worker_id == _par_scan_state->worker_id(), "sanity");

  const G1HeapRegionAttr state = _g1h->region_attr(obj);
  if (state.is_in_cset()) {
    oop forwardee;
    markWord m = obj->mark_raw();
    if (m.is_marked()) {
      // Already evacuated by some thread; the mark word encodes the
      // forwarding pointer.
      forwardee = (oop) m.decode_pointer();
    } else {
      // Not yet copied: evacuate it (or claim the copy) ourselves.
      forwardee = _par_scan_state->copy_to_survivor_space(state, obj, m);
    }
    assert(forwardee != NULL, "forwardee should not be NULL");
    // Update the root location to refer to the new copy.
    RawAccess<IS_NOT_NULL>::oop_store(p, forwardee);

    if (barrier == G1BarrierCLD) {
      do_cld_barrier(forwardee);
    }
  } else {
    if (state.is_humongous()) {
      _g1h->set_humongous_is_live(obj);
    } else if (state.is_optional()) {
      _par_scan_state->remember_root_into_optional_region(p);
    }

    // The object is not in collection set. If we're a root scanning
    // closure during an initial mark pause then attempt to mark the object.
    if (do_mark_object == G1MarkFromRoot) {
      mark_object(obj);
    }
  }
  trim_queue_partially();
}
 266 
 267 template <class T> void G1RebuildRemSetClosure::do_oop_work(T* p) {
 268   oop const obj = RawAccess<MO_VOLATILE>::oop_load(p);
 269   if (obj == NULL) {
 270     return;
 271   }
 272   assert_object_is_in_heap(p, obj);
 273 
 274   if (HeapRegion::is_in_same_region(p, obj)) {
 275     return;
 276   }
 277 
 278   HeapRegion* to = _g1h->heap_region_containing(obj);
 279   HeapRegionRemSet* rem_set = to->rem_set();
 280   rem_set->add_reference(p, _worker_id);
 281 }
 282 
 283 #endif // SHARE_GC_G1_G1OOPCLOSURES_INLINE_HPP