
src/hotspot/share/gc/shenandoah/shenandoahFreeSet.cpp

rev 50076 : Fold Partial GC into Traversal GC


   8  * This code is distributed in the hope that it will be useful, but WITHOUT
   9  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  10  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  11  * version 2 for more details (a copy is included in the LICENSE file that
  12  * accompanied this code).
  13  *
  14  * You should have received a copy of the GNU General Public License version
  15  * 2 along with this work; if not, write to the Free Software Foundation,
  16  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  17  *
  18  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  19  * or visit www.oracle.com if you need additional information or have any
  20  * questions.
  21  *
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "logging/logStream.hpp"
  26 #include "gc/shenandoah/shenandoahFreeSet.hpp"
  27 #include "gc/shenandoah/shenandoahHeap.inline.hpp"


  28 
  29 ShenandoahFreeSet::ShenandoahFreeSet(ShenandoahHeap* heap, size_t max_regions) :
  30   _heap(heap),
  31   _mutator_free_bitmap(max_regions, mtGC),
  32   _collector_free_bitmap(max_regions, mtGC),
  33   _max(max_regions)
  34 {
  35   clear_internal();
  36 }
  37 
  38 void ShenandoahFreeSet::increase_used(size_t num_bytes) {
  39   assert_heaplock_owned_by_current_thread();
  40   _used += num_bytes;
  41 
  42   assert(_used <= _capacity, "must not use more than we have: used: " SIZE_FORMAT
  43          ", capacity: " SIZE_FORMAT ", num_bytes: " SIZE_FORMAT, _used, _capacity, num_bytes);
  44 }
  45 
  46 bool ShenandoahFreeSet::is_mutator_free(size_t idx) const {
  47   assert (idx < _max, "index is sane: " SIZE_FORMAT " < " SIZE_FORMAT " (left: " SIZE_FORMAT ", right: " SIZE_FORMAT ")",


 166   assert (!has_no_alloc_capacity(r), "Performance: should avoid full regions on this path: " SIZE_FORMAT, r->region_number());
 167 
 168   try_recycle_trashed(r);
 169 
 170   in_new_region = r->is_empty();
 171 
 172   HeapWord* result = r->allocate(word_size, type);
 173 
 174   if (result != NULL) {
 175     // Allocation successful, bump live data stats:
 176     r->increase_live_data_alloc_words(word_size);
 177     increase_used(word_size * HeapWordSize);
 178 
 179     switch (type) {
 180       case ShenandoahHeap::_alloc_gclab:
 181       case ShenandoahHeap::_alloc_shared_gc:
 182         if (_heap->is_concurrent_traversal_in_progress()) {
 183           // We're updating TAMS for evacuation-allocs, such that we will not
  184           // treat evacuated objects as implicitly live and traverse through them.
 185           // See top of shenandoahTraversal.cpp for an explanation.
 186           _heap->set_next_top_at_mark_start(r->bottom(), r->end());

 187           OrderAccess::fence();
 188         }
 189         break;
 190       case ShenandoahHeap::_alloc_tlab:
 191       case ShenandoahHeap::_alloc_shared:
 192         break;
 193       default:
 194         ShouldNotReachHere();
 195     }
 196   }
 197 
 198   if (result == NULL || has_no_alloc_capacity(r)) {
 199     // Region cannot afford this or future allocations. Retire it.
 200     //
 201     // While this seems a bit harsh, especially in the case when this large allocation does not
  202     // fit, but the next small one would, we risk inflating scan times when lots of
  203     // almost-full regions precede the fully-empty region where we want to allocate the entire TLAB.
 204     // TODO: Record first fully-empty region, and use that for large allocations
 205 
 206     // Record the remainder as allocation waste
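
The comment above captures a policy trade-off: once a region cannot satisfy the current request (or has no usable capacity left), it is retired from the free set outright instead of being kept around for smaller requests, because a long run of almost-full regions in front of an empty one inflates every free-set scan. Below is a minimal sketch of that retire-and-account step; the helpers free_words() and retire() are hypothetical stand-ins, and this is not the elided code that follows the hunk.

    // Illustration only, not the ShenandoahFreeSet implementation.
    // Hypothetical helpers: r->free_words() and retire(r); increase_used()
    // and has_no_alloc_capacity() are the functions shown in this file.
    void after_allocation_attempt(ShenandoahHeapRegion* r, HeapWord* result) {
      if (result == NULL || has_no_alloc_capacity(r)) {
        size_t waste_words = r->free_words();
        if (waste_words > 0) {
          // Count the unusable tail as used, so the free set's used <= capacity
          // invariant keeps holding after the region is dropped.
          increase_used(waste_words * HeapWordSize);
        }
        retire(r);  // remove the region from the mutator/collector free bitmaps
      }
    }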




   8  * This code is distributed in the hope that it will be useful, but WITHOUT
   9  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  10  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  11  * version 2 for more details (a copy is included in the LICENSE file that
  12  * accompanied this code).
  13  *
  14  * You should have received a copy of the GNU General Public License version
  15  * 2 along with this work; if not, write to the Free Software Foundation,
  16  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  17  *
  18  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  19  * or visit www.oracle.com if you need additional information or have any
  20  * questions.
  21  *
  22  */
  23 
  24 #include "precompiled.hpp"
  25 #include "logging/logStream.hpp"
  26 #include "gc/shenandoah/shenandoahFreeSet.hpp"
  27 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  28 #include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
  29 #include "gc/shenandoah/shenandoahTraversalGC.hpp"
  30 
  31 ShenandoahFreeSet::ShenandoahFreeSet(ShenandoahHeap* heap, size_t max_regions) :
  32   _heap(heap),
  33   _mutator_free_bitmap(max_regions, mtGC),
  34   _collector_free_bitmap(max_regions, mtGC),
  35   _max(max_regions)
  36 {
  37   clear_internal();
  38 }
  39 
  40 void ShenandoahFreeSet::increase_used(size_t num_bytes) {
  41   assert_heaplock_owned_by_current_thread();
  42   _used += num_bytes;
  43 
  44   assert(_used <= _capacity, "must not use more than we have: used: " SIZE_FORMAT
  45          ", capacity: " SIZE_FORMAT ", num_bytes: " SIZE_FORMAT, _used, _capacity, num_bytes);
  46 }
  47 
  48 bool ShenandoahFreeSet::is_mutator_free(size_t idx) const {
  49   assert (idx < _max, "index is sane: " SIZE_FORMAT " < " SIZE_FORMAT " (left: " SIZE_FORMAT ", right: " SIZE_FORMAT ")",


 168   assert (!has_no_alloc_capacity(r), "Performance: should avoid full regions on this path: " SIZE_FORMAT, r->region_number());
 169 
 170   try_recycle_trashed(r);
 171 
 172   in_new_region = r->is_empty();
 173 
 174   HeapWord* result = r->allocate(word_size, type);
 175 
 176   if (result != NULL) {
 177     // Allocation successful, bump live data stats:
 178     r->increase_live_data_alloc_words(word_size);
 179     increase_used(word_size * HeapWordSize);
 180 
 181     switch (type) {
 182       case ShenandoahHeap::_alloc_gclab:
 183       case ShenandoahHeap::_alloc_shared_gc:
 184         if (_heap->is_concurrent_traversal_in_progress()) {
 185           // We're updating TAMS for evacuation-allocs, such that we will not
  186           // treat evacuated objects as implicitly live and traverse through them.
 187           // See top of shenandoahTraversal.cpp for an explanation.
 188           _heap->set_next_top_at_mark_start(r->bottom(), r->top());
 189           _heap->traversal_gc()->traversal_set()->add_region_check_for_duplicates(r);
 190           OrderAccess::fence();
 191         }
 192         break;
 193       case ShenandoahHeap::_alloc_tlab:
 194       case ShenandoahHeap::_alloc_shared:
 195         break;
 196       default:
 197         ShouldNotReachHere();
 198     }
 199   }
 200 
 201   if (result == NULL || has_no_alloc_capacity(r)) {
 202     // Region cannot afford this or future allocations. Retire it.
 203     //
 204     // While this seems a bit harsh, especially in the case when this large allocation does not
  205     // fit, but the next small one would, we risk inflating scan times when lots of
  206     // almost-full regions precede the fully-empty region where we want to allocate the entire TLAB.
 207     // TODO: Record first fully-empty region, and use that for large allocations
 208 
 209     // Record the remainder as allocation waste
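
The switch in the changed hunk draws the line that matters here: only GC-time allocations (_alloc_gclab and _alloc_shared_gc) update TAMS and register the region with the traversal set, while mutator allocations (_alloc_tlab and _alloc_shared) take no extra bookkeeping. A small sketch of that classification, grounded in the enum values shown above; the enum type name and the helper itself are assumptions, not part of the patch.

    // Illustration only: classifies an allocation request the same way the
    // switch above does. The type name ShenandoahHeap::AllocType is assumed.
    static bool is_gc_time_alloc(ShenandoahHeap::AllocType type) {
      switch (type) {
        case ShenandoahHeap::_alloc_gclab:      // GC-local evacuation buffer
        case ShenandoahHeap::_alloc_shared_gc:  // shared evacuation allocation
          return true;                          // gets the TAMS/traversal-set treatment
        case ShenandoahHeap::_alloc_tlab:       // mutator thread-local buffer
        case ShenandoahHeap::_alloc_shared:     // shared mutator allocation
          return false;
        default:
          ShouldNotReachHere();
          return false;
      }
    }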

