/*
 * Copyright (c) 2016, Red Hat, Inc. and/or its affiliates.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shenandoah/shenandoahFreeSet.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"

ShenandoahFreeSet::ShenandoahFreeSet(ShenandoahHeapRegionSet* regions, size_t max_regions) :
  _regions(regions),
  _free_bitmap(max_regions, mtGC),
  _max(max_regions)
{
  clear_internal();
}

void ShenandoahFreeSet::increase_used(size_t num_bytes) {
  assert_heaplock_owned_by_current_thread();
  _used += num_bytes;

  assert(_used <= _capacity, "must not use more than we have: used: " SIZE_FORMAT
         ", capacity: " SIZE_FORMAT ", num_bytes: " SIZE_FORMAT, _used, _capacity, num_bytes);
}

bool ShenandoahFreeSet::is_free(size_t idx) const {
  assert (idx < _max, "index is sane: " SIZE_FORMAT " < " SIZE_FORMAT " (left: " SIZE_FORMAT ", right: " SIZE_FORMAT ")",
          idx, _max, _leftmost, _rightmost);
  return _free_bitmap.at(idx);
}

HeapWord* ShenandoahFreeSet::allocate_single(size_t word_size, ShenandoahHeap::AllocType type, bool& in_new_region) {
  // Scan the bitmap looking for a first fit.
  //
  // The leftmost and rightmost bounds provide enough caching to walk the bitmap efficiently.
  // Normally, the region to allocate in is found right away.
  //
  // Allocations are biased: new application allocs go to the beginning of the heap, and GC allocs
  // go to the end. This makes application allocation faster, because we would clear lots
  // of regions from the beginning most of the time.
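  //
  // Illustrative example (hypothetical 8-region heap, '1' marks a free region):
  //
  //   index:   0 1 2 3 4 5 6 7    (_leftmost = 0, _rightmost = 7)
  //   bitmap:  1 1 0 0 0 0 1 1
  //
  // A mutator (tlab/shared) allocation scans 0 -> 7 and tries region 0 first,
  // while a GC (gclab/shared_gc) allocation scans 7 -> 0 and tries region 7 first.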

  switch (type) {
    case ShenandoahHeap::_alloc_tlab:
    case ShenandoahHeap::_alloc_shared: {
      for (size_t idx = _leftmost; idx <= _rightmost; idx++) {
        if (is_free(idx)) {
          HeapWord* result = try_allocate_in(word_size, type, idx, in_new_region);
          if (result != NULL) {
            return result;
          }
        }
      }
      break;
    }
    case ShenandoahHeap::_alloc_gclab:
    case ShenandoahHeap::_alloc_shared_gc: {
      // Scan right to left, down to and including _leftmost. size_t is unsigned,
      // so iterate on idx + 1 to dodge underflow when _leftmost is 0.
      for (size_t c = _rightmost + 1; c > _leftmost; c--) {
        size_t idx = c - 1;
        if (is_free(idx)) {
          HeapWord* result = try_allocate_in(word_size, type, idx, in_new_region);
          if (result != NULL) {
            return result;
          }
        }
      }
      break;
    }
    default:
      ShouldNotReachHere();
  }

  return NULL;
}

HeapWord* ShenandoahFreeSet::try_allocate_in(size_t word_size, ShenandoahHeap::AllocType type, size_t idx, bool& in_new_region) {
  ShenandoahHeapRegion* r = _regions->get(idx);
  in_new_region = r->is_empty();

  HeapWord* result = r->allocate(word_size, type);

  if (result != NULL) {
    // Allocation successful, bump live data stats:
    if (implicit_live(type)) {
      r->increase_live_data_words(word_size);
    }
    increase_used(word_size * HeapWordSize);
    ShenandoahHeap::heap()->increase_used(word_size * HeapWordSize);
  } else {
    // Region cannot afford the allocation. Retire it.
    // This may seem a bit harsh, especially when this large allocation does not fit
    // but the next smaller one would. However, keeping such regions around risks
    // inflating scan times when lots of almost-full regions precede the fully-empty
    // region where we want to allocate the entire TLAB.
    // TODO: Record the first fully-empty region, and use that for large allocations
    size_t num = r->region_number();
    increase_used(r->free());
    _free_bitmap.clear_bit(num);

    // Touched the bounds? Need to update:
    if (num == _leftmost || num == _rightmost) {
      adjust_bounds();
    }
    assert_bounds();
  }
  return result;
}
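
// Retirement example (hypothetical numbers): if a region has 4K words free and a
// 6K-word TLAB request arrives, the allocation fails; try_allocate_in() then charges
// the remaining 4K * HeapWordSize free bytes as used and clears the region's bit,
// so subsequent scans skip it entirely.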

void ShenandoahFreeSet::adjust_bounds() {
  // Move both bounds inward until they land on a free region.
  while (_leftmost < _max && !is_free(_leftmost)) {
    _leftmost++;
  }
  while (_rightmost > 0 && !is_free(_rightmost)) {
    _rightmost--;
  }
}
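
// Illustrative example (hypothetical 5-region bitmap 0 0 1 1 0, stale bounds
// _leftmost = 0, _rightmost = 4): adjust_bounds() advances _leftmost to 2 and
// rewinds _rightmost to 3, the nearest free regions from either side.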

HeapWord* ShenandoahFreeSet::allocate_contiguous(size_t words_size) {
  assert_heaplock_owned_by_current_thread();

  // required_regions() rounds the byte size up to a whole number of regions.
  size_t num = ShenandoahHeapRegion::required_regions(words_size * HeapWordSize);

  // No regions left to satisfy the allocation, bye.
  if (num > count()) {
    return NULL;
  }

  // Find the contiguous interval of $num regions, starting from $beg and ending in $end,
  // inclusive. Contiguous allocations are biased to the beginning of the heap.
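  //
  // For example (hypothetical bitmap 1 0 1 1 1, num = 3): the scan starts at
  // [0; 0], fast-forwards past the non-free region 1, and settles on [2; 4].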

  size_t beg = _leftmost;
  size_t end = beg;

  while (true) {
    if (end >= _max) {
      // Hit the end, goodbye
      return NULL;
    }

    // If the region is not free or not empty, then the current [beg; end] is useless,
    // and we may fast-forward past it.
    if (!is_free(end) || !_regions->get(end)->is_empty()) {
      end++;
      beg = end;
      continue;
    }

    if ((end - beg + 1) == num) {
      // Found the match
      break;
    }

    end++;
  }

#ifdef ASSERT
  assert ((end - beg + 1) == num, "Found just enough regions");
  for (size_t i = beg; i <= end; i++) {
    assert(_regions->get(i)->is_empty(), "Should be empty");
    assert(i == beg || _regions->get(i-1)->region_number() + 1 == _regions->get(i)->region_number(), "Should be contiguous");
  }
#endif

  ShenandoahHeap* sh = ShenandoahHeap::heap();

  // The trailing region may be non-full; record the remainder there. The remainder
  // is words_size modulo the region size, which is a power of two, hence the mask.
  size_t remainder = words_size & ShenandoahHeapRegion::region_size_words_mask();

  // Initialize regions:
  for (size_t i = beg; i <= end; i++) {
    ShenandoahHeapRegion* r = _regions->get(i);
    if (i == beg) {
      r->make_humongous_start();
    } else {
      r->make_humongous_cont();
    }

    size_t used_words;
    if ((i == end) && (remainder != 0)) {
      used_words = remainder;
    } else {
      used_words = ShenandoahHeapRegion::region_size_words();
    }

    if (implicit_live(ShenandoahHeap::_alloc_shared)) {
      r->increase_live_data_words(used_words);
    }
    r->set_top(r->bottom() + used_words);
    r->reset_alloc_stats_to_shared();
    sh->increase_used(used_words * HeapWordSize);

    _free_bitmap.clear_bit(r->region_number());
  }

  // While individual regions report their true use, all humongous regions are
  // marked used in the free set.
  increase_used(ShenandoahHeapRegion::region_size_bytes() * num);

  // Allocated at left/rightmost? Move the bounds appropriately.
  if (beg == _leftmost || end == _rightmost) {
    adjust_bounds();
  }
  assert_bounds();

  return _regions->get(beg)->bottom();
}
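
// Illustrative sizing (hypothetical 2 MB regions, i.e. 256K words on 64-bit):
// a request for 600K words is 4.8 MB, so required_regions() returns 3;
// remainder = 600K & (256K - 1) = 88K words, so the last region's top ends up
// 88K words above its bottom, while the free set itself charges all three
// full regions as used.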

void ShenandoahFreeSet::add_region(ShenandoahHeapRegion* r) {
  assert_heaplock_owned_by_current_thread();
  assert(!r->in_collection_set(), "Shouldn't be adding those to the free set");
  assert(r->is_alloc_allowed(), "Should only add regions that can be allocated at");
  assert(!is_free(r->region_number()), "We are about to add it, it shouldn't be there already");

  _free_bitmap.set_bit(r->region_number());
  _leftmost  = MIN2(_leftmost,  r->region_number());
  _rightmost = MAX2(_rightmost, r->region_number());
  _capacity += r->free();
  assert(_used <= _capacity, "must not use more than we have");
}

void ShenandoahFreeSet::clear() {
  assert_heaplock_owned_by_current_thread();
  clear_internal();
}

void ShenandoahFreeSet::clear_internal() {
  _free_bitmap.clear();
  // Inverted bounds (_leftmost = _max, _rightmost = 0) denote an empty set;
  // add_region() restores them as regions come in.
  _leftmost = _max;
  _rightmost = 0;
  _capacity = 0;
  _used = 0;
}

HeapWord* ShenandoahFreeSet::allocate(size_t word_size, ShenandoahHeap::AllocType type, bool& in_new_region) {
  assert_heaplock_owned_by_current_thread();
  assert_bounds();

  // Not enough memory in the free region set. Coming out of full GC, it is possible
  // that there are no free regions available, so the scan bounds may be invalid.
  // Have to poll the capacity as the precaution here.
  if (word_size * HeapWordSize > capacity()) return NULL;

  if (word_size > ShenandoahHeapRegion::humongous_threshold_words()) {
    switch (type) {
      case ShenandoahHeap::_alloc_shared:
      case ShenandoahHeap::_alloc_shared_gc:
        in_new_region = true;
        return allocate_large_memory(word_size);
      case ShenandoahHeap::_alloc_gclab:
      case ShenandoahHeap::_alloc_tlab:
        in_new_region = false;
        log_warning(gc)("Trying to allocate TLAB larger than the humongous threshold: " SIZE_FORMAT " > " SIZE_FORMAT,
                        word_size, ShenandoahHeapRegion::humongous_threshold_words());
        return NULL;
      default:
        ShouldNotReachHere();
        return NULL;
    }
  } else {
    return allocate_small_memory(word_size, type, in_new_region);
  }
}
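
// Routing example (hypothetical numbers): with a humongous threshold of 100K words,
// a 300K-word shared allocation goes to allocate_large_memory(), while the same
// request for a TLAB or GCLAB is refused outright, since a lab must fit within a
// single region.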

HeapWord* ShenandoahFreeSet::allocate_small_memory(size_t word_size, ShenandoahHeap::AllocType type, bool& in_new_region) {
  // Try to allocate right away:
  HeapWord* result = allocate_single(word_size, type, in_new_region);

  if (result == NULL) {
    // No free regions? Chances are, we have acquired the lock before the recycler.
    // Ask allocator to recycle some trash and try to allocate again.
    ShenandoahHeap::heap()->recycle_trash_assist(1);
    result = allocate_single(word_size, type, in_new_region);
  }

  return result;
}

HeapWord* ShenandoahFreeSet::allocate_large_memory(size_t words) {
  assert_heaplock_owned_by_current_thread();

  // Try to allocate right away:
  HeapWord* r = allocate_contiguous(words);
  if (r != NULL) {
    return r;
  }

  // Try to recycle enough regions for this allocation:
  ShenandoahHeap::heap()->recycle_trash_assist(ShenandoahHeapRegion::required_regions(words * HeapWordSize));
  r = allocate_contiguous(words);
  if (r != NULL) {
    return r;
  }

  // Try to recycle all regions: it is possible we have cleaned up a fragmented block before:
  ShenandoahHeap::heap()->recycle_trash_assist(_max);
  r = allocate_contiguous(words);
  if (r != NULL) {
    return r;
  }

  return NULL;
}
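
// The escalation above goes: allocate as-is, then recycle just enough trash regions
// for this one request, then recycle everything up to _max regions before giving up.
// Each step retries the same contiguous allocation.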

size_t ShenandoahFreeSet::unsafe_peek_free() const {
  // Deliberately not locked, this method is unsafe when the free set is modified.

  for (size_t index = _leftmost; index <= _rightmost; index++) {
    if (index < _max && is_free(index)) {
      ShenandoahHeapRegion* r = _regions->get(index);
      if (r->free() >= MinTLABSize) {
        return r->free();
      }
    }
  }

  // It appears that no regions are left
  return 0;
}

void ShenandoahFreeSet::print_on(outputStream* out) const {
  out->print_cr("Free Set: " SIZE_FORMAT, count());
  for (size_t index = _leftmost; index <= _rightmost; index++) {
    if (is_free(index)) {
      _regions->get(index)->print_on(out);
    }
  }
}

bool ShenandoahFreeSet::implicit_live(ShenandoahHeap::AllocType type) const {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  if (heap->shenandoahPolicy()->can_do_traversal_gc()) {
    if (heap->is_concurrent_traversal_in_progress()) {
      return false;
    }
    switch (type) {
      case ShenandoahHeap::_alloc_tlab:
      case ShenandoahHeap::_alloc_shared:
        return false;
      case ShenandoahHeap::_alloc_gclab:
      case ShenandoahHeap::_alloc_shared_gc:
        return true;
      default:
        ShouldNotReachHere();
    }
  }
  return true;
}
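
// Summary of implicit_live() (as implemented above): without traversal GC support,
// allocations are always implicitly live. With traversal support, nothing is
// implicitly live while a traversal is running; otherwise mutator allocs
// (tlab/shared) are not implicitly live, but GC allocs (gclab/shared_gc) are.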

#ifdef ASSERT
void ShenandoahFreeSet::assert_heaplock_owned_by_current_thread() const {
  ShenandoahHeap::heap()->assert_heaplock_owned_by_current_thread();
}

void ShenandoahFreeSet::assert_bounds() const {
  // Performance invariants. Failing these would not break the free set, but performance
  // would suffer.
  assert (_leftmost <= _max, "leftmost in bounds: "  SIZE_FORMAT " <= " SIZE_FORMAT, _leftmost,  _max);
  assert (_rightmost < _max, "rightmost in bounds: " SIZE_FORMAT " < "  SIZE_FORMAT, _rightmost, _max);

  assert (_leftmost == _max || is_free(_leftmost),  "leftmost region should be free: " SIZE_FORMAT,  _leftmost);
  assert (_rightmost == 0   || is_free(_rightmost), "rightmost region should be free: " SIZE_FORMAT, _rightmost);

  size_t beg_off = _free_bitmap.get_next_one_offset(0);
  size_t end_off = _free_bitmap.get_next_one_offset(_rightmost + 1);
  assert (beg_off >= _leftmost, "free regions before the leftmost: " SIZE_FORMAT ", bound " SIZE_FORMAT, beg_off, _leftmost);
  assert (end_off == _max,      "free regions past the rightmost: " SIZE_FORMAT ", bound " SIZE_FORMAT,  end_off, _rightmost);
}
#endif