src/share/vm/gc/shenandoah/shenandoahHeap.cpp

 179 
 180   size_t i = 0;
 181   for (i = 0; i < _num_regions; i++) {
 182     _in_cset_fast_test_base[i] = false; // Not in cset
 183     HeapWord* bottom = (HeapWord*) pgc_rs.base() + regionSizeWords * i;
 184     _complete_top_at_mark_starts_base[i] = bottom;
 185     _next_top_at_mark_starts_base[i] = bottom;
 186   }
 187 
 188   {
 189     ShenandoahHeapLock lock(this);
 190     for (i = 0; i < _num_regions; i++) {
 191       ShenandoahHeapRegion* current = new ShenandoahHeapRegion(this, (HeapWord*) pgc_rs.base() +
 192                                                                regionSizeWords * i, regionSizeWords, i);
 193       _free_regions->add_region(current);
 194       _ordered_regions->add_region(current);
 195     }
 196   }
 197   assert(((size_t) _ordered_regions->active_regions()) == _num_regions, "");
 198   _first_region = _ordered_regions->get(0);
 199   _first_region_bottom = _first_region->bottom();
 200   assert((((size_t) _first_region_bottom) &
 201           (ShenandoahHeapRegion::region_size_bytes() - 1)) == 0,
 202          "misaligned heap: "PTR_FORMAT, p2i(_first_region_bottom));
 203 
 204   if (log_is_enabled(Trace, gc, region)) {
 205     ResourceMark rm;
 206     outputStream* out = Log(gc, region)::trace_stream();
 207     log_trace(gc, region)("All Regions");
 208     _ordered_regions->print(out);
 209     log_trace(gc, region)("Free Regions");
 210     _free_regions->print(out);
 211   }
 212 
 213   // The call below uses SATB machinery that lives in G1 but probably
 214   // belongs in a shared location.
 215   JavaThread::satb_mark_queue_set().initialize(SATB_Q_CBL_mon,
 216                                                SATB_Q_FL_lock,
 217                                                20 /*G1SATBProcessCompletedThreshold */,
 218                                                Shared_SATB_Q_lock);
 219 
 220   // Reserve space for prev and next bitmap.
 221   _bitmap_size = CMBitMap::compute_size(heap_rs.size());
 222   _heap_region = MemRegion((HeapWord*) heap_rs.base(), heap_rs.size() / HeapWordSize);


 514 }
 515 
 516 bool ShenandoahHeap::is_maximal_no_gc() const {
 517   Unimplemented();
 518   return true;
 519 }
 520 
 521 size_t ShenandoahHeap::max_capacity() const {
 522   return _max_regions * ShenandoahHeapRegion::region_size_bytes();
 523 }
 524 
 525 size_t ShenandoahHeap::min_capacity() const {
 526   return _initialSize;
 527 }
 528 
 529 VirtualSpace* ShenandoahHeap::storage() const {
 530   return (VirtualSpace*) &_storage;
 531 }
 532 
 533 bool ShenandoahHeap::is_in(const void* p) const {
 534   HeapWord* first_region_bottom = _first_region->bottom();
 535   HeapWord* last_region_end = first_region_bottom + (ShenandoahHeapRegion::region_size_bytes() / HeapWordSize) * _num_regions;
 536   return p >= _first_region_bottom && p < last_region_end;
 537 }
 538 
 539 bool ShenandoahHeap::is_scavengable(const void* p) {
 540   return true;
 541 }
 542 
 543 HeapWord* ShenandoahHeap::allocate_from_gclab_slow(Thread* thread, size_t size) {
 544   // Retain tlab and allocate object in shared space if
 545   // the amount free in the tlab is too large to discard.
 546   if (thread->gclab().free() > thread->gclab().refill_waste_limit()) {
 547     thread->gclab().record_slow_allocation(size);
 548     return NULL;
 549   }
 550 
 551   // Discard gclab and allocate a new one.
 552   // To minimize fragmentation, the last GCLAB may be smaller than the rest.
 553   size_t new_gclab_size = thread->gclab().compute_size(size);
 554 
 555   thread->gclab().clear_before_allocation();
 556 


2045     log_warning(gc)("OOM during evacuation. Let Java thread wait until evacuation finishes.");
2046     while (_evacuation_in_progress) { // wait.
2047       Thread::current()->_ParkEvent->park(1);
2048     }
2049   }
2050 
2051 }
2052 
2053 HeapWord* ShenandoahHeap::tlab_post_allocation_setup(HeapWord* obj) {
2054   // Initialize Brooks pointer for the next object
2055   HeapWord* result = obj + BrooksPointer::word_size();
2056   BrooksPointer::initialize(oop(result));
2057   return result;
2058 }
2059 
2060 uint ShenandoahHeap::oop_extra_words() {
2061   return BrooksPointer::word_size();
2062 }
2063 
2064 void ShenandoahHeap::grow_heap_by(size_t num_regions) {
2065   size_t base = _num_regions;
2066   ensure_new_regions(num_regions);
2067   for (size_t i = 0; i < num_regions; i++) {
2068     size_t new_region_index = i + base;
2069     HeapWord* start = _first_region_bottom + (ShenandoahHeapRegion::region_size_bytes() / HeapWordSize) * new_region_index;
2070     ShenandoahHeapRegion* new_region = new ShenandoahHeapRegion(this, start, ShenandoahHeapRegion::region_size_bytes() / HeapWordSize, new_region_index);
2071 
2072     if (log_is_enabled(Trace, gc, region)) {
2073       ResourceMark rm;
2074       outputStream* out = Log(gc, region)::trace_stream();
2075       out->print_cr("allocating new region at index: "SIZE_FORMAT, new_region_index);
2076       new_region->print_on(out);
2077     }
2078 
2079     assert(_ordered_regions->active_regions() == new_region->region_number(), "must match");
2080     _ordered_regions->add_region(new_region);
2081     _in_cset_fast_test_base[new_region_index] = false; // Not in cset
2082     _next_top_at_mark_starts_base[new_region_index] = new_region->bottom();
2083     _complete_top_at_mark_starts_base[new_region_index] = new_region->bottom();
2084 
2085     _free_regions->add_region(new_region);
2086   }
2087 }
2088 
2089 void ShenandoahHeap::ensure_new_regions(size_t new_regions) {




 179 
 180   size_t i = 0;
 181   for (i = 0; i < _num_regions; i++) {
 182     _in_cset_fast_test_base[i] = false; // Not in cset
 183     HeapWord* bottom = (HeapWord*) pgc_rs.base() + regionSizeWords * i;
 184     _complete_top_at_mark_starts_base[i] = bottom;
 185     _next_top_at_mark_starts_base[i] = bottom;
 186   }
 187 
 188   {
 189     ShenandoahHeapLock lock(this);
 190     for (i = 0; i < _num_regions; i++) {
 191       ShenandoahHeapRegion* current = new ShenandoahHeapRegion(this, (HeapWord*) pgc_rs.base() +
 192                                                                regionSizeWords * i, regionSizeWords, i);
 193       _free_regions->add_region(current);
 194       _ordered_regions->add_region(current);
 195     }
 196   }
 197   assert(((size_t) _ordered_regions->active_regions()) == _num_regions, "");
 198   _first_region = _ordered_regions->get(0);
 199   assert((((size_t) base()) &
 200           (ShenandoahHeapRegion::region_size_bytes() - 1)) == 0,
 201          "misaligned heap: "PTR_FORMAT, p2i(base()));
 202 
 203   if (log_is_enabled(Trace, gc, region)) {
 204     ResourceMark rm;
 205     outputStream* out = Log(gc, region)::trace_stream();
 206     log_trace(gc, region)("All Regions");
 207     _ordered_regions->print(out);
 208     log_trace(gc, region)("Free Regions");
 209     _free_regions->print(out);
 210   }
 211 
 212   // The call below uses SATB machinery that lives in G1 but probably
 213   // belongs in a shared location.
 214   JavaThread::satb_mark_queue_set().initialize(SATB_Q_CBL_mon,
 215                                                SATB_Q_FL_lock,
 216                                                20 /*G1SATBProcessCompletedThreshold */,
 217                                                Shared_SATB_Q_lock);
 218 
 219   // Reserve space for prev and next bitmap.
 220   _bitmap_size = CMBitMap::compute_size(heap_rs.size());
 221   _heap_region = MemRegion((HeapWord*) heap_rs.base(), heap_rs.size() / HeapWordSize);
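The loop above derives each region's bottom by fixed-stride arithmetic from the heap base, and the assert checks that the base is region-size aligned so the power-of-two mask works. A minimal standalone sketch of that arithmetic, assuming illustrative values (the 1 MiB region size, region count, and base address are made up, not HotSpot's):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  const size_t region_size_bytes = 1u << 20;   // assumed 1 MiB regions
  const size_t num_regions = 8;                // assumed region count
  // Pretend heap base; power-of-two aligned, as the assert above requires.
  const uintptr_t heap_base = 0x100000000ULL;

  // Same power-of-two alignment check as the assert in the hunk above.
  assert((heap_base & (region_size_bytes - 1)) == 0 && "misaligned heap");

  // Region i's bottom is a fixed offset from the heap base.
  for (size_t i = 0; i < num_regions; i++) {
    uintptr_t bottom = heap_base + region_size_bytes * i;
    printf("region %zu bottom: 0x%zx\n", i, (size_t) bottom);
  }
  return 0;
}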


 513 }
 514 
 515 bool ShenandoahHeap::is_maximal_no_gc() const {
 516   Unimplemented();
 517   return true;
 518 }
 519 
 520 size_t ShenandoahHeap::max_capacity() const {
 521   return _max_regions * ShenandoahHeapRegion::region_size_bytes();
 522 }
 523 
 524 size_t ShenandoahHeap::min_capacity() const {
 525   return _initialSize;
 526 }
 527 
 528 VirtualSpace* ShenandoahHeap::storage() const {
 529   return (VirtualSpace*) &_storage;
 530 }
 531 
 532 bool ShenandoahHeap::is_in(const void* p) const {
 533   HeapWord* heap_base = (HeapWord*) base();
 534   HeapWord* last_region_end = heap_base + (ShenandoahHeapRegion::region_size_bytes() / HeapWordSize) * _num_regions;
 535   return p >= heap_base && p < last_region_end;
 536 }
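Because the regions are laid out contiguously, the membership test above needs no per-region lookup. A minimal sketch of the same half-open range check, using a hypothetical MiniHeap stand-in rather than the real ShenandoahHeap:

#include <cstddef>
#include <cstdint>

struct MiniHeap {
  uintptr_t base;               // address of the first region's bottom
  size_t    region_size_bytes;  // all regions share this size
  size_t    num_regions;

  // Membership reduces to one range check: p in [base, end).
  bool is_in(const void* p) const {
    uintptr_t addr = (uintptr_t) p;
    uintptr_t end  = base + region_size_bytes * num_regions;
    return addr >= base && addr < end;
  }
};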
 537 
 538 bool ShenandoahHeap::is_scavengable(const void* p) {
 539   return true;
 540 }
 541 
 542 HeapWord* ShenandoahHeap::allocate_from_gclab_slow(Thread* thread, size_t size) {
 543   // Retain tlab and allocate object in shared space if
 544   // the amount free in the tlab is too large to discard.
 545   if (thread->gclab().free() > thread->gclab().refill_waste_limit()) {
 546     thread->gclab().record_slow_allocation(size);
 547     return NULL;
 548   }
 549 
 550   // Discard gclab and allocate a new one.
 551   // To minimize fragmentation, the last GCLAB may be smaller than the rest.
 552   size_t new_gclab_size = thread->gclab().compute_size(size);
 553 
 554   thread->gclab().clear_before_allocation();
 555 
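The comments above describe the GCLAB slow path's retain-vs-discard policy: a lab with more free space than the refill waste limit is kept, and the allocation goes to the shared heap instead. A sketch of that decision under assumed, simplified types (MiniLab and should_refill are hypothetical names, not the gclab API):

#include <cstddef>

struct MiniLab {
  size_t free_words;          // space left in the current lab
  size_t refill_waste_limit;  // max space we are willing to throw away
};

// Returns true if the lab should be discarded and refilled; false if the
// caller should retain it and satisfy this allocation from the shared heap.
static bool should_refill(const MiniLab& lab) {
  return lab.free_words <= lab.refill_waste_limit;
}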


2044     log_warning(gc)("OOM during evacuation. Let Java thread wait until evacuation finishes.");
2045     while (_evacuation_in_progress) { // wait.
2046       Thread::current()->_ParkEvent->park(1);
2047     }
2048   }
2049 
2050 }
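The loop above parks the failing Java thread in 1 ms intervals until the evacuation flag drops. A rough standalone equivalent, with std::atomic and sleep_for standing in for HotSpot's ParkEvent (illustrative only, not the VM's synchronization):

#include <atomic>
#include <chrono>
#include <thread>

static std::atomic<bool> evacuation_in_progress{true};

// Spin-park until another thread clears the flag, mirroring the wait loop.
static void wait_for_evacuation() {
  while (evacuation_in_progress.load(std::memory_order_acquire)) {
    std::this_thread::sleep_for(std::chrono::milliseconds(1));  // ~park(1)
  }
}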
2051 
2052 HeapWord* ShenandoahHeap::tlab_post_allocation_setup(HeapWord* obj) {
2053   // Initialize Brooks pointer for the next object
2054   HeapWord* result = obj + BrooksPointer::word_size();
2055   BrooksPointer::initialize(oop(result));
2056   return result;
2057 }
2058 
2059 uint ShenandoahHeap::oop_extra_words() {
2060   return BrooksPointer::word_size();
2061 }
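Together, tlab_post_allocation_setup() and oop_extra_words() imply the Brooks pointer layout: each object is preceded by one extra word that initially holds a self-referencing forwarding pointer. A sketch of that layout with hypothetical names (kBrooksWords, setup_brooks) in place of the BrooksPointer API:

#include <cstddef>
#include <cstdint>

static const size_t kBrooksWords = 1;  // plays the role of word_size()

// Given raw lab memory at 'mem' (at least kBrooksWords + object words),
// reserve the forwarding word and return the object start, mirroring
// tlab_post_allocation_setup() above.
static uintptr_t* setup_brooks(uintptr_t* mem) {
  uintptr_t* obj = mem + kBrooksWords;  // object starts one word in
  obj[-1] = (uintptr_t) obj;            // self-referencing forwarding pointer
  return obj;
}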
2062 
2063 void ShenandoahHeap::grow_heap_by(size_t num_regions) {
2064   size_t old_num_regions = _num_regions;
2065   ensure_new_regions(num_regions);
2066   for (size_t i = 0; i < num_regions; i++) {
2067     size_t new_region_index = i + old_num_regions;
2068     HeapWord* start = ((HeapWord*) base()) + (ShenandoahHeapRegion::region_size_bytes() / HeapWordSize) * new_region_index;
2069     ShenandoahHeapRegion* new_region = new ShenandoahHeapRegion(this, start, ShenandoahHeapRegion::region_size_bytes() / HeapWordSize, new_region_index);
2070 
2071     if (log_is_enabled(Trace, gc, region)) {
2072       ResourceMark rm;
2073       outputStream* out = Log(gc, region)::trace_stream();
2074       out->print_cr("allocating new region at index: "SIZE_FORMAT, new_region_index);
2075       new_region->print_on(out);
2076     }
2077 
2078     assert(_ordered_regions->active_regions() == new_region->region_number(), "must match");
2079     _ordered_regions->add_region(new_region);
2080     _in_cset_fast_test_base[new_region_index] = false; // Not in cset
2081     _next_top_at_mark_starts_base[new_region_index] = new_region->bottom();
2082     _complete_top_at_mark_starts_base[new_region_index] = new_region->bottom();
2083 
2084     _free_regions->add_region(new_region);
2085   }
2086 }
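The assert in grow_heap_by() encodes an append-order invariant: regions are added strictly in index order, so the next region's number must equal the current active-region count. A small sketch of that invariant with simplified, illustrative types:

#include <cassert>
#include <cstddef>
#include <vector>

struct MiniRegion { size_t region_number; };

// Append-only growth: the new index must equal the current region count,
// the same "must match" check as the assert above.
static void add_region(std::vector<MiniRegion>& ordered, size_t new_index) {
  assert(ordered.size() == new_index && "must match");
  ordered.push_back(MiniRegion{new_index});
}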
2087 
2088 void ShenandoahHeap::ensure_new_regions(size_t new_regions) {

