
src/hotspot/share/gc/g1/g1CollectedHeap.cpp

rev 52310 : imported patch 8071913-almost-done
rev 52311 : imported patch 8071913-kbarrett-review
rev 52312 : imported patch 6490394-uncommit-at-remark
rev 52313 : imported patch 6490394-sangheon-review

*** 1041,1051 ****
  
    // Prepare heap for normal collections.
    assert(num_free_regions() == 0, "we should not have added any free regions");
    rebuild_region_sets(false /* free_list_only */);
    abort_refinement();
!   resize_if_necessary_after_full_collection();
  
    // Rebuild the strong code root lists for each region
    rebuild_strong_code_roots();
  
    // Purge code root memory
--- 1041,1051 ----
  
    // Prepare heap for normal collections.
    assert(num_free_regions() == 0, "we should not have added any free regions");
    rebuild_region_sets(false /* free_list_only */);
    abort_refinement();
!   resize_heap_if_necessary();
  
    // Rebuild the strong code root lists for each region
    rebuild_strong_code_roots();
  
    // Purge code root memory
*** 1147,1157 ****
    // out by the GC locker). So, right now, we'll ignore the return value.
    bool dummy = do_full_collection(true,                /* explicit_gc */
                                    clear_all_soft_refs);
  }
  
! void G1CollectedHeap::resize_if_necessary_after_full_collection() {
    // Capacity, free and used after the GC counted as full regions to
    // include the waste in the following calculations.
    const size_t capacity_after_gc = capacity();
    const size_t used_after_gc = capacity_after_gc - unused_committed_regions_in_bytes();
--- 1147,1157 ----
    // out by the GC locker). So, right now, we'll ignore the return value.
    bool dummy = do_full_collection(true,                /* explicit_gc */
                                    clear_all_soft_refs);
  }
  
! void G1CollectedHeap::resize_heap_if_necessary() {
    // Capacity, free and used after the GC counted as full regions to
    // include the waste in the following calculations.
    const size_t capacity_after_gc = capacity();
    const size_t used_after_gc = capacity_after_gc - unused_committed_regions_in_bytes();
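The hunk above only renames the entry point. The expand/shrink decisions in the next two hunks compare capacity_after_gc against desired-capacity bounds derived from used_after_gc and the MinHeapFreeRatio / MaxHeapFreeRatio flags. A minimal standalone sketch of that derivation, assuming only that the ratios are percentages below 100; the helper name is illustrative and the real code additionally clamps against the minimum and maximum heap size:

  #include <cstddef>

  // Illustrative only: derive the committed-capacity bounds that a
  // resize_heap_if_necessary()-style policy compares against.
  // Assumes max_free_ratio < 100 so the divisor stays positive.
  static void desired_capacity_bounds(size_t used_after_gc,
                                      unsigned min_free_ratio,   // e.g. MinHeapFreeRatio
                                      unsigned max_free_ratio,   // e.g. MaxHeapFreeRatio
                                      size_t*  minimum_desired,
                                      size_t*  maximum_desired) {
    // Keep at least min_free_ratio percent of the committed heap free ...
    const double maximum_used_percentage = 1.0 - (double)min_free_ratio / 100.0;
    // ... and at most max_free_ratio percent free.
    const double minimum_used_percentage = 1.0 - (double)max_free_ratio / 100.0;

    *minimum_desired = (size_t)((double)used_after_gc / maximum_used_percentage);
    *maximum_desired = (size_t)((double)used_after_gc / minimum_used_percentage);
  }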
*** 1204,1214 ****
  
    if (capacity_after_gc < minimum_desired_capacity) {
      // Don't expand unless it's significant
      size_t expand_bytes = minimum_desired_capacity - capacity_after_gc;
  
!     log_debug(gc, ergo, heap)("Attempt heap expansion (capacity lower than min desired capacity after Full GC). "
                                "Capacity: " SIZE_FORMAT "B occupancy: " SIZE_FORMAT "B live: " SIZE_FORMAT "B "
                                "min_desired_capacity: " SIZE_FORMAT "B (" UINTX_FORMAT " %%)",
                                capacity_after_gc, used_after_gc, used(), minimum_desired_capacity, MinHeapFreeRatio);
  
      expand(expand_bytes, _workers);
--- 1204,1214 ----
  
    if (capacity_after_gc < minimum_desired_capacity) {
      // Don't expand unless it's significant
      size_t expand_bytes = minimum_desired_capacity - capacity_after_gc;
  
!     log_debug(gc, ergo, heap)("Attempt heap expansion (capacity lower than min desired capacity). "
                                "Capacity: " SIZE_FORMAT "B occupancy: " SIZE_FORMAT "B live: " SIZE_FORMAT "B "
                                "min_desired_capacity: " SIZE_FORMAT "B (" UINTX_FORMAT " %%)",
                                capacity_after_gc, used_after_gc, used(), minimum_desired_capacity, MinHeapFreeRatio);
  
      expand(expand_bytes, _workers);
*** 1216,1226 ****
      // No expansion, now see if we want to shrink
    } else if (capacity_after_gc > maximum_desired_capacity) {
      // Capacity too large, compute shrinking size
      size_t shrink_bytes = capacity_after_gc - maximum_desired_capacity;
  
!     log_debug(gc, ergo, heap)("Attempt heap shrinking (capacity higher than max desired capacity after Full GC). "
                                "Capacity: " SIZE_FORMAT "B occupancy: " SIZE_FORMAT "B live: " SIZE_FORMAT "B "
                                "maximum_desired_capacity: " SIZE_FORMAT "B (" UINTX_FORMAT " %%)",
                                capacity_after_gc, used_after_gc, used(), maximum_desired_capacity, MaxHeapFreeRatio);
  
      shrink(shrink_bytes);
--- 1216,1226 ----
      // No expansion, now see if we want to shrink
    } else if (capacity_after_gc > maximum_desired_capacity) {
      // Capacity too large, compute shrinking size
      size_t shrink_bytes = capacity_after_gc - maximum_desired_capacity;
  
!     log_debug(gc, ergo, heap)("Attempt heap shrinking (capacity higher than max desired capacity). "
                                "Capacity: " SIZE_FORMAT "B occupancy: " SIZE_FORMAT "B live: " SIZE_FORMAT "B "
                                "maximum_desired_capacity: " SIZE_FORMAT "B (" UINTX_FORMAT " %%)",
                                capacity_after_gc, used_after_gc, used(), maximum_desired_capacity, MaxHeapFreeRatio);
  
      shrink(shrink_bytes);
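For a sense of scale (hypothetical numbers, assuming the usual default flag values MinHeapFreeRatio=40 and MaxHeapFreeRatio=70): with 600 MB used after the collection, the minimum desired capacity is 600 / (1 - 0.40) = 1000 MB and the maximum desired capacity is 600 / (1 - 0.70) = 2000 MB, so an 800 MB committed heap would be expanded by 200 MB, a 2500 MB heap would be shrunk by 500 MB, and anything in between is left alone.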
*** 1392,1403 ****
  }
  
  void G1CollectedHeap::shrink(size_t shrink_bytes) {
    _verifier->verify_region_sets_optional();
  
!   // We should only reach here at the end of a Full GC which means we
!   // should not not be holding to any GC alloc regions. The method
    // below will make sure of that and do any remaining clean up.
    _allocator->abandon_gc_alloc_regions();
  
    // Instead of tearing down / rebuilding the free lists here, we
    // could instead use the remove_all_pending() method on free_list to
--- 1392,1403 ----
  }
  
  void G1CollectedHeap::shrink(size_t shrink_bytes) {
    _verifier->verify_region_sets_optional();
  
!   // We should only reach here at the end of a Full GC or during Remark which
!   // means we should not not be holding to any GC alloc regions. The method
    // below will make sure of that and do any remaining clean up.
    _allocator->abandon_gc_alloc_regions();
  
    // Instead of tearing down / rebuilding the free lists here, we
    // could instead use the remove_all_pending() method on free_list to
*** 4397,4413 ****
        assert(_old_set->is_empty(), "pre-condition");
      }
    }
  
    bool do_heap_region(HeapRegion* r) {
-     // After full GC, no region should have a remembered set.
-     r->rem_set()->clear(true);
      if (r->is_empty()) {
        // Add free regions to the free list
        r->set_free();
        _hrm->insert_into_free_list(r);
      } else if (!_free_list_only) {
  
        if (r->is_archive() || r->is_humongous()) {
          // We ignore archive and humongous regions. We left these sets unchanged.
        } else {
          assert(r->is_young() || r->is_free() || r->is_old(), "invariant");
--- 4397,4413 ----
        assert(_old_set->is_empty(), "pre-condition");
      }
    }
  
    bool do_heap_region(HeapRegion* r) {
      if (r->is_empty()) {
+       assert(r->rem_set()->is_empty(), "Empty regions should have empty remembered sets.");
        // Add free regions to the free list
        r->set_free();
        _hrm->insert_into_free_list(r);
      } else if (!_free_list_only) {
+       assert(r->rem_set()->is_empty(), "At this point remembered sets must have been cleared.");
  
        if (r->is_archive() || r->is_humongous()) {
          // We ignore archive and humongous regions. We left these sets unchanged.
        } else {
          assert(r->is_young() || r->is_free() || r->is_old(), "invariant");
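As suggested by the new assertion messages and the imported 6490394-uncommit-at-remark patch, remembered sets are now expected to have been cleared before this rebuild runs, and the rebuild is no longer tied exclusively to a Full GC; asserting emptiness instead of unconditionally clearing turns a missed clear earlier in the cycle into a debug-build failure rather than silently hiding it.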
*** 4441,4454 ****
      set_used(cl.total_used());
      if (_archive_allocator != NULL) {
        _archive_allocator->clear_used();
      }
    }
!   assert(used_unlocked() == recalculate_used(),
!          "inconsistent used_unlocked(), "
!          "value: " SIZE_FORMAT " recalculated: " SIZE_FORMAT,
!          used_unlocked(), recalculate_used());
  }
  
  bool G1CollectedHeap::is_in_closed_subset(const void* p) const {
    HeapRegion* hr = heap_region_containing(p);
    return hr->is_in(p);
--- 4441,4453 ----
      set_used(cl.total_used());
      if (_archive_allocator != NULL) {
        _archive_allocator->clear_used();
      }
    }
!   assert(used() == recalculate_used(),
!          "inconsistent used(), value: " SIZE_FORMAT " recalculated: " SIZE_FORMAT,
!          used(), recalculate_used());
  }
  
  bool G1CollectedHeap::is_in_closed_subset(const void* p) const {
    HeapRegion* hr = heap_region_containing(p);
    return hr->is_in(p);