11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "gc/g1/g1Allocator.inline.hpp"
27 #include "gc/g1/g1AllocRegion.inline.hpp"
28 #include "gc/g1/g1EvacStats.inline.hpp"
29 #include "gc/g1/g1EvacuationInfo.hpp"
30 #include "gc/g1/g1CollectedHeap.inline.hpp"
31 #include "gc/g1/g1Policy.hpp"
32 #include "gc/g1/heapRegion.inline.hpp"
33 #include "gc/g1/heapRegionSet.inline.hpp"
34 #include "gc/g1/heapRegionType.hpp"
35 #include "utilities/align.hpp"
36
37 G1Allocator::G1Allocator(G1CollectedHeap* heap) :
38 _g1h(heap),
39 _survivor_is_full(false),
40 _old_is_full(false),
41 _mutator_alloc_region(),
42 _survivor_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Young)),
43 _old_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Old)),
44 _retained_old_gc_alloc_region(NULL) {
45 }
46
47 void G1Allocator::init_mutator_alloc_region() {
48 assert(_mutator_alloc_region.get() == NULL, "pre-condition");
49 _mutator_alloc_region.init();
50 }
51
52 void G1Allocator::release_mutator_alloc_region() {
53 _mutator_alloc_region.release();
54 assert(_mutator_alloc_region.get() == NULL, "post-condition");
55 }
56
57 bool G1Allocator::is_retained_old_region(HeapRegion* hr) {
58 return _retained_old_gc_alloc_region == hr;
59 }
60
61 void G1Allocator::reuse_retained_old_region(G1EvacuationInfo& evacuation_info,
62 OldGCAllocRegion* old,
63 HeapRegion** retained_old) {
64 HeapRegion* retained_region = *retained_old;
65 *retained_old = NULL;
66 assert(retained_region == NULL || !retained_region->is_archive(),
67 "Archive region should not be alloc region (index %u)", retained_region->hrm_index());
68
69 // We will discard the current GC alloc region if:
70 // a) it's in the collection set (it can happen!),
71 // b) it's already full (no point in using it),
72 // c) it's empty (this means that it was emptied during
73 // a cleanup and it should be on the free list now), or
74 // d) it's humongous (this means that it was emptied
129 bool G1Allocator::old_is_full() const {
130 return _old_is_full;
131 }
132
133 void G1Allocator::set_survivor_full() {
134 _survivor_is_full = true;
135 }
136
137 void G1Allocator::set_old_full() {
138 _old_is_full = true;
139 }
140
141 size_t G1Allocator::unsafe_max_tlab_alloc() {
142 // Return the remaining space in the cur alloc region, but not less than
143 // the min TLAB size.
144
145 // Also, this value can be at most the humongous object threshold,
146 // since we can't allow tlabs to grow big enough to accommodate
147 // humongous objects.
148
149 HeapRegion* hr = mutator_alloc_region()->get();
150 size_t max_tlab = _g1h->max_tlab_size() * wordSize;
151 if (hr == NULL) {
152 return max_tlab;
153 } else {
154 return MIN2(MAX2(hr->free(), (size_t) MinTLABSize), max_tlab);
155 }
156 }
157
158 size_t G1Allocator::used_in_alloc_regions() {
159 assert(Heap_lock->owner() != NULL, "Should be owned on this thread's behalf.");
160 return mutator_alloc_region()->used_in_alloc_regions();
161 }
162
163
164 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
165 size_t word_size) {
166 size_t temp = 0;
167 HeapWord* result = par_allocate_during_gc(dest, word_size, word_size, &temp);
168 assert(result == NULL || temp == word_size,
169 "Requested " SIZE_FORMAT " words, but got " SIZE_FORMAT " at " PTR_FORMAT,
170 word_size, temp, p2i(result));
171 return result;
172 }
173
174 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
175 size_t min_word_size,
176 size_t desired_word_size,
177 size_t* actual_word_size) {
178 switch (dest.type()) {
179 case G1HeapRegionAttr::Young:
180 return survivor_attempt_allocation(min_word_size, desired_word_size, actual_word_size);
|
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "gc/g1/g1Allocator.inline.hpp"
27 #include "gc/g1/g1AllocRegion.inline.hpp"
28 #include "gc/g1/g1EvacStats.inline.hpp"
29 #include "gc/g1/g1EvacuationInfo.hpp"
30 #include "gc/g1/g1CollectedHeap.inline.hpp"
31 #include "gc/g1/g1NUMA.hpp"
32 #include "gc/g1/g1Policy.hpp"
33 #include "gc/g1/heapRegion.inline.hpp"
34 #include "gc/g1/heapRegionSet.inline.hpp"
35 #include "gc/g1/heapRegionType.hpp"
36 #include "utilities/align.hpp"
37
38 G1Allocator::G1Allocator(G1CollectedHeap* heap) :
39 _g1h(heap),
40 _survivor_is_full(false),
41 _old_is_full(false),
42 _num_alloc_regions(heap->mem_node_mgr()->num_active_nodes()),
43 _mutator_alloc_regions(NULL),
44 _survivor_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Young)),
45 _old_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Old)),
46 _retained_old_gc_alloc_region(NULL) {
47
48 _mutator_alloc_regions = NEW_C_HEAP_ARRAY(MutatorAllocRegion, _num_alloc_regions, mtGC);
49 for (uint i = 0; i < _num_alloc_regions; i++) {
50 ::new(_mutator_alloc_regions + i) MutatorAllocRegion(i);
51 }
52 }
53
54 G1Allocator::~G1Allocator() {
55 for (uint i = 0; i < _num_alloc_regions; i++) {
56 _mutator_alloc_regions[i].~MutatorAllocRegion();
57 }
58 FREE_C_HEAP_ARRAY(MutatorAllocRegion, _mutator_alloc_regions);
59 }
60
#ifdef ASSERT
// Debug-only: whether the mutator alloc region for the current thread's NUMA
// node currently has a region attached.
bool G1Allocator::has_mutator_alloc_region() {
  return mutator_alloc_region(current_node_index())->get() != NULL;
}
#endif // ASSERT
67
68 void G1Allocator::init_mutator_alloc_regions() {
69 for (uint i = 0; i < _num_alloc_regions; i++) {
70 assert(mutator_alloc_region(i)->get() == NULL, "pre-condition");
71 mutator_alloc_region(i)->init();
72 }
73 }
74
75 void G1Allocator::release_mutator_alloc_regions() {
76 for (uint i = 0; i < _num_alloc_regions; i++) {
77 mutator_alloc_region(i)->release();
78 assert(mutator_alloc_region(i)->get() == NULL, "post-condition");
79 }
80 }
81
82 bool G1Allocator::is_retained_old_region(HeapRegion* hr) {
83 return _retained_old_gc_alloc_region == hr;
84 }
85
86 void G1Allocator::reuse_retained_old_region(G1EvacuationInfo& evacuation_info,
87 OldGCAllocRegion* old,
88 HeapRegion** retained_old) {
89 HeapRegion* retained_region = *retained_old;
90 *retained_old = NULL;
91 assert(retained_region == NULL || !retained_region->is_archive(),
92 "Archive region should not be alloc region (index %u)", retained_region->hrm_index());
93
94 // We will discard the current GC alloc region if:
95 // a) it's in the collection set (it can happen!),
96 // b) it's already full (no point in using it),
97 // c) it's empty (this means that it was emptied during
98 // a cleanup and it should be on the free list now), or
99 // d) it's humongous (this means that it was emptied
154 bool G1Allocator::old_is_full() const {
155 return _old_is_full;
156 }
157
158 void G1Allocator::set_survivor_full() {
159 _survivor_is_full = true;
160 }
161
162 void G1Allocator::set_old_full() {
163 _old_is_full = true;
164 }
165
166 size_t G1Allocator::unsafe_max_tlab_alloc() {
167 // Return the remaining space in the cur alloc region, but not less than
168 // the min TLAB size.
169
170 // Also, this value can be at most the humongous object threshold,
171 // since we can't allow tlabs to grow big enough to accommodate
172 // humongous objects.
173
174 uint node_index = current_node_index();
175 HeapRegion* hr = mutator_alloc_region(node_index)->get();
176 size_t max_tlab = _g1h->max_tlab_size() * wordSize;
177 if (hr == NULL) {
178 return max_tlab;
179 } else {
180 return MIN2(MAX2(hr->free(), (size_t) MinTLABSize), max_tlab);
181 }
182 }
183
184 size_t G1Allocator::used_in_alloc_regions() {
185 size_t used = 0;
186 for (uint i = 0; i < _num_alloc_regions; i++) {
187 used += mutator_alloc_region(i)->used_in_alloc_regions();
188 }
189 return used;
190 }
191
192
193 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
194 size_t word_size) {
195 size_t temp = 0;
196 HeapWord* result = par_allocate_during_gc(dest, word_size, word_size, &temp);
197 assert(result == NULL || temp == word_size,
198 "Requested " SIZE_FORMAT " words, but got " SIZE_FORMAT " at " PTR_FORMAT,
199 word_size, temp, p2i(result));
200 return result;
201 }
202
203 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
204 size_t min_word_size,
205 size_t desired_word_size,
206 size_t* actual_word_size) {
207 switch (dest.type()) {
208 case G1HeapRegionAttr::Young:
209 return survivor_attempt_allocation(min_word_size, desired_word_size, actual_word_size);
|