/*
 * Copyright (c) 2005, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/parallel/parMarkBitMap.inline.hpp"
#include "gc/parallel/psCompactionManager.inline.hpp"
#include "gc/parallel/psParallelCompact.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "services/memTracker.hpp"
#include "utilities/align.hpp"
#include "utilities/bitMap.inline.hpp"

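// Reserve and commit the backing store for the bitmap covering covered_region.
// The single reservation is split evenly into two views: _beg_bits, in which a
// set bit marks the first word of a live object, and _end_bits, in which a set
// bit marks its last word. Returns false if the space cannot be reserved or
// committed.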
bool
ParMarkBitMap::initialize(MemRegion covered_region)
{
  const idx_t bits = bits_required(covered_region);
  // The bits will be divided evenly between two bitmaps; each of them should be
  // an integral number of words.
  assert(bits % (BitsPerWord * 2) == 0, "region size unaligned");

  const size_t words = bits / BitsPerWord;
  const size_t raw_bytes = words * sizeof(idx_t);
  const size_t page_sz = os::page_size_for_region_aligned(raw_bytes, 10);
  const size_t granularity = os::vm_allocation_granularity();
  _reserved_byte_size = align_up(raw_bytes, MAX2(page_sz, granularity));

  const size_t rs_align = page_sz == (size_t) os::vm_page_size() ? 0 :
    MAX2(page_sz, granularity);
  ReservedSpace rs(_reserved_byte_size, rs_align, rs_align > 0);
  os::trace_page_sizes("Mark Bitmap", raw_bytes, raw_bytes, page_sz,
                       rs.base(), rs.size());

  MemTracker::record_virtual_memory_type((address)rs.base(), mtGC);

  _virtual_space = new PSVirtualSpace(rs, page_sz);
  if (_virtual_space != NULL && _virtual_space->expand_by(_reserved_byte_size)) {
    _region_start = covered_region.start();
    _region_size = covered_region.word_size();
    BitMap::bm_word_t* map = (BitMap::bm_word_t*)_virtual_space->reserved_low_addr();
    _beg_bits = BitMapView(map, bits / 2);
    _end_bits = BitMapView(map + words / 2, bits / 2);
    return true;
  }

  _region_start = 0;
  _region_size = 0;
  if (_virtual_space != NULL) {
    delete _virtual_space;
    _virtual_space = NULL;
    // Release memory reserved in the space.
    rs.release();
  }
  return false;
}

#ifdef ASSERT
extern size_t mark_bitmap_count;
extern size_t mark_bitmap_size;
#endif // #ifdef ASSERT

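// Mark the object starting at addr and spanning size words: atomically set the
// begin bit for its first word and the end bit for its last word. Returns true
// if this call marked the object, false if it was already marked.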
bool
ParMarkBitMap::mark_obj(HeapWord* addr, size_t size)
{
  const idx_t beg_bit = addr_to_bit(addr);
  if (_beg_bits.par_set_bit(beg_bit)) {
    const idx_t end_bit = addr_to_bit(addr + size - 1);
    bool end_bit_ok = _end_bits.par_set_bit(end_bit);
    assert(end_bit_ok, "concurrency problem");
    DEBUG_ONLY(Atomic::inc(&mark_bitmap_count));
    DEBUG_ONLY(Atomic::add(size, &mark_bitmap_size));
    return true;
  }
  return false;
}

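// The ParCompactionManager caches the result of the most recent
// live_words_in_range() query. The helpers below test whether the cached
// result applies to a query starting at beg_addr and record a new cached
// result.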
inline bool
ParMarkBitMap::is_live_words_in_range_in_cache(ParCompactionManager* cm, HeapWord* beg_addr) const {
  return cm->last_query_begin() == beg_addr;
}

inline void
ParMarkBitMap::update_live_words_in_range_cache(ParCompactionManager* cm, HeapWord* beg_addr, oop end_obj, size_t result) const {
  cm->set_last_query_begin(beg_addr);
  cm->set_last_query_object(end_obj);
  cm->set_last_query_return(result);
}

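// Count the live words in the range [beg_addr, end_obj) by walking successive
// (begin bit, end bit) pairs; end_obj itself is not included in the count.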
size_t
ParMarkBitMap::live_words_in_range_helper(HeapWord* beg_addr, oop end_obj) const
{
  assert(beg_addr <= (HeapWord*)end_obj, "bad range");
  assert(is_marked(end_obj), "end_obj must be live");

  idx_t live_bits = 0;

  // The bitmap routines require the right boundary to be word-aligned.
  const idx_t end_bit = addr_to_bit((HeapWord*)end_obj);
  const idx_t range_end = BitMap::word_align_up(end_bit);

  idx_t beg_bit = find_obj_beg(addr_to_bit(beg_addr), range_end);
  while (beg_bit < end_bit) {
    idx_t tmp_end = find_obj_end(beg_bit, range_end);
    assert(tmp_end < end_bit, "missing end bit");
    live_bits += tmp_end - beg_bit + 1;
    beg_bit = find_obj_beg(tmp_end + 1, range_end);
  }
  return bits_to_words(live_bits);
}

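// Answer a query that starts at the cached begin address by adjusting the
// cached result: extend it forward if end_obj lies beyond the cached object,
// or, if end_obj lies before it, either subtract the difference or recompute
// from beg_addr, whichever range is shorter. The cache is then refreshed.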
size_t
ParMarkBitMap::live_words_in_range_use_cache(ParCompactionManager* cm, HeapWord* beg_addr, oop end_obj) const
{
  HeapWord* last_beg = cm->last_query_begin();
  oop last_obj = cm->last_query_object();
  size_t last_ret = cm->last_query_return();
  if (end_obj > last_obj) {
    last_ret = last_ret + live_words_in_range_helper((HeapWord*)last_obj, end_obj);
    last_obj = end_obj;
  } else if (end_obj < last_obj) {
    // The cached value is for an object that is to the left (lower address) of the current
    // end_obj. Calculate back from that cached value.
    if (pointer_delta((HeapWord*)end_obj, (HeapWord*)beg_addr) > pointer_delta((HeapWord*)last_obj, (HeapWord*)end_obj)) {
      last_ret = last_ret - live_words_in_range_helper((HeapWord*)end_obj, last_obj);
    } else {
      last_ret = live_words_in_range_helper(beg_addr, end_obj);
    }
    last_obj = end_obj;
  }

  update_live_words_in_range_cache(cm, last_beg, last_obj, last_ret);
  return last_ret;
}

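// Public entry point: return the number of live words in [beg_addr, end_obj),
// reusing the per-manager cache when the query begins at the cached address.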
size_t
ParMarkBitMap::live_words_in_range(ParCompactionManager* cm, HeapWord* beg_addr, oop end_obj) const
{
  // Try to reuse result from ParCompactionManager cache first.
  if (is_live_words_in_range_in_cache(cm, beg_addr)) {
    return live_words_in_range_use_cache(cm, beg_addr, end_obj);
  }
  size_t ret = live_words_in_range_helper(beg_addr, end_obj);
  update_live_words_in_range_cache(cm, beg_addr, end_obj, ret);
  return ret;
}

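// Apply live_closure to each live object that begins in the bit range
// [range_beg, range_end). Returns incomplete if an object extends to or past
// range_end, propagates a would_overflow or full status from the closure, and
// returns complete once the whole range has been processed.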
ParMarkBitMap::IterationStatus
ParMarkBitMap::iterate(ParMarkBitMapClosure* live_closure,
                       idx_t range_beg, idx_t range_end) const
{
  DEBUG_ONLY(verify_bit(range_beg);)
  DEBUG_ONLY(verify_bit(range_end);)
  assert(range_beg <= range_end, "live range invalid");

  // The bitmap routines require the right boundary to be word-aligned.
  const idx_t search_end = BitMap::word_align_up(range_end);

  idx_t cur_beg = find_obj_beg(range_beg, search_end);
  while (cur_beg < range_end) {
    const idx_t cur_end = find_obj_end(cur_beg, search_end);
    if (cur_end >= range_end) {
      // The obj ends outside the range.
      live_closure->set_source(bit_to_addr(cur_beg));
      return incomplete;
    }

    const size_t size = obj_size(cur_beg, cur_end);
    IterationStatus status = live_closure->do_addr(bit_to_addr(cur_beg), size);
    if (status != incomplete) {
      assert(status == would_overflow || status == full, "sanity");
      return status;
    }

    // Successfully processed the object; look for the next object.
    cur_beg = find_obj_beg(cur_end + 1, search_end);
  }

  live_closure->set_source(bit_to_addr(range_end));
  return complete;
}

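// As above, but also invoke dead_closure on each run of dead space between
// live objects. Dead space is searched for up to dead_range_end, which must be
// at or beyond range_end.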
ParMarkBitMap::IterationStatus
ParMarkBitMap::iterate(ParMarkBitMapClosure* live_closure,
                       ParMarkBitMapClosure* dead_closure,
                       idx_t range_beg, idx_t range_end,
                       idx_t dead_range_end) const
{
  DEBUG_ONLY(verify_bit(range_beg);)
  DEBUG_ONLY(verify_bit(range_end);)
  DEBUG_ONLY(verify_bit(dead_range_end);)
  assert(range_beg <= range_end, "live range invalid");
  assert(range_end <= dead_range_end, "dead range invalid");

  // The bitmap routines require the right boundary to be word-aligned.
  const idx_t live_search_end = BitMap::word_align_up(range_end);
  const idx_t dead_search_end = BitMap::word_align_up(dead_range_end);

  idx_t cur_beg = range_beg;
  if (range_beg < range_end && is_unmarked(range_beg)) {
    // The range starts with dead space. Look for the next object, then fill.
    cur_beg = find_obj_beg(range_beg + 1, dead_search_end);
    const idx_t dead_space_end = MIN2(cur_beg - 1, dead_range_end - 1);
    const size_t size = obj_size(range_beg, dead_space_end);
    dead_closure->do_addr(bit_to_addr(range_beg), size);
  }

  while (cur_beg < range_end) {
    const idx_t cur_end = find_obj_end(cur_beg, live_search_end);
    if (cur_end >= range_end) {
      // The obj ends outside the range.
      live_closure->set_source(bit_to_addr(cur_beg));
      return incomplete;
    }

    const size_t size = obj_size(cur_beg, cur_end);
    IterationStatus status = live_closure->do_addr(bit_to_addr(cur_beg), size);
    if (status != incomplete) {
      assert(status == would_overflow || status == full, "sanity");
      return status;
    }

    // Look for the start of the next object.
    const idx_t dead_space_beg = cur_end + 1;
    cur_beg = find_obj_beg(dead_space_beg, dead_search_end);
    if (cur_beg > dead_space_beg) {
      // Found dead space; compute the size and invoke the dead closure.
      const idx_t dead_space_end = MIN2(cur_beg - 1, dead_range_end - 1);
      const size_t size = obj_size(dead_space_beg, dead_space_end);
      dead_closure->do_addr(bit_to_addr(dead_space_beg), size);
    }
  }

  live_closure->set_source(bit_to_addr(range_end));
  return complete;
}

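// In debug builds, check that every committed word of the bitmap is zero.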
#ifdef ASSERT
void ParMarkBitMap::verify_clear() const
{
  const idx_t* const beg = (const idx_t*)_virtual_space->committed_low_addr();
  const idx_t* const end = (const idx_t*)_virtual_space->committed_high_addr();
  for (const idx_t* p = beg; p < end; ++p) {
    assert(*p == 0, "bitmap not clear");
  }
}
#endif // #ifdef ASSERT