// NOTE(review): fragment of a larger initialization routine (presumably
// ShenandoahHeap::initialize() -- the enclosing function is not visible here).
// The leading "275"-style numbers are part of the pasted artifact, not code.
// Only comments are added below; all original tokens are unchanged.

// Reserve the auxiliary marking bitmap and register the reservation with
// native memory tracking under the GC category. The region is recorded in
// HeapWords, and "special" (pre-committed large-page) status is remembered
// so later commit/uncommit can be skipped for it.
275 ReservedSpace aux_bitmap(_bitmap_size, bitmap_page_size);
276 MemTracker::record_virtual_memory_type(aux_bitmap.base(), mtGC);
277 _aux_bitmap_region = MemRegion((HeapWord*) aux_bitmap.base(), aux_bitmap.size() / HeapWordSize);
278 _aux_bitmap_region_special = aux_bitmap.special();
279 _aux_bit_map.initialize(_heap_region, _aux_bitmap_region);
280
281 //
282 // Create regions and region sets
283 //
// Each ShenandoahHeapRegion object is padded out to a cache-line multiple so
// adjacent region objects in the backing array do not share cache lines
// (avoids false sharing between regions touched by different threads).
// The total storage is then rounded up to page size and to the OS
// allocation granularity so it can be reserved/committed directly.
284 size_t region_align = align_up(sizeof(ShenandoahHeapRegion), SHENANDOAH_CACHE_LINE_SIZE);
285 size_t region_storage_size = align_up(region_align * _num_regions, region_page_size);
286 region_storage_size = align_up(region_storage_size, os::vm_allocation_granularity());
287
288 ReservedSpace region_storage(region_storage_size, region_page_size);
289 MemTracker::record_virtual_memory_type(region_storage.base(), mtGC);
// "special" reservations are already committed by the OS (e.g. large pages);
// otherwise commit explicitly, exiting the VM on failure.
290 if (!region_storage.special()) {
291 os::commit_memory_or_exit(region_storage.base(), region_storage_size, region_page_size, false,
292 "Cannot commit region memory");
293 }
294
// Pointer array over the regions, plus the free set and the collection set.
// In this version the collection set is sized from the heap reservation
// (sh_rs) directly; see the revised version for the low-address variant.
295 _regions = NEW_C_HEAP_ARRAY(ShenandoahHeapRegion*, _num_regions, mtGC);
296 _free_set = new ShenandoahFreeSet(this, _num_regions);
297 _collection_set = new ShenandoahCollectionSet(this, sh_rs.base(), sh_rs.size());
298
299 {
// Heap lock is taken while regions are created and published.
300 ShenandoahHeapLocker locker(lock());
301
302 for (size_t i = 0; i < _num_regions; i++) {
// Region i covers [base + i*region_size_words, ...); only the first
// num_committed_regions are committed up front.
303 HeapWord* start = (HeapWord*)sh_rs.base() + ShenandoahHeapRegion::region_size_words() * i;
304 bool is_committed = i < num_committed_regions;
// Placement-new the region object into its cache-line-aligned slot of
// the pre-reserved region_storage block (regions are never freed
// individually, so no matching destructor call is needed here).
305 void* loc = region_storage.base() + i * region_align;
306
307 ShenandoahHeapRegion* r = new (loc) ShenandoahHeapRegion(start, i, is_committed);
308 assert(is_aligned(r, SHENANDOAH_CACHE_LINE_SIZE), "Sanity");
309
310 _marking_context->initialize_top_at_mark_start(r);
311 _regions[i] = r;
312 assert(!collection_set()->is_in(i), "New region should not be in collection set");
313 }
314
315 // Initialize to complete
316 _marking_context->mark_complete();
317
|
// NOTE(review): revised version of the same initialization fragment (the
// enclosing function is not visible here). The functional change versus the
// first version is the collection-set construction: instead of sizing it from
// the heap reservation inline, the cset bitmap gets its own reservation,
// preferably at a low power-of-two address. Only comments are added below;
// all original tokens are unchanged.

// Reserve the auxiliary marking bitmap and register it with NMT (GC category);
// record the region in HeapWords and remember "special" (pre-committed) status.
275 ReservedSpace aux_bitmap(_bitmap_size, bitmap_page_size);
276 MemTracker::record_virtual_memory_type(aux_bitmap.base(), mtGC);
277 _aux_bitmap_region = MemRegion((HeapWord*) aux_bitmap.base(), aux_bitmap.size() / HeapWordSize);
278 _aux_bitmap_region_special = aux_bitmap.special();
279 _aux_bit_map.initialize(_heap_region, _aux_bitmap_region);
280
281 //
282 // Create regions and region sets
283 //
// Pad each region object to a cache-line multiple (avoids false sharing in
// the contiguous region storage), then round total storage to page size and
// OS allocation granularity for the reservation below.
284 size_t region_align = align_up(sizeof(ShenandoahHeapRegion), SHENANDOAH_CACHE_LINE_SIZE);
285 size_t region_storage_size = align_up(region_align * _num_regions, region_page_size);
286 region_storage_size = align_up(region_storage_size, os::vm_allocation_granularity());
287
288 ReservedSpace region_storage(region_storage_size, region_page_size);
289 MemTracker::record_virtual_memory_type(region_storage.base(), mtGC);
// "special" reservations are already committed; otherwise commit now or exit.
290 if (!region_storage.special()) {
291 os::commit_memory_or_exit(region_storage.base(), region_storage_size, region_page_size, false,
292 "Cannot commit region memory");
293 }
294
295 // Try to fit the collection set bitmap at lower addresses. This optimizes code generation for cset checks.
296 // Go up until a sensible limit and try to reserve the space there.
297 // If not successful, bite a bullet and allocate at whatever address.
298 {
// The cset bitmap is indexed by (address >> region_size_bytes_shift), so its
// size is derived from the end address of the heap reservation, rounded up
// to the reservation alignment.
299 int cset_align = MAX2(os::vm_page_size(), os::vm_allocation_granularity());
300 uintx cset_size = align_up(((uintx) sh_rs.base() + sh_rs.size()) >> ShenandoahHeapRegion::region_size_bytes_shift(), cset_align);
// Walk candidate base addresses as increasing powers of two, starting at the
// smallest aligned non-zero address and stopping at 2^31. A low, small base
// address presumably lets the JIT emit shorter/cheaper cset-membership
// checks -- NOTE(review): the "optimizes code generation" claim comes from
// the comment above; confirm against the cset-check code paths.
301 uintptr_t min = round_up_power_of_2(cset_align);
302 uintptr_t max = (1u << 31u);
303 for (uintptr_t cset_base = min; cset_base <= max; cset_base <<= 1u) {
// Attempt a reservation exactly at cset_base; an unsatisfiable hint leaves
// cset_rs unreserved and we try the next power of two.
304 ReservedSpace cset_rs(cset_size, os::vm_page_size(), false, (char*) cset_base);
305 if (cset_rs.is_reserved()) {
306 assert((char*)cset_base == cset_rs.base(), "Addresses should agree: " PTR_FORMAT ", " PTR_FORMAT, cset_base, p2i(cset_rs.base()));
307 _collection_set = new ShenandoahCollectionSet(this, cset_rs, sh_rs.base());
308 break;
309 }
310 }
311
// Fallback: no low address worked; reserve anywhere. Relies on _collection_set
// being NULL-initialized before this fragment -- not visible here, verify.
312 if (_collection_set == NULL) {
313 ReservedSpace cset_rs(cset_size, os::vm_page_size(), false);
314 _collection_set = new ShenandoahCollectionSet(this, cset_rs, sh_rs.base());
315 }
316 }
317
// Pointer array over the regions plus the free set. (The collection set was
// already constructed above, unlike the previous version of this code.)
318 _regions = NEW_C_HEAP_ARRAY(ShenandoahHeapRegion*, _num_regions, mtGC);
319 _free_set = new ShenandoahFreeSet(this, _num_regions);
320
321 {
// Heap lock is taken while regions are created and published.
322 ShenandoahHeapLocker locker(lock());
323
324 for (size_t i = 0; i < _num_regions; i++) {
// Region i covers [base + i*region_size_words, ...); only the first
// num_committed_regions are committed up front.
325 HeapWord* start = (HeapWord*)sh_rs.base() + ShenandoahHeapRegion::region_size_words() * i;
326 bool is_committed = i < num_committed_regions;
// Placement-new into the cache-line-aligned slot of region_storage;
// regions are never destroyed individually, so no destructor call pairs
// with this placement new.
327 void* loc = region_storage.base() + i * region_align;
328
329 ShenandoahHeapRegion* r = new (loc) ShenandoahHeapRegion(start, i, is_committed);
330 assert(is_aligned(r, SHENANDOAH_CACHE_LINE_SIZE), "Sanity");
331
332 _marking_context->initialize_top_at_mark_start(r);
333 _regions[i] = r;
334 assert(!collection_set()->is_in(i), "New region should not be in collection set");
335 }
336
337 // Initialize to complete
338 _marking_context->mark_complete();
339
|