24 #include "memory/allocation.hpp"
25
26 #include "gc/shared/gcTimer.hpp"
27 #include "gc/shared/gcTraceTime.inline.hpp"
28 #include "gc/shared/parallelCleaning.hpp"
29
30 #include "gc/shenandoah/brooksPointer.hpp"
31 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
32 #include "gc/shenandoah/shenandoahCollectionSet.hpp"
33 #include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
34 #include "gc/shenandoah/shenandoahConcurrentMark.hpp"
35 #include "gc/shenandoah/shenandoahConcurrentMark.inline.hpp"
36 #include "gc/shenandoah/shenandoahConcurrentThread.hpp"
37 #include "gc/shenandoah/shenandoahFreeSet.hpp"
38 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
39 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
40 #include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
41 #include "gc/shenandoah/shenandoahHumongous.hpp"
42 #include "gc/shenandoah/shenandoahMarkCompact.hpp"
43 #include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
44 #include "gc/shenandoah/shenandoahRootProcessor.hpp"
45 #include "gc/shenandoah/vm_operations_shenandoah.hpp"
46
47 #include "runtime/vmThread.hpp"
48 #include "services/mallocTracker.hpp"
49
// Caches the ShenandoahHeap singleton in _heap at construction time.
SCMUpdateRefsClosure::SCMUpdateRefsClosure() : _heap(ShenandoahHeap::heap()) {}
51
52 #ifdef ASSERT
53 template <class T>
54 void AssertToSpaceClosure::do_oop_nv(T* p) {
55 T o = oopDesc::load_heap_oop(p);
56 if (! oopDesc::is_null(o)) {
57 oop obj = oopDesc::decode_heap_oop_not_null(o);
58 assert(oopDesc::unsafe_equals(obj, ShenandoahBarrierSet::resolve_oop_static_not_null(obj)),
59 "need to-space object here obj: "PTR_FORMAT" , rb(obj): "PTR_FORMAT", p: "PTR_FORMAT,
60 p2i(obj), p2i(ShenandoahBarrierSet::resolve_oop_static_not_null(obj)), p2i(p));
61 }
62 }
63
247 if (ShenandoahAlwaysPreTouch) {
248 assert (!AlwaysPreTouch, "Should have been overridden");
249
250 // For NUMA, it is important to pre-touch the storage under bitmaps with worker threads,
251 // before initialize() below zeroes it with initializing thread. For any given region,
252 // we touch the region and the corresponding bitmaps from the same thread.
253
254 log_info(gc, heap)("Parallel pretouch " SIZE_FORMAT " regions with " SIZE_FORMAT " byte pages",
255 _ordered_regions->count(), page_size);
256 ShenandoahPretouchTask cl(_ordered_regions, bitmap0.base(), bitmap1.base(), bitmap_size, page_size);
257 _workers->run_task(&cl);
258 }
259
260 _mark_bit_map0.initialize(heap_region, bitmap_region0);
261 _complete_mark_bit_map = &_mark_bit_map0;
262
263 _mark_bit_map1.initialize(heap_region, bitmap_region1);
264 _next_mark_bit_map = &_mark_bit_map1;
265
266 _connection_matrix = new ShenandoahConnectionMatrix(_max_regions);
267
268 _monitoring_support = new ShenandoahMonitoringSupport(this);
269
270 _concurrent_gc_thread = new ShenandoahConcurrentThread();
271
272 ShenandoahMarkCompact::initialize();
273
274 return JNI_OK;
275 }
276
277 ShenandoahHeap::ShenandoahHeap(ShenandoahCollectorPolicy* policy) :
278 CollectedHeap(),
279 _shenandoah_policy(policy),
280 _concurrent_mark_in_progress(0),
281 _evacuation_in_progress(0),
282 _full_gc_in_progress(false),
283 _free_regions(NULL),
284 _collection_set(NULL),
285 _bytes_allocated_since_cm(0),
286 _bytes_allocated_during_cm(0),
2508 bool doHeapRegion(ShenandoahHeapRegion* r) {
2509 if (! r->is_humongous() && ! r->is_pinned() && ! r->in_collection_set()) {
2510 _garbage += r->garbage();
2511 }
2512 return false;
2513 }
2514
2515 size_t garbage() {
2516 return _garbage;
2517 }
2518 };
2519
2520 size_t ShenandoahHeap::garbage() {
2521 ShenandoahCountGarbageClosure cl;
2522 heap_region_iterate(&cl);
2523 return cl.garbage();
2524 }
2525
// Accessor for the region connection matrix allocated during heap
// initialization.
ShenandoahConnectionMatrix* ShenandoahHeap::connection_matrix() {
  return _connection_matrix;
}
2529
2530 #ifdef ASSERT
void ShenandoahHeap::assert_heaplock_owned_by_current_thread() {
  // Debug-only check: the heap lock must be held, and held by this thread.
  assert(_heap_lock == locked, "must be locked");
  assert(_heap_lock_owner == Thread::current(), "must be owned by current thread");
}
2535
void ShenandoahHeap::assert_heaplock_or_safepoint() {
  // Debug-only check: either this thread owns the heap lock, or the caller
  // is the VM thread executing inside a safepoint.
  Thread* thr = Thread::current();
  assert((_heap_lock == locked && _heap_lock_owner == thr) ||
         (SafepointSynchronize::is_at_safepoint() && thr->is_VM_thread()),
         "must own heap lock or by VM thread at safepoint");
}
2542
2543 #endif
|
24 #include "memory/allocation.hpp"
25
26 #include "gc/shared/gcTimer.hpp"
27 #include "gc/shared/gcTraceTime.inline.hpp"
28 #include "gc/shared/parallelCleaning.hpp"
29
30 #include "gc/shenandoah/brooksPointer.hpp"
31 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
32 #include "gc/shenandoah/shenandoahCollectionSet.hpp"
33 #include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
34 #include "gc/shenandoah/shenandoahConcurrentMark.hpp"
35 #include "gc/shenandoah/shenandoahConcurrentMark.inline.hpp"
36 #include "gc/shenandoah/shenandoahConcurrentThread.hpp"
37 #include "gc/shenandoah/shenandoahFreeSet.hpp"
38 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
39 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
40 #include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
41 #include "gc/shenandoah/shenandoahHumongous.hpp"
42 #include "gc/shenandoah/shenandoahMarkCompact.hpp"
43 #include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
44 #include "gc/shenandoah/shenandoahPartialGC.hpp"
45 #include "gc/shenandoah/shenandoahRootProcessor.hpp"
46 #include "gc/shenandoah/vm_operations_shenandoah.hpp"
47
48 #include "runtime/vmThread.hpp"
49 #include "services/mallocTracker.hpp"
50
// Caches the ShenandoahHeap singleton in _heap at construction time.
SCMUpdateRefsClosure::SCMUpdateRefsClosure() : _heap(ShenandoahHeap::heap()) {}
52
53 #ifdef ASSERT
54 template <class T>
55 void AssertToSpaceClosure::do_oop_nv(T* p) {
56 T o = oopDesc::load_heap_oop(p);
57 if (! oopDesc::is_null(o)) {
58 oop obj = oopDesc::decode_heap_oop_not_null(o);
59 assert(oopDesc::unsafe_equals(obj, ShenandoahBarrierSet::resolve_oop_static_not_null(obj)),
60 "need to-space object here obj: "PTR_FORMAT" , rb(obj): "PTR_FORMAT", p: "PTR_FORMAT,
61 p2i(obj), p2i(ShenandoahBarrierSet::resolve_oop_static_not_null(obj)), p2i(p));
62 }
63 }
64
248 if (ShenandoahAlwaysPreTouch) {
249 assert (!AlwaysPreTouch, "Should have been overridden");
250
251 // For NUMA, it is important to pre-touch the storage under bitmaps with worker threads,
252 // before initialize() below zeroes it with initializing thread. For any given region,
253 // we touch the region and the corresponding bitmaps from the same thread.
254
255 log_info(gc, heap)("Parallel pretouch " SIZE_FORMAT " regions with " SIZE_FORMAT " byte pages",
256 _ordered_regions->count(), page_size);
257 ShenandoahPretouchTask cl(_ordered_regions, bitmap0.base(), bitmap1.base(), bitmap_size, page_size);
258 _workers->run_task(&cl);
259 }
260
261 _mark_bit_map0.initialize(heap_region, bitmap_region0);
262 _complete_mark_bit_map = &_mark_bit_map0;
263
264 _mark_bit_map1.initialize(heap_region, bitmap_region1);
265 _next_mark_bit_map = &_mark_bit_map1;
266
267 _connection_matrix = new ShenandoahConnectionMatrix(_max_regions);
268 _partial_gc = new ShenandoahPartialGC(this, _max_regions);
269
270 _monitoring_support = new ShenandoahMonitoringSupport(this);
271
272 _concurrent_gc_thread = new ShenandoahConcurrentThread();
273
274 ShenandoahMarkCompact::initialize();
275
276 return JNI_OK;
277 }
278
279 ShenandoahHeap::ShenandoahHeap(ShenandoahCollectorPolicy* policy) :
280 CollectedHeap(),
281 _shenandoah_policy(policy),
282 _concurrent_mark_in_progress(0),
283 _evacuation_in_progress(0),
284 _full_gc_in_progress(false),
285 _free_regions(NULL),
286 _collection_set(NULL),
287 _bytes_allocated_since_cm(0),
288 _bytes_allocated_during_cm(0),
2510 bool doHeapRegion(ShenandoahHeapRegion* r) {
2511 if (! r->is_humongous() && ! r->is_pinned() && ! r->in_collection_set()) {
2512 _garbage += r->garbage();
2513 }
2514 return false;
2515 }
2516
2517 size_t garbage() {
2518 return _garbage;
2519 }
2520 };
2521
2522 size_t ShenandoahHeap::garbage() {
2523 ShenandoahCountGarbageClosure cl;
2524 heap_region_iterate(&cl);
2525 return cl.garbage();
2526 }
2527
// Accessor for the region connection matrix allocated during heap
// initialization.
ShenandoahConnectionMatrix* ShenandoahHeap::connection_matrix() {
  return _connection_matrix;
}
2531
// Accessor for the partial-GC driver allocated during heap initialization.
ShenandoahPartialGC* ShenandoahHeap::partial_gc() {
  return _partial_gc;
}
2535
2536 void ShenandoahHeap::do_partial_collection() {
2537 {
2538 ShenandoahHeapLock lock(this);
2539 partial_gc()->prepare();
2540 }
2541 partial_gc()->do_partial_collection();
2542 }
2543
2544 #ifdef ASSERT
void ShenandoahHeap::assert_heaplock_owned_by_current_thread() {
  // Debug-only check: the heap lock must be held, and held by this thread.
  assert(_heap_lock == locked, "must be locked");
  assert(_heap_lock_owner == Thread::current(), "must be owned by current thread");
}
2549
void ShenandoahHeap::assert_heaplock_or_safepoint() {
  // Debug-only check: either this thread owns the heap lock, or the caller
  // is the VM thread executing inside a safepoint.
  Thread* thr = Thread::current();
  assert((_heap_lock == locked && _heap_lock_owner == thr) ||
         (SafepointSynchronize::is_at_safepoint() && thr->is_VM_thread()),
         "must own heap lock or by VM thread at safepoint");
}
2556
2557 #endif
|