20 * questions.
21 *
22 */
23
24 #ifndef SHARE_VM_GC_SHENANDOAH_SHENANDOAHHEAP_HPP
25 #define SHARE_VM_GC_SHENANDOAH_SHENANDOAHHEAP_HPP
26
27 #include "gc/shared/cmBitMap.hpp"
28 #include "gc/shenandoah/shenandoahWorkGroup.hpp"
29
30 class ConcurrentGCTimer;
31
32 class ShenandoahCollectorPolicy;
33 class ShenandoahConnectionMatrix;
34 class ShenandoahHeapRegion;
35 class ShenandoahHeapRegionClosure;
36 class ShenandoahHeapRegionSet;
37 class ShenandoahCollectionSet;
38 class ShenandoahFreeSet;
39 class ShenandoahConcurrentMark;
40 class ShenandoahConcurrentThread;
41 class ShenandoahMonitoringSupport;
42
43 class SCMUpdateRefsClosure: public OopClosure {
44 private:
45 ShenandoahHeap* _heap;
46
47 template <class T>
48 inline void do_oop_work(T* p);
49
50 public:
51 SCMUpdateRefsClosure();
52
53 public:
54 inline void do_oop(oop* p);
55 inline void do_oop(narrowOop* p);
56 };
57
58 #ifdef ASSERT
59 class AssertToSpaceClosure : public OopClosure {
// Cause codes recorded when a concurrent GC cycle is cancelled
// (see the cancel_concgc(ShenandoahCancelCause) overload).
127 enum ShenandoahCancelCause {
128 _oom_evacuation,
129 _vm_stop,
130 };
131 private:
132
// Singleton heap instance (static). NOTE(review): the name "_pgc" does
// not match its type; origin of the abbreviation not visible here.
133 static ShenandoahHeap* _pgc;
// Collector policy/heuristics object; returned by shenandoahPolicy().
134 ShenandoahCollectorPolicy* _shenandoah_policy;
// Backing virtual-memory reservation for the heap; see storage().
135 VirtualSpace _storage;
136 ShenandoahHeapRegion* _first_region;
// Bottom address of the first heap region.
137 HeapWord* _first_region_bottom;
138
139 // Sortable array of regions
140 ShenandoahHeapRegionSet* _ordered_regions;
141 ShenandoahHeapRegionSet* _sorted_regions;
// Regions currently available for allocation; see free_regions().
142 ShenandoahFreeSet* _free_regions;
// Regions chosen for evacuation in the current cycle; see collection_set().
143 ShenandoahCollectionSet* _collection_set;
144
145 ShenandoahHeapRegion* _currentAllocationRegion;
// Concurrent marking engine; returned by concurrentMark().
146 ShenandoahConcurrentMark* _scm;
147
148
149
// Background thread that drives concurrent GC cycles.
150 ShenandoahConcurrentThread* _concurrent_gc_thread;
151
152 ShenandoahMonitoringSupport* _monitoring_support;
153
// Current/maximum region counts and initial heap size
// (units of _initialSize not visible here — presumably bytes; confirm).
154 size_t _num_regions;
155 size_t _max_regions;
156 size_t _initialSize;
157 #ifndef NDEBUG
// Debug-only allocation counter.
158 uint _numAllocs;
159 #endif
160 uint _max_workers;
161
// Gang of worker threads for parallel GC phases; see workers().
162 ShenandoahWorkGang* _workers;
163
164
// Bytes currently in use; volatile because it is updated concurrently
// via increase_used()/decrease_used().
165 volatile size_t _used;
166
// The two mark bitmaps; their "next"/"complete" roles are exchanged by
// swap_mark_bitmaps().
167 CMBitMap _mark_bit_map0;
168 CMBitMap _mark_bit_map1;
// Address of the cancellation flag — for direct access from generated
// code, presumably; confirm against the users of this address.
282 static address cancelled_concgc_addr();
283
// Accessor for the collector policy object.
284 ShenandoahCollectorPolicy *shenandoahPolicy() { return _shenandoah_policy;}
285
// --- Region lookup ---
286 inline ShenandoahHeapRegion* heap_region_containing(const void* addr) const;
287 inline uint heap_region_index_containing(const void* addr) const;
288 inline bool requires_marking(const void* entry) const;
// Presumably updates *p to the forwarded location of its referent and
// returns the resulting oop; see the inline definition.
289 template <class T>
290 inline oop maybe_update_oop_ref(T* p);
291
292 void recycle_dirty_regions();
293
// --- Concurrent marking control ---
294 void start_concurrent_marking();
295 void stop_concurrent_marking();
296 inline bool concurrent_mark_in_progress();
// Address of the mark-in-progress flag (for generated code, presumably).
297 static address concurrent_mark_in_progress_addr();
298
// --- Evacuation control ---
299 void prepare_for_concurrent_evacuation();
300 void evacuate_and_update_roots();
301
302 void verify_matrix();
303 private:
304 void set_evacuation_in_progress(bool in_progress);
305 public:
306 inline bool is_evacuation_in_progress();
307 void set_evacuation_in_progress_concurrently(bool in_progress);
308 void set_evacuation_in_progress_at_safepoint(bool in_progress);
309
310 void set_full_gc_in_progress(bool in_progress);
311 bool is_full_gc_in_progress() const;
312
// Whether references still need updating after the last evacuation.
313 inline bool need_update_refs() const;
314 void set_need_update_refs(bool update_refs);
315
// --- Collection-set membership ---
316 inline bool region_in_collection_set(size_t region_index) const;
317
318 void set_region_in_collection_set(size_t region_index, bool b);
319
320 // Mainly there to avoid accidentally calling the templated
321 // method below with ShenandoahHeapRegion* which would be *wrong*.
// NOTE(review): the non-template overload referred to above, and the
// template parameter list of the following method, are elided from
// this chunk (original lines 322-324).
325 inline bool in_collection_set(T obj) const;
326
327 void clear_cset_fast_test();
328
// --- Top-at-mark-start queries for the "next" (in-progress) marking ---
329 inline bool allocated_after_next_mark_start(HeapWord* addr) const;
330 void set_next_top_at_mark_start(HeapWord* region_base, HeapWord* addr);
331 HeapWord* next_top_at_mark_start(HeapWord* region_base);
332
// --- Same, for the "complete" (previous) marking ---
333 inline bool allocated_after_complete_mark_start(HeapWord* addr) const;
334 void set_complete_top_at_mark_start(HeapWord* region_base, HeapWord* addr);
335 HeapWord* complete_top_at_mark_start(HeapWord* region_base);
336
// Evacuate src on behalf of 'thread'; returns the new location
// (see inline definition).
337 inline oop evacuate_object(oop src, Thread* thread);
// --- GC cancellation flag ---
338 inline bool cancelled_concgc() const;
339 inline void set_cancelled_concgc(bool v);
// NOTE(review): a "try_cancel" operation declared const looks wrong —
// it presumably mutates cancellation state; confirm the qualifier.
340 inline bool try_cancel_concgc() const;
341 void clear_cancelled_concgc();
342
// --- Region sets and usage accounting ---
343 ShenandoahHeapRegionSet* regions() { return _ordered_regions;}
344 ShenandoahFreeSet* free_regions();
345 void clear_free_regions();
346 void add_free_region(ShenandoahHeapRegion* r);
347
348 ShenandoahConnectionMatrix* connection_matrix();
349
350 void increase_used(size_t bytes);
351 void decrease_used(size_t bytes);
352
353 void set_used(size_t bytes);
// Recomputes used bytes from scratch (camelCase name kept for
// compatibility with existing callers).
354 size_t calculateUsed();
355
356 size_t garbage();
357
// --- Mark bitmap management ---
358 void reset_next_mark_bitmap(WorkGang* gang);
359 void reset_complete_mark_bitmap(WorkGang* gang);
360
361 CMBitMap* complete_mark_bit_map();
362 CMBitMap* next_mark_bit_map();
363 inline bool is_marked_complete(oop obj) const;
364 inline bool mark_next(oop obj) const;
// NOTE(review): original lines 365-374 are elided from this chunk; the
// template parameter list of the following method is among them.
375 inline oop maybe_update_oop_ref_not_null(T* p, oop obj);
376
// --- Debug printing helpers ---
377 void print_heap_regions(outputStream* st = tty) const;
378 void print_all_refs(const char* prefix);
379 void print_heap_locations(HeapWord* start, HeapWord* end);
380
381 void calculate_matrix(int* connections);
382 void print_matrix(int* connections);
383
384 size_t bytes_allocated_since_cm();
385 void set_bytes_allocated_since_cm(size_t bytes);
386
387 size_t max_allocated_gc();
388
389 void reclaim_humongous_region_at(ShenandoahHeapRegion* r);
390
391 VirtualSpace* storage() const;
392
393 ShenandoahMonitoringSupport* monitoring_support();
394 ShenandoahConcurrentMark* concurrentMark() { return _scm;}
395
// NOTE(review): _ref_processor is not declared in the visible part of
// this chunk; presumably declared in an elided section.
396 ReferenceProcessor* ref_processor() { return _ref_processor;}
397
398 WorkGang* workers() const { return _workers;}
399
400 uint max_workers();
401
402 void do_evacuation();
403 ShenandoahHeapRegion* next_compaction_region(const ShenandoahHeapRegion* r);
404
// Iterates all regions, optionally skipping dirty regions and/or
// humongous continuation regions.
405 void heap_region_iterate(ShenandoahHeapRegionClosure* blk, bool skip_dirty_regions = false, bool skip_humongous_continuation = false) const;
406
407 void verify_heap_after_evacuation();
408
409 // Delete entries for dead interned string and clean up unreferenced symbols
410 // in symbol table, possibly in parallel.
411 void unlink_string_and_symbol_table(BoolObjectClosure* is_alive, bool unlink_strings = true, bool unlink_symbols = true);
412
413 size_t num_regions();
414 size_t max_regions();
415
416 // TODO: consider moving this into ShenandoahHeapRegion.
417
// Visits all marked objects in 'region' with closure 'cl'.
418 template<class T>
419 inline void marked_object_iterate(ShenandoahHeapRegion* region, T* cl);
420
421 GCTimer* gc_timer() const;
422
// Exchanges the "next"/"complete" roles of the two mark bitmaps.
423 void swap_mark_bitmaps();
424
// Cancels the concurrent cycle, recording the cause (two overloads for
// the generic GCCause and the Shenandoah-specific cause codes).
425 void cancel_concgc(GCCause::Cause cause);
426 void cancel_concgc(ShenandoahCancelCause cause);
427
// --- Private allocation / evacuation / verification internals ---
431 private:
432 HeapWord* allocate_new_tlab(size_t word_size, bool mark);
433 HeapWord* allocate_memory_under_lock(size_t word_size);
434 HeapWord* allocate_memory(size_t word_size, bool evacuating);
435 // Shenandoah functionality.
436 inline HeapWord* allocate_from_gclab(Thread* thread, size_t size);
437 HeapWord* allocate_from_gclab_slow(Thread* thread, size_t size);
438 HeapWord* allocate_new_gclab(size_t word_size);
439
440 void roots_iterate(OopClosure* cl);
441
// Applies 'cl' to a single marked object; used by the marked-object
// iteration machinery.
442 template<class T>
443 inline void do_marked_object(CMBitMap* bitmap, T* cl, oop obj);
444
445 ShenandoahConcurrentThread* concurrent_thread() { return _concurrent_gc_thread; }
446
447 inline bool mark_next_no_checks(oop obj) const;
448
449 void parallel_evacuate();
450
// Atomic compare-and-exchange of an oop into a (possibly compressed)
// slot; presumably returns the value found — confirm in the inline file.
451 inline oop atomic_compare_exchange_oop(oop n, narrowOop* addr, oop c);
452 inline oop atomic_compare_exchange_oop(oop n, oop* addr, oop c);
453
454 void evacuate_region(ShenandoahHeapRegion* from_region, ShenandoahHeapRegion* to_region);
455
456 #ifdef ASSERT
457 void verify_evacuated_region(ShenandoahHeapRegion* from_region);
458 #endif
459
460 inline void copy_object(oop p, HeapWord* s, size_t words);
461 void verify_copy(oop p, oop c);
462 void verify_heap_size_consistency();
463 void verify_heap_after_marking();
464 void verify_heap_after_update_refs();
465 void verify_regions_after_update_refs();
466
467 void ref_processing_init();
468
469 GCTracer* tracer();
470 ShenandoahCollectionSet* collection_set() { return _collection_set; }
471
// NOTE(review): semantics of this predicate are not visible here; the
// 'evacuating' flag mirrors the one threaded through allocate_memory().
472 bool call_from_write_barrier(bool evacuating);
473 void grow_heap_by(size_t num_regions);
474 void ensure_new_regions(size_t num_new_regions);
475
476 void verify_evacuation(ShenandoahHeapRegion* from_region);
477 void set_concurrent_mark_in_progress(bool in_progress);
478
// Presumably ties to the _oom_evacuation cancel cause — confirm.
479 void oom_during_evacuation();
480
481 void verify_live();
482 void verify_liveness_after_concurrent_mark();
483
484 HeapWord* allocate_memory_work(size_t word_size);
485 HeapWord* allocate_large_memory(size_t word_size);
486
487 #ifdef ASSERT
// Debug-only: page-protects from-space regions to catch stray accesses,
// presumably — confirm in the implementation.
488 void set_from_region_protection(bool protect);
489 #endif
490
|
20 * questions.
21 *
22 */
23
24 #ifndef SHARE_VM_GC_SHENANDOAH_SHENANDOAHHEAP_HPP
25 #define SHARE_VM_GC_SHENANDOAH_SHENANDOAHHEAP_HPP
26
27 #include "gc/shared/cmBitMap.hpp"
28 #include "gc/shenandoah/shenandoahWorkGroup.hpp"
29
30 class ConcurrentGCTimer;
31
32 class ShenandoahCollectorPolicy;
33 class ShenandoahConnectionMatrix;
34 class ShenandoahHeapRegion;
35 class ShenandoahHeapRegionClosure;
36 class ShenandoahHeapRegionSet;
37 class ShenandoahCollectionSet;
38 class ShenandoahFreeSet;
39 class ShenandoahConcurrentMark;
40 class ShenandoahPartialGC;
41 class ShenandoahConcurrentThread;
42 class ShenandoahMonitoringSupport;
43
44 class SCMUpdateRefsClosure: public OopClosure {
45 private:
46 ShenandoahHeap* _heap;
47
48 template <class T>
49 inline void do_oop_work(T* p);
50
51 public:
52 SCMUpdateRefsClosure();
53
54 public:
55 inline void do_oop(oop* p);
56 inline void do_oop(narrowOop* p);
57 };
58
59 #ifdef ASSERT
60 class AssertToSpaceClosure : public OopClosure {
// Cause codes recorded when a concurrent GC cycle is cancelled
// (see the cancel_concgc(ShenandoahCancelCause) overload).
128 enum ShenandoahCancelCause {
129 _oom_evacuation,
130 _vm_stop,
131 };
132 private:
133
// Singleton heap instance (static). NOTE(review): the name "_pgc" does
// not match its type; origin of the abbreviation not visible here.
134 static ShenandoahHeap* _pgc;
// Collector policy/heuristics object; returned by shenandoahPolicy().
135 ShenandoahCollectorPolicy* _shenandoah_policy;
// Backing virtual-memory reservation for the heap; see storage().
136 VirtualSpace _storage;
137 ShenandoahHeapRegion* _first_region;
// Bottom address of the first heap region.
138 HeapWord* _first_region_bottom;
139
140 // Sortable array of regions
141 ShenandoahHeapRegionSet* _ordered_regions;
142 ShenandoahHeapRegionSet* _sorted_regions;
// Regions currently available for allocation; see free_regions().
143 ShenandoahFreeSet* _free_regions;
// Regions chosen for evacuation in the current cycle; see collection_set().
144 ShenandoahCollectionSet* _collection_set;
145
146 ShenandoahHeapRegion* _currentAllocationRegion;
// Concurrent marking engine; returned by concurrentMark().
147 ShenandoahConcurrentMark* _scm;
// Partial-collection engine; returned by partial_gc().
148 ShenandoahPartialGC* _partial_gc;
149
// Background thread that drives concurrent GC cycles.
150 ShenandoahConcurrentThread* _concurrent_gc_thread;
151
152 ShenandoahMonitoringSupport* _monitoring_support;
153
// Current/maximum region counts and initial heap size
// (units of _initialSize not visible here — presumably bytes; confirm).
154 size_t _num_regions;
155 size_t _max_regions;
156 size_t _initialSize;
157 #ifndef NDEBUG
// Debug-only allocation counter.
158 uint _numAllocs;
159 #endif
160 uint _max_workers;
161
// Gang of worker threads for parallel GC phases; see workers().
162 ShenandoahWorkGang* _workers;
163
164
// Bytes currently in use; volatile because it is updated concurrently
// via increase_used()/decrease_used().
165 volatile size_t _used;
166
// The two mark bitmaps; their "next"/"complete" roles are exchanged by
// swap_mark_bitmaps().
167 CMBitMap _mark_bit_map0;
168 CMBitMap _mark_bit_map1;
// Address of the cancellation flag — for direct access from generated
// code, presumably; confirm against the users of this address.
282 static address cancelled_concgc_addr();
283
// Accessor for the collector policy object.
284 ShenandoahCollectorPolicy *shenandoahPolicy() { return _shenandoah_policy;}
285
// --- Region lookup ---
286 inline ShenandoahHeapRegion* heap_region_containing(const void* addr) const;
287 inline uint heap_region_index_containing(const void* addr) const;
288 inline bool requires_marking(const void* entry) const;
// Presumably updates *p to the forwarded location of its referent and
// returns the resulting oop; see the inline definition.
289 template <class T>
290 inline oop maybe_update_oop_ref(T* p);
291
292 void recycle_dirty_regions();
293
// --- Concurrent marking control ---
294 void start_concurrent_marking();
295 void stop_concurrent_marking();
296 inline bool concurrent_mark_in_progress();
// Address of the mark-in-progress flag (for generated code, presumably).
297 static address concurrent_mark_in_progress_addr();
298
// --- Evacuation and partial-collection control ---
299 void prepare_for_concurrent_evacuation();
300 void evacuate_and_update_roots();
301
// Runs a partial collection via _partial_gc.
302 void do_partial_collection();
303 void verify_matrix();
304 private:
305 void set_evacuation_in_progress(bool in_progress);
306 public:
307 inline bool is_evacuation_in_progress();
308 void set_evacuation_in_progress_concurrently(bool in_progress);
309 void set_evacuation_in_progress_at_safepoint(bool in_progress);
310
311 void set_full_gc_in_progress(bool in_progress);
312 bool is_full_gc_in_progress() const;
313
// Whether references still need updating after the last evacuation.
314 inline bool need_update_refs() const;
315 void set_need_update_refs(bool update_refs);
316
// --- Collection-set membership ---
317 inline bool region_in_collection_set(size_t region_index) const;
318
319 void set_region_in_collection_set(size_t region_index, bool b);
320
321 // Mainly there to avoid accidentally calling the templated
322 // method below with ShenandoahHeapRegion* which would be *wrong*.
// NOTE(review): the non-template overload referred to above, and the
// template parameter list of the following method, are elided from
// this chunk (original lines 323-325).
326 inline bool in_collection_set(T obj) const;
327
328 void clear_cset_fast_test();
329
// --- Top-at-mark-start queries for the "next" (in-progress) marking ---
330 inline bool allocated_after_next_mark_start(HeapWord* addr) const;
331 void set_next_top_at_mark_start(HeapWord* region_base, HeapWord* addr);
332 HeapWord* next_top_at_mark_start(HeapWord* region_base);
333
// --- Same, for the "complete" (previous) marking ---
334 inline bool allocated_after_complete_mark_start(HeapWord* addr) const;
335 void set_complete_top_at_mark_start(HeapWord* region_base, HeapWord* addr);
336 HeapWord* complete_top_at_mark_start(HeapWord* region_base);
337
// Evacuate src on behalf of 'thread'; returns the new location
// (see inline definition).
338 inline oop evacuate_object(oop src, Thread* thread);
// --- GC cancellation flag ---
339 inline bool cancelled_concgc() const;
340 inline void set_cancelled_concgc(bool v);
// NOTE(review): a "try_cancel" operation declared const looks wrong —
// it presumably mutates cancellation state; confirm the qualifier.
341 inline bool try_cancel_concgc() const;
342 void clear_cancelled_concgc();
343
// --- Region sets and usage accounting ---
344 ShenandoahHeapRegionSet* regions() { return _ordered_regions;}
345 ShenandoahFreeSet* free_regions();
346 ShenandoahCollectionSet* collection_set() { return _collection_set; }
347 void clear_free_regions();
348 void add_free_region(ShenandoahHeapRegion* r);
349
350 ShenandoahConnectionMatrix* connection_matrix();
351
352 void increase_used(size_t bytes);
353 void decrease_used(size_t bytes);
354
355 void set_used(size_t bytes);
// Recomputes used bytes from scratch (camelCase name kept for
// compatibility with existing callers).
356 size_t calculateUsed();
357
358 size_t garbage();
359
// --- Mark bitmap management ---
360 void reset_next_mark_bitmap(WorkGang* gang);
361 void reset_complete_mark_bitmap(WorkGang* gang);
362
363 CMBitMap* complete_mark_bit_map();
364 CMBitMap* next_mark_bit_map();
365 inline bool is_marked_complete(oop obj) const;
366 inline bool mark_next(oop obj) const;
// NOTE(review): original lines 367-376 are elided from this chunk; the
// template parameter list of the following method is among them.
377 inline oop maybe_update_oop_ref_not_null(T* p, oop obj);
378
// --- Debug printing helpers ---
379 void print_heap_regions(outputStream* st = tty) const;
380 void print_all_refs(const char* prefix);
381 void print_heap_locations(HeapWord* start, HeapWord* end);
382
383 void calculate_matrix(int* connections);
384 void print_matrix(int* connections);
385
386 size_t bytes_allocated_since_cm();
387 void set_bytes_allocated_since_cm(size_t bytes);
388
389 size_t max_allocated_gc();
390
391 void reclaim_humongous_region_at(ShenandoahHeapRegion* r);
392
393 VirtualSpace* storage() const;
394
395 ShenandoahMonitoringSupport* monitoring_support();
396 ShenandoahConcurrentMark* concurrentMark() { return _scm;}
// Accessor for the partial-collection engine (_partial_gc).
397 ShenandoahPartialGC* partial_gc();
398
// NOTE(review): _ref_processor is not declared in the visible part of
// this chunk; presumably declared in an elided section.
399 ReferenceProcessor* ref_processor() { return _ref_processor;}
400
401 WorkGang* workers() const { return _workers;}
402
403 uint max_workers();
404
405 void do_evacuation();
406 ShenandoahHeapRegion* next_compaction_region(const ShenandoahHeapRegion* r);
407
// Iterates all regions, optionally skipping dirty regions and/or
// humongous continuation regions.
408 void heap_region_iterate(ShenandoahHeapRegionClosure* blk, bool skip_dirty_regions = false, bool skip_humongous_continuation = false) const;
409
410 void verify_heap_after_evacuation();
411 void verify_heap_after_marking();
412
413 // Delete entries for dead interned string and clean up unreferenced symbols
414 // in symbol table, possibly in parallel.
415 void unlink_string_and_symbol_table(BoolObjectClosure* is_alive, bool unlink_strings = true, bool unlink_symbols = true);
416
417 size_t num_regions();
418 size_t max_regions();
419
420 // TODO: consider moving this into ShenandoahHeapRegion.
421
// Visits all marked objects in 'region' with closure 'cl'.
422 template<class T>
423 inline void marked_object_iterate(ShenandoahHeapRegion* region, T* cl);
424
425 GCTimer* gc_timer() const;
426
// Exchanges the "next"/"complete" roles of the two mark bitmaps.
427 void swap_mark_bitmaps();
428
// Cancels the concurrent cycle, recording the cause (two overloads for
// the generic GCCause and the Shenandoah-specific cause codes).
429 void cancel_concgc(GCCause::Cause cause);
430 void cancel_concgc(ShenandoahCancelCause cause);
431
// --- Private allocation / evacuation / verification internals ---
435 private:
436 HeapWord* allocate_new_tlab(size_t word_size, bool mark);
437 HeapWord* allocate_memory_under_lock(size_t word_size);
438 HeapWord* allocate_memory(size_t word_size, bool evacuating);
439 // Shenandoah functionality.
440 inline HeapWord* allocate_from_gclab(Thread* thread, size_t size);
441 HeapWord* allocate_from_gclab_slow(Thread* thread, size_t size);
442 HeapWord* allocate_new_gclab(size_t word_size);
443
444 void roots_iterate(OopClosure* cl);
445
// Applies 'cl' to a single marked object; used by the marked-object
// iteration machinery.
446 template<class T>
447 inline void do_marked_object(CMBitMap* bitmap, T* cl, oop obj);
448
449 ShenandoahConcurrentThread* concurrent_thread() { return _concurrent_gc_thread; }
450
451 inline bool mark_next_no_checks(oop obj) const;
452
453 void parallel_evacuate();
454
// Atomic compare-and-exchange of an oop into a (possibly compressed)
// slot; presumably returns the value found — confirm in the inline file.
455 public:
456 inline oop atomic_compare_exchange_oop(oop n, narrowOop* addr, oop c);
457 inline oop atomic_compare_exchange_oop(oop n, oop* addr, oop c);
458
459 private:
460 void evacuate_region(ShenandoahHeapRegion* from_region, ShenandoahHeapRegion* to_region);
461
462 #ifdef ASSERT
463 void verify_evacuated_region(ShenandoahHeapRegion* from_region);
464 #endif
465
466 inline void copy_object(oop p, HeapWord* s, size_t words);
467 void verify_copy(oop p, oop c);
468 void verify_heap_size_consistency();
// NOTE(review): this `private:` is redundant — access is already
// private since the specifier at original line 459.
469 private:
470 void verify_heap_after_update_refs();
471 void verify_regions_after_update_refs();
472
473 void ref_processing_init();
474
475 GCTracer* tracer();
476
// NOTE(review): semantics of this predicate are not visible here; the
// 'evacuating' flag mirrors the one threaded through allocate_memory().
477 bool call_from_write_barrier(bool evacuating);
478 void grow_heap_by(size_t num_regions);
479 void ensure_new_regions(size_t num_new_regions);
480
481 void verify_evacuation(ShenandoahHeapRegion* from_region);
482 void set_concurrent_mark_in_progress(bool in_progress);
483
// Presumably ties to the _oom_evacuation cancel cause — confirm.
484 void oom_during_evacuation();
485
486 void verify_live();
487 void verify_liveness_after_concurrent_mark();
488
489 HeapWord* allocate_memory_work(size_t word_size);
490 HeapWord* allocate_large_memory(size_t word_size);
491
492 #ifdef ASSERT
// Debug-only: page-protects from-space regions to catch stray accesses,
// presumably — confirm in the implementation.
493 void set_from_region_protection(bool protect);
494 #endif
495
|