// Destructor: releases the heap lock when the lock object goes out of scope.
// In debug (ASSERT) builds, first verify the current thread actually holds
// the lock, then clear the recorded owner before the release.
117 ~ShenandoahHeapLock() {
118 #ifdef ASSERT
119 _heap->assert_heaplock_owned_by_current_thread();
120 _heap->_heap_lock_owner = NULL;
121 #endif
// Release-store with fence: publishes writes performed while the lock was
// held before marking _heap_lock as 'unlocked' for other threads.
122 OrderAccess::release_store_fence(&_heap->_heap_lock, unlocked);
123 }
124
125 };
126
127 public:
// Reasons for cancelling a concurrent GC cycle (consumed by the
// cancel_concgc(ShenandoahCancelCause) overload declared below).
//   _oom_evacuation - presumably: evacuation failed for lack of memory — TODO confirm
//   _vm_stop        - presumably: the VM is shutting down — TODO confirm
128 enum ShenandoahCancelCause {
129 _oom_evacuation,
130 _vm_stop,
131 };
132 private:
133
134 static ShenandoahHeap* _pgc;
135 ShenandoahCollectorPolicy* _shenandoah_policy;
136 VirtualSpace _storage;
137 ShenandoahHeapRegion* _first_region;
138 HeapWord* _first_region_bottom;
139
140 // Sortable array of regions
141 ShenandoahHeapRegionSet* _ordered_regions;
142 ShenandoahHeapRegionSet* _sorted_regions;
143 ShenandoahFreeSet* _free_regions;
144 ShenandoahCollectionSet* _collection_set;
145
146 ShenandoahHeapRegion* _currentAllocationRegion;
147 ShenandoahConcurrentMark* _scm;
148 ShenandoahPartialGC* _partial_gc;
149
150 ShenandoahConcurrentThread* _concurrent_gc_thread;
151
152 ShenandoahMonitoringSupport* _monitoring_support;
153
154 size_t _num_regions;
155 size_t _max_regions;
156 size_t _initialSize;
283
// Accessor: returns the heap's collector policy object (_shenandoah_policy).
284 ShenandoahCollectorPolicy *shenandoahPolicy() { return _shenandoah_policy;}
285
286 inline ShenandoahHeapRegion* heap_region_containing(const void* addr) const;
287 inline uint heap_region_index_containing(const void* addr) const;
288 inline bool requires_marking(const void* entry) const;
289 template <class T>
290 inline oop maybe_update_oop_ref(T* p);
291
292 void recycle_dirty_regions();
293
294 void start_concurrent_marking();
295 void stop_concurrent_marking();
296 inline bool concurrent_mark_in_progress();
297 static address concurrent_mark_in_progress_addr();
298
299 void prepare_for_concurrent_evacuation();
300 void evacuate_and_update_roots();
301
302 void do_partial_collection();
303 void verify_matrix();
304 private:
305 void set_evacuation_in_progress(bool in_progress);
306 public:
307 inline bool is_evacuation_in_progress();
308 void set_evacuation_in_progress_concurrently(bool in_progress);
309 void set_evacuation_in_progress_at_safepoint(bool in_progress);
310
311 void set_full_gc_in_progress(bool in_progress);
312 bool is_full_gc_in_progress() const;
313
314 inline bool need_update_refs() const;
315 void set_need_update_refs(bool update_refs);
316
317 inline bool region_in_collection_set(size_t region_index) const;
318
319 void set_region_in_collection_set(size_t region_index, bool b);
320
321 // Mainly there to avoid accidentally calling the templated
322 // method below with ShenandoahHeapRegion* which would be *wrong*.
323 inline bool in_collection_set(ShenandoahHeapRegion* r) const;
392
393 VirtualSpace* storage() const;
394
395 ShenandoahMonitoringSupport* monitoring_support();
// Accessor: returns the concurrent marking subsystem (_scm).
396 ShenandoahConcurrentMark* concurrentMark() { return _scm;}
397 ShenandoahPartialGC* partial_gc();
398
// Accessor: returns the heap's reference processor (_ref_processor,
// declared elsewhere in this class).
399 ReferenceProcessor* ref_processor() { return _ref_processor;}
400
// Accessor: returns the GC worker gang (_workers, declared elsewhere
// in this class).
402 WorkGang* workers() const { return _workers;}
402
403 uint max_workers();
404
405 void do_evacuation();
406 ShenandoahHeapRegion* next_compaction_region(const ShenandoahHeapRegion* r);
407
408 void heap_region_iterate(ShenandoahHeapRegionClosure* blk, bool skip_dirty_regions = false, bool skip_humongous_continuation = false) const;
409
410 void verify_heap_after_evacuation();
411 void verify_heap_after_marking();
412
413 // Delete entries for dead interned string and clean up unreferenced symbols
414 // in symbol table, possibly in parallel.
415 void unlink_string_and_symbol_table(BoolObjectClosure* is_alive, bool unlink_strings = true, bool unlink_symbols = true);
416
417 size_t num_regions();
418 size_t max_regions();
419
420 // TODO: consider moving this into ShenandoahHeapRegion.
421
422 template<class T>
423 inline void marked_object_iterate(ShenandoahHeapRegion* region, T* cl);
424
425 GCTimer* gc_timer() const;
426
427 void swap_mark_bitmaps();
428
429 void cancel_concgc(GCCause::Cause cause);
430 void cancel_concgc(ShenandoahCancelCause cause);
431
|
// Destructor: releases the heap lock when the lock object goes out of scope.
// In debug (ASSERT) builds, first verify the current thread actually holds
// the lock, then clear the recorded owner before the release.
117 ~ShenandoahHeapLock() {
118 #ifdef ASSERT
119 _heap->assert_heaplock_owned_by_current_thread();
120 _heap->_heap_lock_owner = NULL;
121 #endif
// Release-store with fence: publishes writes performed while the lock was
// held before marking _heap_lock as 'unlocked' for other threads.
122 OrderAccess::release_store_fence(&_heap->_heap_lock, unlocked);
123 }
124
125 };
126
127 public:
// Reasons for cancelling a concurrent GC cycle (consumed by the
// cancel_concgc(ShenandoahCancelCause) overload declared below).
//   _oom_evacuation - presumably: evacuation failed for lack of memory — TODO confirm
//   _vm_stop        - presumably: the VM is shutting down — TODO confirm
128 enum ShenandoahCancelCause {
129 _oom_evacuation,
130 _vm_stop,
131 };
132 private:
133
134 static ShenandoahHeap* _pgc;
135 ShenandoahCollectorPolicy* _shenandoah_policy;
136 VirtualSpace _storage;
137 size_t _bitmap_size;
138 MemRegion _heap_region;
139
140 ShenandoahHeapRegion* _first_region;
141 HeapWord* _first_region_bottom;
142
143 // Sortable array of regions
144 ShenandoahHeapRegionSet* _ordered_regions;
145 ShenandoahHeapRegionSet* _sorted_regions;
146 ShenandoahFreeSet* _free_regions;
147 ShenandoahCollectionSet* _collection_set;
148
149 ShenandoahHeapRegion* _currentAllocationRegion;
150 ShenandoahConcurrentMark* _scm;
151 ShenandoahPartialGC* _partial_gc;
152
153 ShenandoahConcurrentThread* _concurrent_gc_thread;
154
155 ShenandoahMonitoringSupport* _monitoring_support;
156
157 size_t _num_regions;
158 size_t _max_regions;
159 size_t _initialSize;
286
// Accessor: returns the heap's collector policy object (_shenandoah_policy).
287 ShenandoahCollectorPolicy *shenandoahPolicy() { return _shenandoah_policy;}
288
289 inline ShenandoahHeapRegion* heap_region_containing(const void* addr) const;
290 inline uint heap_region_index_containing(const void* addr) const;
291 inline bool requires_marking(const void* entry) const;
292 template <class T>
293 inline oop maybe_update_oop_ref(T* p);
294
295 void recycle_dirty_regions();
296
297 void start_concurrent_marking();
298 void stop_concurrent_marking();
299 inline bool concurrent_mark_in_progress();
300 static address concurrent_mark_in_progress_addr();
301
302 void prepare_for_concurrent_evacuation();
303 void evacuate_and_update_roots();
304
305 void do_partial_collection();
306 private:
307 void set_evacuation_in_progress(bool in_progress);
308 public:
309 inline bool is_evacuation_in_progress();
310 void set_evacuation_in_progress_concurrently(bool in_progress);
311 void set_evacuation_in_progress_at_safepoint(bool in_progress);
312
313 void set_full_gc_in_progress(bool in_progress);
314 bool is_full_gc_in_progress() const;
315
316 inline bool need_update_refs() const;
317 void set_need_update_refs(bool update_refs);
318
319 inline bool region_in_collection_set(size_t region_index) const;
320
321 void set_region_in_collection_set(size_t region_index, bool b);
322
323 // Mainly there to avoid accidentally calling the templated
324 // method below with ShenandoahHeapRegion* which would be *wrong*.
325 inline bool in_collection_set(ShenandoahHeapRegion* r) const;
394
395 VirtualSpace* storage() const;
396
397 ShenandoahMonitoringSupport* monitoring_support();
// Accessor: returns the concurrent marking subsystem (_scm).
398 ShenandoahConcurrentMark* concurrentMark() { return _scm;}
399 ShenandoahPartialGC* partial_gc();
400
// Accessor: returns the heap's reference processor (_ref_processor,
// declared elsewhere in this class).
401 ReferenceProcessor* ref_processor() { return _ref_processor;}
402
// Accessor: returns the GC worker gang (_workers, declared elsewhere
// in this class).
403 WorkGang* workers() const { return _workers;}
404
405 uint max_workers();
406
407 void do_evacuation();
408 ShenandoahHeapRegion* next_compaction_region(const ShenandoahHeapRegion* r);
409
410 void heap_region_iterate(ShenandoahHeapRegionClosure* blk, bool skip_dirty_regions = false, bool skip_humongous_continuation = false) const;
411
412 void verify_heap_after_evacuation();
413 void verify_heap_after_marking();
414 void verify_heap_reachable_at_safepoint();
415
416 // Delete entries for dead interned string and clean up unreferenced symbols
417 // in symbol table, possibly in parallel.
418 void unlink_string_and_symbol_table(BoolObjectClosure* is_alive, bool unlink_strings = true, bool unlink_symbols = true);
419
420 size_t num_regions();
421 size_t max_regions();
422
423 // TODO: consider moving this into ShenandoahHeapRegion.
424
425 template<class T>
426 inline void marked_object_iterate(ShenandoahHeapRegion* region, T* cl);
427
428 GCTimer* gc_timer() const;
429
430 void swap_mark_bitmaps();
431
432 void cancel_concgc(GCCause::Cause cause);
433 void cancel_concgc(ShenandoahCancelCause cause);
434
|