454 // at the end of a successful GC). expect_null_mutator_alloc_region
455 // specifies whether the mutator alloc region is expected to be NULL
456 // or not.
457 HeapWord* attempt_allocation_at_safepoint(size_t word_size,
458 AllocationContext_t context,
459 bool expect_null_mutator_alloc_region);
460
461 // These methods are the "callbacks" from the G1AllocRegion class.
462
463 // For mutator alloc regions.
// Allocate a new region to back mutator allocation of "word_size" words.
// NOTE(review): "force" presumably permits allocating beyond the normal
// young-gen target -- confirm against the implementation.
464 HeapRegion* new_mutator_alloc_region(size_t word_size, bool force);
// Retire the current mutator alloc region; "allocated_bytes" is the
// number of bytes used in it (presumably for accounting -- confirm).
465 void retire_mutator_alloc_region(HeapRegion* alloc_region,
466 size_t allocated_bytes);
467
468 // For GC alloc regions.
// "dest" is the in-collection-set destination state for the region.
// NOTE(review): "count" is not documented here -- possibly the number of
// regions already allocated for this destination; confirm in the .cpp.
469 HeapRegion* new_gc_alloc_region(size_t word_size, uint count,
470 InCSetState dest);
471 void retire_gc_alloc_region(HeapRegion* alloc_region,
472 size_t allocated_bytes, InCSetState dest);
473
474 // - if explicit_gc is true, the GC is for a System.gc() or a heap
475 // inspection request and should collect the entire heap
476 // - if clear_all_soft_refs is true, all soft references should be
477 // cleared during the GC
478 // - if explicit_gc is false, word_size describes the allocation that
479 // the GC should attempt (at least) to satisfy
480 // - it returns false if it is unable to do the collection due to the
481 // GC locker being active, true otherwise
482 bool do_collection(bool explicit_gc,
483 bool clear_all_soft_refs,
484 size_t word_size);
485
486 // Callback from VM_G1CollectFull operation.
487 // Perform a full collection.
488 virtual void do_full_collection(bool clear_all_soft_refs);
489
490 // Resize the heap if necessary after a full collection. If this is
491 // after a collect-for-allocation, "word_size" is the allocation size,
492 // and will be considered part of the used portion of the heap.
493 void resize_if_necessary_after_full_collection(size_t word_size);
494
495 // Callback from VM_G1CollectForAllocation operation.
496 // This function does everything necessary/possible to satisfy a
497 // failed allocation request (including collection, expansion, etc.)
498 HeapWord* satisfy_failed_allocation(size_t word_size,
499 AllocationContext_t context,
500 bool* succeeded);
501 private:
502 // Helper method for satisfy_failed_allocation()
503 HeapWord* satisfy_failed_allocation_helper(size_t word_size,
504 AllocationContext_t context,
505 bool do_gc,
506 bool clear_all_soft_refs,
507 bool expect_null_mutator_alloc_region,
508 bool* gc_succeeded);
509
510 protected:
511 // Attempting to expand the heap sufficiently
512 // to support an allocation of the given "word_size". If
513 // successful, perform the allocation and return the address of the
1132 inline void old_set_add(HeapRegion* hr);
1133 inline void old_set_remove(HeapRegion* hr);
1134
// Total capacity, in bytes, of the non-young regions: the sum of the
// old region set's and the humongous region set's capacities.
1135 size_t non_young_capacity_bytes() {
1136 return _old_set.total_capacity_bytes() + _humongous_set.total_capacity_bytes();
1137 }
1138
// "Free regions coming" flag protocol: the set/reset pair toggles
// _free_regions_coming, the accessor reads it, and
// wait_while_free_regions_coming() presumably blocks while the flag is
// set -- NOTE(review): confirm the blocking semantics in the .cpp.
1139 void set_free_regions_coming();
1140 void reset_free_regions_coming();
1141 bool free_regions_coming() { return _free_regions_coming; }
1142 void wait_while_free_regions_coming();
1143
1144 // Determine whether the given region is one that we are using as an
1145 // old GC alloc region.
1146 bool is_old_gc_alloc_region(HeapRegion* hr);
1147
1148 // Perform a collection of the heap; intended for use in implementing
1149 // "System.gc". This probably implies as full a collection as the
1150 // "CollectedHeap" supports.
1151 virtual void collect(GCCause::Cause cause);
1152
1153 // The same as above but assume that the caller holds the Heap_lock.
1154 void collect_locked(GCCause::Cause cause);
1155
1156 virtual bool copy_allocation_context_stats(const jint* contexts,
1157 jlong* totals,
1158 jbyte* accuracy,
1159 jint len);
1160
1161 // True iff an evacuation has failed in the most-recent collection.
1162 bool evacuation_failed() { return _evacuation_failed; }
1163
// Region-set / free-list bookkeeping used when regions are reclaimed.
// NOTE(review): semantics inferred from names -- confirm in the .cpp:
// remove_from_old_sets subtracts the given counts from the old and
// humongous sets; prepend_to_freelist returns regions to the free list;
// decrement_summary_bytes reduces the heap's used-bytes summary.
1164 void remove_from_old_sets(const HeapRegionSetCount& old_regions_removed, const HeapRegionSetCount& humongous_regions_removed);
1165 void prepend_to_freelist(FreeRegionList* list);
1166 void decrement_summary_bytes(size_t bytes);
1167
// Returns whether p is in the heap (the cheaper check; contrast with
// is_in_exact below, described as the slow but extensive variant).
1168 virtual bool is_in(const void* p) const;
1169 #ifdef ASSERT
1170 // Returns whether p is in one of the available areas of the heap. Slow but
1171 // extensive version.
1172 bool is_in_exact(const void* p) const;
1173 #endif
1174
|
454 // at the end of a successful GC). expect_null_mutator_alloc_region
455 // specifies whether the mutator alloc region is expected to be NULL
456 // or not.
457 HeapWord* attempt_allocation_at_safepoint(size_t word_size,
458 AllocationContext_t context,
459 bool expect_null_mutator_alloc_region);
460
461 // These methods are the "callbacks" from the G1AllocRegion class.
462
463 // For mutator alloc regions.
// Allocate a new region to back mutator allocation of "word_size" words.
// NOTE(review): "force" presumably permits allocating beyond the normal
// young-gen target -- confirm against the implementation.
464 HeapRegion* new_mutator_alloc_region(size_t word_size, bool force);
// Retire the current mutator alloc region; "allocated_bytes" is the
// number of bytes used in it (presumably for accounting -- confirm).
465 void retire_mutator_alloc_region(HeapRegion* alloc_region,
466 size_t allocated_bytes);
467
468 // For GC alloc regions.
// "dest" is the in-collection-set destination state for the region.
// NOTE(review): "count" is not documented here -- possibly the number of
// regions already allocated for this destination; confirm in the .cpp.
469 HeapRegion* new_gc_alloc_region(size_t word_size, uint count,
470 InCSetState dest);
471 void retire_gc_alloc_region(HeapRegion* alloc_region,
472 size_t allocated_bytes, InCSetState dest);
473
474 // - if explicit_gc is true, the GC is for an explicit request such as
475 // System.gc(); otherwise it is for a failed allocation.
476 // - if clear_all_soft_refs is true, all soft references should be
477 // cleared during the GC.
478 // - it returns false if it is unable to do the collection due to the
479 // GC locker being active, true otherwise.
480 bool do_full_collection(bool explicit_gc,
481 bool clear_all_soft_refs);
482
483 // Callback from VM_G1CollectFull operation, or collect_as_vm_thread.
484 virtual void do_full_collection(bool clear_all_soft_refs);
485
486 // Resize the heap if necessary after a full collection.
487 void resize_if_necessary_after_full_collection();
488
489 // Callback from VM_G1CollectForAllocation operation.
490 // This function does everything necessary/possible to satisfy a
491 // failed allocation request (including collection, expansion, etc.)
492 HeapWord* satisfy_failed_allocation(size_t word_size,
493 AllocationContext_t context,
494 bool* succeeded);
495 private:
496 // Helper method for satisfy_failed_allocation()
497 HeapWord* satisfy_failed_allocation_helper(size_t word_size,
498 AllocationContext_t context,
499 bool do_gc,
500 bool clear_all_soft_refs,
501 bool expect_null_mutator_alloc_region,
502 bool* gc_succeeded);
503
504 protected:
505 // Attempting to expand the heap sufficiently
506 // to support an allocation of the given "word_size". If
507 // successful, perform the allocation and return the address of the
1126 inline void old_set_add(HeapRegion* hr);
1127 inline void old_set_remove(HeapRegion* hr);
1128
// Total capacity, in bytes, of the non-young regions: the sum of the
// old region set's and the humongous region set's capacities.
1129 size_t non_young_capacity_bytes() {
1130 return _old_set.total_capacity_bytes() + _humongous_set.total_capacity_bytes();
1131 }
1132
// "Free regions coming" flag protocol: the set/reset pair toggles
// _free_regions_coming, the accessor reads it, and
// wait_while_free_regions_coming() presumably blocks while the flag is
// set -- NOTE(review): confirm the blocking semantics in the .cpp.
1133 void set_free_regions_coming();
1134 void reset_free_regions_coming();
1135 bool free_regions_coming() { return _free_regions_coming; }
1136 void wait_while_free_regions_coming();
1137
1138 // Determine whether the given region is one that we are using as an
1139 // old GC alloc region.
1140 bool is_old_gc_alloc_region(HeapRegion* hr);
1141
1142 // Perform a collection of the heap; intended for use in implementing
1143 // "System.gc". This probably implies as full a collection as the
1144 // "CollectedHeap" supports.
1145 virtual void collect(GCCause::Cause cause);
1146
1147 virtual bool copy_allocation_context_stats(const jint* contexts,
1148 jlong* totals,
1149 jbyte* accuracy,
1150 jint len);
1151
1152 // True iff an evacuation has failed in the most-recent collection.
1153 bool evacuation_failed() { return _evacuation_failed; }
1154
// Region-set / free-list bookkeeping used when regions are reclaimed.
// NOTE(review): semantics inferred from names -- confirm in the .cpp:
// remove_from_old_sets subtracts the given counts from the old and
// humongous sets; prepend_to_freelist returns regions to the free list;
// decrement_summary_bytes reduces the heap's used-bytes summary.
1155 void remove_from_old_sets(const HeapRegionSetCount& old_regions_removed, const HeapRegionSetCount& humongous_regions_removed);
1156 void prepend_to_freelist(FreeRegionList* list);
1157 void decrement_summary_bytes(size_t bytes);
1158
// Returns whether p is in the heap (the cheaper check; contrast with
// is_in_exact below, described as the slow but extensive variant).
1159 virtual bool is_in(const void* p) const;
1160 #ifdef ASSERT
1161 // Returns whether p is in one of the available areas of the heap. Slow but
1162 // extensive version.
1163 bool is_in_exact(const void* p) const;
1164 #endif
1165
|