158 // of the old generation.
    // Fallback allocation path, used after a normal mem_allocate has failed.
    // NOTE(review): presumably runs a collection and retries the allocation;
    // confirm against the definition in the .cpp file.
159 HeapWord* failed_mem_allocate(size_t size);
160
161 // Support for System.gc()
162 void collect(GCCause::Cause cause);
163
164 // These also should be called by the vm thread at a safepoint (e.g., from a
165 // VM operation).
166 //
167 // The first collects the young generation only, unless the scavenge fails; it
168 // will then attempt a full gc. The second collects the entire heap; if
169 // maximum_compaction is true, it will compact everything and clear all soft
170 // references.
    // NOTE(review): only the first routine described above (invoke_scavenge) is
    // declared here; the "second" one (a full-heap collect taking a
    // maximum_compaction flag) is not visible in this chunk -- the comment
    // block above may be stale. Verify against the rest of the class.
171 inline void invoke_scavenge();
172
173 // Perform a full collection
174 virtual void do_full_collection(bool clear_all_soft_refs);
    // Inline (compiled-code) contiguous allocation is only offered when NUMA
    // interleaving is off. NOTE(review): presumably because under UseNUMA eden
    // is split into per-node spaces, so no single top/end pair exists -- confirm.
176 bool supports_inline_contig_alloc() const { return !UseNUMA; }
177
    // Addresses of the young gen's allocation top and end, exported so
    // compiled code can inline allocation. NOTE(review): (HeapWord**)-1
    // appears to be an "unsupported" sentinel consumed by the compiler
    // interface when UseNUMA is set -- confirm against the callers.
178 HeapWord** top_addr() const { return !UseNUMA ? young_gen()->top_addr() : (HeapWord**)-1; }
179 HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
180
    // TLAB (thread-local allocation buffer) support.
    // NOTE(review): semantics below inferred from names -- confirm in the .cpp:
    // ensure_parsability makes the heap safely iterable, optionally retiring
    // all TLABs first; the accumulate/resize entry points maintain per-thread
    // TLAB statistics and sizing.
181 void ensure_parsability(bool retire_tlabs);
182 void accumulate_statistics_all_tlabs();
183 void resize_all_tlabs();
184
    // This heap always supports TLAB allocation.
185 bool supports_tlab_allocation() const { return true; }
186
    // Per-thread TLAB metrics reported to the generic CollectedHeap layer.
187 size_t tlab_capacity(Thread* thr) const;
188 size_t tlab_used(Thread* thr) const;
189 size_t unsafe_max_tlab_alloc(Thread* thr) const;
190
191 // Can a compiler initialize a new object without store barriers?
192 // This permission only extends from the creation of a new object
193 // via a TLAB up to the first subsequent safepoint.
194 virtual bool can_elide_tlab_store_barriers() const {
      // Unconditionally permitted for this heap, within the window described
      // by the comment above.
195 return true;
196 }
197
198 virtual bool card_mark_must_follow_store() const {
|
158 // of the old generation.
    // Allocation fallback invoked once a regular mem_allocate has failed.
    // NOTE(review): presumably triggers a GC and then retries; confirm
    // against the definition in the .cpp file.
159 HeapWord* failed_mem_allocate(size_t size);
160
161 // Support for System.gc()
162 void collect(GCCause::Cause cause);
163
164 // These also should be called by the vm thread at a safepoint (e.g., from a
165 // VM operation).
166 //
167 // The first collects the young generation only, unless the scavenge fails; it
168 // will then attempt a full gc. The second collects the entire heap; if
169 // maximum_compaction is true, it will compact everything and clear all soft
170 // references.
    // NOTE(review): the "second" routine described above (full-heap collect
    // taking a maximum_compaction flag) is not declared in this chunk; the
    // comment block may be stale. Verify against the rest of the class.
171 inline void invoke_scavenge();
172
173 // Perform a full collection
174 virtual void do_full_collection(bool clear_all_soft_refs);
    // Inline (compiled-code) contiguous allocation is only offered when NUMA
    // interleaving is off. NOTE(review): presumably because under UseNUMA eden
    // is split into per-node spaces, so no single top/end pair exists -- confirm.
176 bool supports_inline_contig_alloc() const { return !UseNUMA; }
177
    // Addresses of the young gen's allocation top and end, exported so
    // compiled code can inline allocation. top is volatile-qualified while
    // end is not. NOTE(review): presumably top is updated concurrently by
    // allocating threads whereas end only changes at a safepoint -- confirm.
    // The -1 value appears to be an "unsupported" sentinel consumed by the
    // compiler interface when UseNUMA is set -- confirm against the callers.
178 HeapWord* volatile* top_addr() const { return !UseNUMA ? young_gen()->top_addr() : (HeapWord* volatile*)-1; }
179 HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
180
    // TLAB (thread-local allocation buffer) support.
    // NOTE(review): semantics below inferred from names -- confirm in the .cpp:
    // ensure_parsability makes the heap safely iterable, optionally retiring
    // all TLABs first; the accumulate/resize entry points maintain per-thread
    // TLAB statistics and sizing.
181 void ensure_parsability(bool retire_tlabs);
182 void accumulate_statistics_all_tlabs();
183 void resize_all_tlabs();
184
    // This heap always supports TLAB allocation.
185 bool supports_tlab_allocation() const { return true; }
186
    // Per-thread TLAB metrics reported to the generic CollectedHeap layer.
187 size_t tlab_capacity(Thread* thr) const;
188 size_t tlab_used(Thread* thr) const;
189 size_t unsafe_max_tlab_alloc(Thread* thr) const;
190
191 // Can a compiler initialize a new object without store barriers?
192 // This permission only extends from the creation of a new object
193 // via a TLAB up to the first subsequent safepoint.
194 virtual bool can_elide_tlab_store_barriers() const {
      // Unconditionally permitted for this heap, within the window described
      // by the comment above.
195 return true;
196 }
197
198 virtual bool card_mark_must_follow_store() const {
|