// NOTE(review): fragment of a TLAB ergonomics/sizing routine (Epsilon GC style).
// The enclosing function and two scopes open BEFORE this excerpt, so the
// unmatched closing braces below are intentional. The leading "195"-style
// numbers are listing artifacts from the paste, not code.
195 assert(last_time <= time, "time should be monotonic");
196
197 // If the thread had not allocated recently, retract the ergonomic size.
198 // This conserves memory when the thread had initial burst of allocations,
199 // and then started allocating only sporadically.
200 if (last_time != 0 && (time - last_time > _decay_time_ns)) {
201 ergo_tlab = 0;
202 EpsilonThreadLocalData::set_ergo_tlab_size(thread, 0);
203 }
204 }
205
206 // If we can fit the allocation under current TLAB size, do so.
207 // Otherwise, we want to elastically increase the TLAB size.
208 fits = (requested_size <= ergo_tlab);
209 if (!fits) {
// Grow geometrically: next size is the current ergonomic size scaled by the
// elasticity factor (truncated back to size_t words).
210 size = (size_t) (ergo_tlab * EpsilonTLABElasticity);
211 }
212 }
213
214 // Always honor boundaries
// NOTE(review): this is the hand-rolled equivalent of
// clamp(size, min_size, _max_tlab_size); it assumes min_size <= _max_tlab_size
// (the asserts below check both bounds independently). Consider clamp() for clarity.
215 size = MAX2(min_size, MIN2(_max_tlab_size, size));
216
217 // Always honor alignment
218 size = align_up(size, MinObjAlignment);
219
220 // Check that adjustments did not break local and global invariants
221 assert(is_object_aligned(size),
222 "Size honors object alignment: " SIZE_FORMAT, size);
223 assert(min_size <= size,
224 "Size honors min size: " SIZE_FORMAT " <= " SIZE_FORMAT, min_size, size);
225 assert(size <= _max_tlab_size,
226 "Size honors max size: " SIZE_FORMAT " <= " SIZE_FORMAT, size, _max_tlab_size);
227 assert(size <= CollectedHeap::max_tlab_size(),
228 "Size honors global max size: " SIZE_FORMAT " <= " SIZE_FORMAT, size, CollectedHeap::max_tlab_size());
229
// Guard the trace block so thread->name() (which may resource-allocate via
// ResourceMark) is only paid for when Trace-level gc logging is on.
230 if (log_is_enabled(Trace, gc)) {
231 ResourceMark rm;
232 log_trace(gc)("TLAB size for \"%s\" (Requested: " SIZE_FORMAT "K, Min: " SIZE_FORMAT
233 "K, Max: " SIZE_FORMAT "K, Ergo: " SIZE_FORMAT "K) -> " SIZE_FORMAT "K",
234 thread->name(),
// Sizes are in heap words; convert to KB for the log line.
// NOTE(review): the call is truncated here — remaining arguments are outside this excerpt.
235 requested_size * HeapWordSize / K,
|
// NOTE(review): second pasted copy of the same TLAB-sizing fragment; it differs
// from the first copy only at line "215", where the MAX2/MIN2 composition has
// been replaced by clamp(). Unmatched closing braces below close scopes opened
// before this excerpt; the leading "195"-style numbers are listing artifacts.
195 assert(last_time <= time, "time should be monotonic");
196
197 // If the thread had not allocated recently, retract the ergonomic size.
198 // This conserves memory when the thread had initial burst of allocations,
199 // and then started allocating only sporadically.
200 if (last_time != 0 && (time - last_time > _decay_time_ns)) {
201 ergo_tlab = 0;
202 EpsilonThreadLocalData::set_ergo_tlab_size(thread, 0);
203 }
204 }
205
206 // If we can fit the allocation under current TLAB size, do so.
207 // Otherwise, we want to elastically increase the TLAB size.
208 fits = (requested_size <= ergo_tlab);
209 if (!fits) {
// Grow geometrically: next size is the current ergonomic size scaled by the
// elasticity factor (truncated back to size_t words).
210 size = (size_t) (ergo_tlab * EpsilonTLABElasticity);
211 }
212 }
213
214 // Always honor boundaries
// clamp(v, lo, hi) == MAX2(lo, MIN2(hi, v)); requires min_size <= _max_tlab_size,
// which the two boundary asserts below verify.
215 size = clamp(size, min_size, _max_tlab_size);
216
217 // Always honor alignment
218 size = align_up(size, MinObjAlignment);
219
220 // Check that adjustments did not break local and global invariants
221 assert(is_object_aligned(size),
222 "Size honors object alignment: " SIZE_FORMAT, size);
223 assert(min_size <= size,
224 "Size honors min size: " SIZE_FORMAT " <= " SIZE_FORMAT, min_size, size);
225 assert(size <= _max_tlab_size,
226 "Size honors max size: " SIZE_FORMAT " <= " SIZE_FORMAT, size, _max_tlab_size);
227 assert(size <= CollectedHeap::max_tlab_size(),
228 "Size honors global max size: " SIZE_FORMAT " <= " SIZE_FORMAT, size, CollectedHeap::max_tlab_size());
229
// Guard the trace block so thread->name() (which may resource-allocate via
// ResourceMark) is only paid for when Trace-level gc logging is on.
230 if (log_is_enabled(Trace, gc)) {
231 ResourceMark rm;
232 log_trace(gc)("TLAB size for \"%s\" (Requested: " SIZE_FORMAT "K, Min: " SIZE_FORMAT
233 "K, Max: " SIZE_FORMAT "K, Ergo: " SIZE_FORMAT "K) -> " SIZE_FORMAT "K",
234 thread->name(),
// Sizes are in heap words; convert to KB for the log line.
// NOTE(review): the call is truncated here — remaining arguments are outside this excerpt.
235 requested_size * HeapWordSize / K,
|