280 virtual HeapWord* block_start(const void* addr) const;
281
282 // Requires "addr" to be the start of a chunk, and returns its size.
283 // "addr + size" is required to be the start of a new chunk, or the end
284 // of the active area of the heap. Assumes (and verifies in non-product
285 // builds) that addr is in the allocated part of the heap and is
286 // the start of a chunk.
287 virtual size_t block_size(const HeapWord* addr) const;
288
289 // Requires "addr" to be the start of a block, and returns "TRUE" iff
290 // the block is an object. Assumes (and verifies in non-product
291 // builds) that addr is in the allocated part of the heap and is
292 // the start of a chunk.
293 virtual bool block_is_obj(const HeapWord* addr) const;
294
295 // Section on TLAB's.
296 virtual bool supports_tlab_allocation() const;
297 virtual size_t tlab_capacity(Thread* thr) const;
298 virtual size_t tlab_used(Thread* thr) const;
299 virtual size_t unsafe_max_tlab_alloc(Thread* thr) const;
300 virtual HeapWord* allocate_new_tlab(size_t size);
301
302 // The "requestor" generation is performing some garbage collection
303 // action for which it would be useful to have scratch space. The
304 // requestor promises to allocate no more than "max_alloc_words" in any
305 // older generation (via promotion, say). Any blocks of space that can
306 // be provided are returned as a list of ScratchBlocks, sorted by
307 // decreasing size.
308 ScratchBlock* gather_scratch(Generation* requestor, size_t max_alloc_words);
309 // Allow each generation to reset any scratch space that it has
310 // contributed as it needs.
311 void release_scratch();
312
313 // Ensure parsability: override
314 virtual void ensure_parsability(bool retire_tlabs);
315
316 // Time in ms elapsed since the last time a collector ran in
317 // any generation.
318 virtual jlong millis_since_last_gc();
319
320 // Total number of full collections completed.
|
280 virtual HeapWord* block_start(const void* addr) const;
281
282 // Requires "addr" to be the start of a chunk, and returns its size.
283 // "addr + size" is required to be the start of a new chunk, or the end
284 // of the active area of the heap. Assumes (and verifies in non-product
285 // builds) that addr is in the allocated part of the heap and is
286 // the start of a chunk.
287 virtual size_t block_size(const HeapWord* addr) const;
288
289 // Requires "addr" to be the start of a block, and returns "TRUE" iff
290 // the block is an object. Assumes (and verifies in non-product
291 // builds) that addr is in the allocated part of the heap and is
292 // the start of a chunk.
293 virtual bool block_is_obj(const HeapWord* addr) const;
294
295 // Section on TLAB's.
296 virtual bool supports_tlab_allocation() const;
297 virtual size_t tlab_capacity(Thread* thr) const;
298 virtual size_t tlab_used(Thread* thr) const;
299 virtual size_t unsafe_max_tlab_alloc(Thread* thr) const;
300 virtual HeapWord* allocate_new_tlab(size_t min_word_size,
301 size_t desired_word_size,
302 size_t* actual_word_size);
303
304 // The "requestor" generation is performing some garbage collection
305 // action for which it would be useful to have scratch space. The
306 // requestor promises to allocate no more than "max_alloc_words" in any
307 // older generation (via promotion, say). Any blocks of space that can
308 // be provided are returned as a list of ScratchBlocks, sorted by
309 // decreasing size.
310 ScratchBlock* gather_scratch(Generation* requestor, size_t max_alloc_words);
311 // Allow each generation to reset any scratch space that it has
312 // contributed as it needs.
313 void release_scratch();
314
315 // Ensure parsability: override
316 virtual void ensure_parsability(bool retire_tlabs);
317
318 // Time in ms elapsed since the last time a collector ran in
319 // any generation.
320 virtual jlong millis_since_last_gc();
321
322 // Total number of full collections completed.
|