
src/share/vm/gc/g1/g1Allocator.inline.hpp

rev 12504 : 8171235: Move archive object code from G1MarkSweep into G1ArchiveAllocator
Reviewed-by:


  assert(result != NULL || mutator_alloc_region(context)->get() == NULL,
         "Must not have a mutator alloc region if there is no memory, but is " PTR_FORMAT, p2i(mutator_alloc_region(context)->get()));
  return result;
}

HeapWord* G1Allocator::attempt_allocation_force(size_t word_size, AllocationContext_t context) {
  return mutator_alloc_region(context)->attempt_allocation_force(word_size, false /* bot_updates */);
}

inline HeapWord* G1PLABAllocator::plab_allocate(InCSetState dest,
                                                size_t word_sz,
                                                AllocationContext_t context) {
  G1PLAB* buffer = alloc_buffer(dest, context);
  if (_survivor_alignment_bytes == 0 || !dest.is_young()) {
    return buffer->allocate(word_sz);
  } else {
    return buffer->allocate_aligned(word_sz, _survivor_alignment_bytes);
  }
}
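// --- Illustrative sketch, not part of this file ----------------------------
// A minimal standalone model of the two allocation paths chosen above:
// plain bump-pointer allocation versus allocation that first pads the
// current top up to a requested alignment. All names here (ToyBuffer,
// allocate_aligned, ...) are assumptions for illustration, not the actual
// G1PLAB implementation, which additionally has to keep the skipped gap
// walkable by the heap parser.
#include <cstddef>
#include <cstdint>

struct ToyBuffer {
  char* _top;   // next free byte
  char* _end;   // one past the last usable byte

  // Plain bump-pointer allocation of 'bytes'.
  void* allocate(size_t bytes) {
    if (bytes > (size_t)(_end - _top)) return nullptr;  // buffer exhausted
    void* result = _top;
    _top += bytes;
    return result;
  }

  // Round _top up to 'alignment' (a power of two), then allocate. The pad is
  // wasted space, which is why the aligned path above is only taken when a
  // survivor alignment is configured and the destination is young.
  void* allocate_aligned(size_t bytes, size_t alignment) {
    uintptr_t top     = reinterpret_cast<uintptr_t>(_top);
    uintptr_t aligned = (top + alignment - 1) & ~(uintptr_t)(alignment - 1);
    size_t    pad     = (size_t)(aligned - top);
    if (pad + bytes > (size_t)(_end - _top)) return nullptr;  // would not fit
    _top += pad;        // skip the padding bytes
    return allocate(bytes);
  }
};
// ----------------------------------------------------------------------------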

// Create the _archive_region_map which is used to identify archive objects.
inline void G1ArchiveAllocator::enable_archive_object_check() {
  assert(!_archive_check_enabled, "archive range check already enabled");
  _archive_check_enabled = true;
  size_t length = Universe::heap()->max_capacity();
  _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
                                 (HeapWord*)Universe::heap()->base() + length,
                                 HeapRegion::GrainBytes);
}

// Set the regions containing the specified address range as archive/non-archive.
inline void G1ArchiveAllocator::set_range_archive(MemRegion range, bool is_archive) {
  assert(_archive_check_enabled, "archive range check not enabled");
  _archive_region_map.set_by_address(range, is_archive);
}

// Check if an object is in an archive region using the _archive_region_map.
inline bool G1ArchiveAllocator::in_archive_range(oop object) {
  // This is the out-of-line part of is_archive_object test, done separately
  // to avoid additional performance impact when the check is not enabled.
  return _archive_region_map.get_by_address((HeapWord*)object);
}
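// --- Illustrative sketch, not part of this file ----------------------------
// A minimal standalone model of the kind of address-indexed map used above:
// one flag per fixed-size region, found by dividing the offset from the
// covered base address by the region size (HeapRegion::GrainBytes in G1).
// The names below are assumptions for illustration only, not the real map
// type used by _archive_region_map.
#include <cstddef>
#include <vector>

class ToyRegionFlagMap {
  const char*       _base;        // lowest covered address
  size_t            _region_size; // bytes covered by one flag
  std::vector<bool> _flags;       // one flag per region

 public:
  void initialize(const char* base, const char* end, size_t region_size) {
    _base        = base;
    _region_size = region_size;
    _flags.assign((size_t)(end - base) / region_size, false);
  }

  // Mark every region overlapped by [start, start + bytes) as 'value'.
  void set_by_address(const char* start, size_t bytes, bool value) {
    if (bytes == 0) return;
    size_t first = (size_t)(start - _base) / _region_size;
    size_t last  = (size_t)(start + bytes - 1 - _base) / _region_size;
    for (size_t i = first; i <= last; i++) {
      _flags[i] = value;
    }
  }

  bool get_by_address(const char* addr) const {
    return _flags[(size_t)(addr - _base) / _region_size];
  }
};
// ----------------------------------------------------------------------------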

// Check if archive object checking is enabled, to avoid calling in_archive_range
// unnecessarily.
inline bool G1ArchiveAllocator::archive_check_enabled() {
  return _archive_check_enabled;
}

inline bool G1ArchiveAllocator::is_archive_object(oop object) {
  return (archive_check_enabled() && in_archive_range(object));
}
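// --- Illustrative sketch, not part of this file ----------------------------
// Hypothetical caller, assuming the accessors above are static members (the
// unqualified field accesses suggest so): is_archive_object() can serve as a
// cheap guard in GC code. When no archive ranges have been registered the
// test costs only the _archive_check_enabled load and branch; the map lookup
// in in_archive_range() is the out-of-line slow path. The function name below
// is made up for illustration.
static void toy_process_object(oop obj) {
  if (G1ArchiveAllocator::is_archive_object(obj)) {
    return; // e.g. leave objects resident in archive regions untouched
  }
  // ... normal marking / copying work would follow here ...
}
// ----------------------------------------------------------------------------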

#endif // SHARE_VM_GC_G1_G1ALLOCATOR_HPP