src/hotspot/share/gc/g1/g1ConcurrentMark.hpp

*** 27,36 ****
--- 27,37 ----
  
  #include "gc/g1/g1ConcurrentMarkBitMap.hpp"
  #include "gc/g1/g1ConcurrentMarkObjArrayProcessor.hpp"
  #include "gc/g1/heapRegionSet.hpp"
  #include "gc/shared/taskqueue.hpp"
+ #include "memory/allocation.hpp"
  
  class ConcurrentGCTimer;
  class ConcurrentMarkThread;
  class G1CollectedHeap;
  class G1CMTask;
*** 45,55 ****
  #pragma warning(disable:4522)
  #endif
  
  // This is a container class for either an oop or a continuation address for
  // mark stack entries. Both are pushed onto the mark stack.
! class G1TaskQueueEntry VALUE_OBJ_CLASS_SPEC {
  private:
    void* _holder;
  
    static const uintptr_t ArraySliceBit = 1;
  
--- 46,56 ----
  #pragma warning(disable:4522)
  #endif
  
  // This is a container class for either an oop or a continuation address for
  // mark stack entries. Both are pushed onto the mark stack.
! class G1TaskQueueEntry {
  private:
    void* _holder;
  
    static const uintptr_t ArraySliceBit = 1;
  
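Background note: G1TaskQueueEntry can hold either an oop or an array-slice continuation address because it tags the low bit of _holder (ArraySliceBit), which is free since both kinds of pointer are at least word aligned. The standalone sketch below illustrates that pointer-tagging technique only; the stand-in types and all names not visible in the hunk above are invented for illustration and are not the HotSpot declarations.

#include <cassert>
#include <cstddef>
#include <cstdint>

// Sketch only: stand-ins for a word-aligned object pointer and a slice address.
typedef void* ObjPtr;
typedef char* SliceAddr;

class TaggedTaskEntry {
private:
  void* _holder;                                 // tagged pointer
  static const uintptr_t ArraySliceBit = 1;      // low bit marks an array slice

  explicit TaggedTaskEntry(void* p) : _holder(p) { }

public:
  TaggedTaskEntry() : _holder(NULL) { }

  static TaggedTaskEntry from_obj(ObjPtr obj) {
    // Alignment guarantees the low bit of a real object pointer is clear.
    assert(((uintptr_t)obj & ArraySliceBit) == 0);
    return TaggedTaskEntry(obj);
  }
  static TaggedTaskEntry from_slice(SliceAddr addr) {
    // Setting the low bit records that this entry is a continuation address.
    return TaggedTaskEntry((void*)((uintptr_t)addr | ArraySliceBit));
  }

  bool is_array_slice() const { return ((uintptr_t)_holder & ArraySliceBit) != 0; }
  bool is_obj() const         { return !is_array_slice(); }
  bool is_null() const        { return _holder == NULL; }

  ObjPtr obj() const {
    assert(is_obj());
    return _holder;
  }
  SliceAddr slice() const {
    // Strip the tag bit to recover the original address.
    return (SliceAddr)((uintptr_t)_holder & ~ArraySliceBit);
  }
};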
*** 125,135 ****
  // management. This wastes some space, but is negligible (< .1% with current sizing).
  //
  // Memory management is done using a mix of tracking a high water-mark indicating
  // that all chunks at a lower address are valid chunks, and a singly linked free
  // list connecting all empty chunks.
! class G1CMMarkStack VALUE_OBJ_CLASS_SPEC {
  public:
    // Number of TaskQueueEntries that can fit in a single chunk.
    static const size_t EntriesPerChunk = 1024 - 1 /* One reference for the next pointer */;
  private:
    struct TaskQueueEntryChunk {
--- 126,136 ----
  // management. This wastes some space, but is negligible (< .1% with current sizing).
  //
  // Memory management is done using a mix of tracking a high water-mark indicating
  // that all chunks at a lower address are valid chunks, and a singly linked free
  // list connecting all empty chunks.
! class G1CMMarkStack {
  public:
    // Number of TaskQueueEntries that can fit in a single chunk.
    static const size_t EntriesPerChunk = 1024 - 1 /* One reference for the next pointer */;
  private:
    struct TaskQueueEntryChunk {
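Background note: the comment in this hunk describes the mark stack's memory management: fixed-size chunks whose first slot is the next pointer, a high-water mark over a reserved backing array, and a free list that recycles emptied chunks. The sketch below shows that high-water-mark-plus-free-list allocation pattern in isolation; it is deliberately single-threaded and all names (Chunk, ChunkPool, ...) are invented, whereas the real G1CMMarkStack stores G1TaskQueueEntry values and has to synchronize these operations across marking threads.

#include <cstddef>

struct Chunk {
  static const size_t EntriesPerChunk = 1024 - 1;  // one slot is reserved for the next pointer
  Chunk* next;
  void*  data[EntriesPerChunk];
};

class ChunkPool {
  Chunk*  _base;        // backing array of chunks, reserved up front
  size_t  _capacity;    // number of chunks in _base
  size_t  _hwm;         // high-water mark: every chunk below this index has been handed out
  Chunk*  _free_list;   // singly linked list of returned (empty) chunks

public:
  ChunkPool(Chunk* base, size_t capacity)
    : _base(base), _capacity(capacity), _hwm(0), _free_list(NULL) { }

  // Prefer reusing a chunk from the free list; otherwise bump the high-water mark.
  Chunk* allocate_chunk() {
    if (_free_list != NULL) {
      Chunk* result = _free_list;
      _free_list = result->next;
      return result;
    }
    if (_hwm == _capacity) {
      return NULL;          // reserved space exhausted
    }
    return &_base[_hwm++];  // everything below _hwm is a valid chunk
  }

  // An emptied chunk is not freed; it is simply linked onto the free list for reuse.
  void release_chunk(Chunk* c) {
    c->next = _free_list;
    _free_list = c;
  }
};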
*** 225,235 ****
  // root regions without having to mark them or do anything else to them.
  //
  // Currently, we only support root region scanning once (at the start
  // of the marking cycle) and the root regions are all the survivor
  // regions populated during the initial-mark pause.
! class G1CMRootRegions VALUE_OBJ_CLASS_SPEC {
  private:
    const G1SurvivorRegions* _survivors;
    G1ConcurrentMark*        _cm;
  
    volatile bool            _scan_in_progress;
--- 226,236 ----
  // root regions without having to mark them or do anything else to them.
  //
  // Currently, we only support root region scanning once (at the start
  // of the marking cycle) and the root regions are all the survivor
  // regions populated during the initial-mark pause.
! class G1CMRootRegions {
  private:
    const G1SurvivorRegions* _survivors;
    G1ConcurrentMark*        _cm;
  
    volatile bool            _scan_in_progress;
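Background note: G1CMRootRegions hands out the survivor regions recorded during the initial-mark pause to marking threads until all of them have been scanned, and the volatile _scan_in_progress flag lets other code observe (and wait for) completion of that scan. A rough standalone sketch of the claim-until-empty pattern follows; the names and the use of std::atomic instead of HotSpot's Atomic wrappers are assumptions for illustration, not the HotSpot interface.

#include <atomic>
#include <cstddef>
#include <vector>

struct Region { /* stand-in for HeapRegion */ };

class RootRegionClaimer {
  std::vector<Region*> _survivors;              // filled during the initial-mark pause
  std::atomic<size_t>  _claim_index{0};
  std::atomic<bool>    _scan_in_progress{false};

public:
  // Called once, inside the pause, before concurrent workers start.
  void prepare(const std::vector<Region*>& survivors) {
    _survivors = survivors;
    _claim_index.store(0);
    _scan_in_progress.store(!_survivors.empty());
  }

  // Called by each worker: returns the next unclaimed region, or NULL when all are taken.
  Region* claim_next() {
    size_t idx = _claim_index.fetch_add(1);
    if (idx >= _survivors.size()) {
      return NULL;
    }
    return _survivors[idx];
  }

  bool scan_in_progress() const { return _scan_in_progress.load(); }

  // The last worker to finish clears the flag (the real code also notifies waiters).
  void scan_finished() { _scan_in_progress.store(false); }
};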