src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp

Print this page
rev 6670 : fast reclaim main patch

*** 196,212 **** G1STWIsAliveClosure(G1CollectedHeap* g1) : _g1(g1) {} bool do_object_b(oop p); }; // Instances of this class are used for quick tests on whether a reference points ! // into the collection set. Each of the array's elements denotes whether the ! // corresponding region is in the collection set. ! class G1FastCSetBiasedMappedArray : public G1BiasedMappedArray<bool> { protected: ! bool default_value() const { return false; } public: ! void clear() { G1BiasedMappedArray<bool>::clear(); } }; class RefineCardTableEntryClosure; class G1CollectedHeap : public SharedHeap { --- 196,236 ---- G1STWIsAliveClosure(G1CollectedHeap* g1) : _g1(g1) {} bool do_object_b(oop p); }; // Instances of this class are used for quick tests on whether a reference points ! // into the collection set or is a humongous object (points into a humongous ! // object). ! // Each of the array's elements denotes whether the corresponding region is in ! // the collection set or a humongous region. ! // We use this to quickly reclaim humongous objects: by making a humongous region ! // succeed this test, we sort-of add it to the collection set, whose objects are ! // supposed to be evacuated. However, since the region is humongous, evacuation ! // will automatically fail the test to allocate it into a PLAB. We catch this ! // condition (in this slow-path), and mark that region as "live" in a side table. ! // At the end of GC, we use this information, among other information, to determine whether ! // we can reclaim the humongous object or not. ! class G1FastCSetBiasedMappedArray : public G1BiasedMappedArray<char> { ! private: ! enum { ! InNeither, // neither in collection set nor humongous ! InCSet, // region is in collection set only ! IsHumongous // region is a humongous start region ! }; protected: ! char default_value() const { return InNeither; } public: ! 
void set_humongous(uintptr_t index) { assert(get_by_index(index) != InCSet, "Should not overwrite InCSet values"); set_by_index(index, IsHumongous); } ! void clear_humongous(uintptr_t index) { ! set_by_index(index, InNeither); ! } ! void set_in_cset(uintptr_t index) { assert(get_by_index(index) != IsHumongous, "Should not overwrite InCSetOrHumongous value"); set_by_index(index, InCSet); } ! ! bool is_in_cset_or_humongous(HeapWord* addr) const { return get_by_address(addr) != InNeither; } ! bool is_in_cset_and_humongous(HeapWord* addr) const { return get_by_address(addr) == IsHumongous; } ! bool is_in_cset(HeapWord* addr) const { return get_by_address(addr) == InCSet; } ! void clear() { G1BiasedMappedArray<char>::clear(); } }; class RefineCardTableEntryClosure; class G1CollectedHeap : public SharedHeap {
*** 235,244 **** --- 259,269 ---- friend class RegionResetter; friend class CountRCClosure; friend class EvacPopObjClosure; friend class G1ParCleanupCTTask; + friend class G1FreeHumongousRegionClosure; // Other related classes. friend class G1MarkSweep; private: // The one and only G1CollectedHeap, so static functions can find it.
*** 265,274 **** --- 290,302 ---- HeapRegionSet _old_set; // It keeps track of the humongous regions. HeapRegionSet _humongous_set; + void clear_humongous_is_live_table(); + void eagerly_reclaim_humongous_regions(); + // The number of regions we could create by expansion. uint _expansion_regions; // The block offset table for the G1 heap. G1BlockOffsetSharedArray* _bot_shared;
*** 370,379 **** --- 398,427 ---- // This array is used for a quick test on whether a reference points into // the collection set or not. Each of the array's elements denotes whether the // corresponding region is in the collection set or not. G1FastCSetBiasedMappedArray _in_cset_fast_test; + // Records whether the region at the given index is kept live by roots or + // references from the young generation. + class HumongousIsLiveBiasedMappedArray : public G1BiasedMappedArray<bool> { + protected: + bool default_value() const { return false; } + public: + void clear() { G1BiasedMappedArray<bool>::clear(); } + void set_live(uint region) { + set_by_index(region, true); + } + bool is_live(uint region) { + return get_by_index(region); + } + }; + + HumongousIsLiveBiasedMappedArray _humongous_is_live; + // Stores whether during humongous object registration we found candidate regions. + // If not, we can skip a few steps. + bool _has_humongous_reclaim_candidates; + volatile unsigned _gc_time_stamp; size_t* _surviving_young_words; G1HRPrinter _hr_printer;
*** 688,701 **** // Do anything common to GC's. virtual void gc_prologue(bool full); virtual void gc_epilogue(bool full); // We register a region with the fast "in collection set" test. We // simply set to true the array slot corresponding to this region. void register_region_with_in_cset_fast_test(HeapRegion* r) { ! _in_cset_fast_test.set_by_index(r->hrs_index(), true); } // This is a fast test on whether a reference points into the // collection set or not. Assume that the reference // points into the heap. --- 736,763 ---- // Do anything common to GC's. virtual void gc_prologue(bool full); virtual void gc_epilogue(bool full); + inline void set_humongous_is_live(oop obj); + + bool humongous_is_live(uint region) { + return _humongous_is_live.is_live(region); + } + + // Returns whether the given region (which must be a humongous (start) region) + // is to be considered conservatively live regardless of any other conditions. + bool humongous_region_is_always_live(HeapRegion* region); + // Register the given region to be part of the collection set. + inline void register_humongous_region_with_in_cset_fast_test(uint index); + // Register regions with humongous objects (actually on the start region) in + // the in_cset_fast_test table. + void register_humongous_regions_with_in_cset_fast_test(); // We register a region with the fast "in collection set" test. We // simply set to true the array slot corresponding to this region. void register_region_with_in_cset_fast_test(HeapRegion* r) { ! _in_cset_fast_test.set_in_cset(r->hrs_index()); } // This is a fast test on whether a reference points into the // collection set or not. Assume that the reference // points into the heap.
*** 1281,1293 **** // Returns "TRUE" iff "p" points into the committed areas of the heap. virtual bool is_in(const void* p) const; // Return "TRUE" iff the given object address is within the collection ! // set. inline bool obj_in_cs(oop obj); // Return "TRUE" iff the given object address is in the reserved // region of g1. bool is_in_g1_reserved(const void* p) const { return _g1_reserved.contains(p); } --- 1343,1361 ---- // Returns "TRUE" iff "p" points into the committed areas of the heap. virtual bool is_in(const void* p) const; // Return "TRUE" iff the given object address is within the collection ! // set. Slow implementation. inline bool obj_in_cs(oop obj); + inline bool is_in_cset(oop obj); + + inline bool is_in_cset_or_humongous(const oop obj); + + inline bool is_in_cset_and_humongous(const oop obj); + // Return "TRUE" iff the given object address is in the reserved // region of g1. bool is_in_g1_reserved(const void* p) const { return _g1_reserved.contains(p); }
*** 1338,1347 **** --- 1406,1419 ---- void heap_region_iterate(HeapRegionClosure* blk) const; // Return the region with the given index. It assumes the index is valid. inline HeapRegion* region_at(uint index) const; + // Calculate the region index of the given address. Given address must be + // within the heap. + inline uint addr_to_region(HeapWord* addr) const; + // Divide the heap region sequence into "chunks" of some size (the number // of regions divided by the number of parallel threads times some // overpartition factor, currently 4). Assumes that this will be called // in parallel by ParallelGCThreads worker threads with distinct worker // ids in the range [0..max(ParallelGCThreads-1, 1)], that all parallel