--- old/src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp 2014-07-21 15:24:37.879616862 +0200 +++ new/src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp 2014-07-21 15:24:37.793614350 +0200 @@ -198,13 +198,37 @@ }; // Instances of this class are used for quick tests on whether a reference points -// into the collection set. Each of the array's elements denotes whether the -// corresponding region is in the collection set. -class G1FastCSetBiasedMappedArray : public G1BiasedMappedArray { +// into the collection set or is a humongous object (points into a humongous +// object). +// Each of the array's elements denotes whether the corresponding region is in +// the collection set or a humongous region. +// We use this to quickly reclaim humongous objects: by making a humongous region +// succeed this test, we sort-of add it to the collection set whose objects are +// supposed to be evacuated. However, since the region is humongous, evacuation +// will automatically fail the test to allocate it into a PLAB. We catch this +// condition (in this slow-path), and mark that region as "live" in a side table. +// At the end of GC, we use this information, among others, to determine whether +// we can reclaim the humongous object or not. 
+class G1FastCSetBiasedMappedArray : public G1BiasedMappedArray { + public: + enum in_cset_state_t { + InNeither, // neither in collection set nor humongous + InCSet, // region is in collection set only + IsHumongous // region is a humongous start region + }; protected: - bool default_value() const { return false; } + char default_value() const { return InNeither; } public: - void clear() { G1BiasedMappedArray::clear(); } + void set_humongous(uintptr_t index) { assert(get_by_index(index) != InCSet, "Should not overwrite InCSet values"); set_by_index(index, IsHumongous); } + void clear_humongous(uintptr_t index) { + set_by_index(index, InNeither); + } + void set_in_cset(uintptr_t index) { assert(get_by_index(index) != IsHumongous, "Should not overwrite IsHumongous value"); set_by_index(index, InCSet); } + + bool is_in_cset_or_humongous(HeapWord* addr) const { return get_by_address(addr) != InNeither; } + bool is_in_cset(HeapWord* addr) const { return get_by_address(addr) == InCSet; } + in_cset_state_t at(HeapWord* addr) const { return (in_cset_state_t)get_by_address(addr); } + void clear() { G1BiasedMappedArray::clear(); } }; class RefineCardTableEntryClosure; @@ -237,6 +261,7 @@ friend class EvacPopObjClosure; friend class G1ParCleanupCTTask; + friend class G1FreeHumongousRegionClosure; // Other related classes. friend class G1MarkSweep; @@ -267,6 +292,9 @@ // It keeps track of the humongous regions. HeapRegionSet _humongous_set; + void clear_humongous_is_live_table(); + void eagerly_reclaim_humongous_regions(); + // The number of regions we could create by expansion. uint _expansion_regions; @@ -372,6 +400,26 @@ // corresponding region is in the collection set or not. G1FastCSetBiasedMappedArray _in_cset_fast_test; + // Records whether the region at the given index is kept live by roots or + // references from the young generation. 
+ class HumongousIsLiveBiasedMappedArray : public G1BiasedMappedArray { + protected: + bool default_value() const { return false; } + public: + void clear() { G1BiasedMappedArray::clear(); } + void set_live(uint region) { + set_by_index(region, true); + } + bool is_live(uint region) { + return get_by_index(region); + } + }; + + HumongousIsLiveBiasedMappedArray _humongous_is_live; + // Stores whether during humongous object registration we found candidate regions. + // If not, we can skip a few steps. + bool _has_humongous_reclaim_candidates; + volatile unsigned _gc_time_stamp; size_t* _surviving_young_words; @@ -690,10 +738,24 @@ virtual void gc_prologue(bool full); virtual void gc_epilogue(bool full); + inline void set_humongous_is_live(oop obj); + + bool humongous_is_live(uint region) { + return _humongous_is_live.is_live(region); + } + + // Returns whether the given region (which must be a humongous (start) region) + // is to be considered conservatively live regardless of any other conditions. + bool humongous_region_is_always_live(uint index); + // Register the given region to be part of the collection set. + inline void register_humongous_region_with_in_cset_fast_test(uint index); + // Register regions with humongous objects (actually on the start region) in + // the in_cset_fast_test table. + void register_humongous_regions_with_in_cset_fast_test(); // We register a region with the fast "in collection set" test. We // simply set to true the array slot corresponding to this region. void register_region_with_in_cset_fast_test(HeapRegion* r) { - _in_cset_fast_test.set_by_index(r->hrs_index(), true); + _in_cset_fast_test.set_in_cset(r->hrs_index()); } // This is a fast test on whether a reference points into the @@ -1283,9 +1345,15 @@ virtual bool is_in(const void* p) const; // Return "TRUE" iff the given object address is within the collection - // set. + // set. Slow implementation. 
inline bool obj_in_cs(oop obj); + inline bool is_in_cset(oop obj); + + inline bool is_in_cset_or_humongous(const oop obj); + + inline G1FastCSetBiasedMappedArray::in_cset_state_t in_cset_state(const oop obj); + // Return "TRUE" iff the given object address is in the reserved // region of g1. bool is_in_g1_reserved(const void* p) const { @@ -1340,6 +1408,10 @@ // Return the region with the given index. It assumes the index is valid. inline HeapRegion* region_at(uint index) const; + // Calculate the region index of the given address. Given address must be + // within the heap. + inline uint addr_to_region(HeapWord* addr) const; + // Divide the heap region sequence into "chunks" of some size (the number // of regions divided by the number of parallel threads times some // overpartition factor, currently 4). Assumes that this will be called