
src/share/vm/gc_implementation/g1/g1ParScanThreadState.hpp

rev 7471 : 8060025: Object copy time regressions after JDK-8031323 and JDK-8057536
Summary: Evaluate and improve object copy time by micro-optimizations and splitting out slow and fast paths aggressively.
Reviewed-by:
Contributed-by: Tony Printezis <tprintezis@twitter.com>, Thomas Schatzl <thomas.schatzl@oracle.com>
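Reviewer note: the summary mentions "splitting out slow and fast paths aggressively". The sketch below only illustrates that general pattern; it is not code from this patch and every name in it is invented. The idea is to keep the common bump-pointer case small enough to inline at every call site and to push the rare refill case out of line.

#include <stddef.h>

// Illustration only: inline fast path plus out-of-line slow path.
class ExamplePlabAllocator {
  char*  _top;
  char*  _end;
  char   _buffer[4096];

  // Slow path, kept out of line so the inlined fast path stays small.
  char* allocate_slow(size_t byte_sz);

public:
  ExamplePlabAllocator() : _top(_buffer), _end(_buffer + sizeof(_buffer)) { }

  // Fast path: a simple bump-pointer allocation the compiler can inline at call sites.
  char* allocate(size_t byte_sz) {
    char* obj = _top;
    if (byte_sz <= (size_t)(_end - obj)) {   // common case: room left in the buffer
      _top = obj + byte_sz;
      return obj;
    }
    return allocate_slow(byte_sz);           // rare case: hand everything to the slow path
  }
};

// A real slow path would refill the buffer or allocate directly; kept trivial here.
char* ExamplePlabAllocator::allocate_slow(size_t) {
  return NULL;
}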

*** 47,57 ****
    G1RemSet* _g1_rem;
    G1ParGCAllocator* _g1_par_allocator;
    ageTable _age_table;
!   G1ParScanClosure _scanner;
    size_t _alloc_buffer_waste;
    size_t _undo_waste;
--- 47,59 ----
    G1RemSet* _g1_rem;
    G1ParGCAllocator* _g1_par_allocator;
    ageTable _age_table;
!   in_cset_state_t _dest[InCSetState::Num];
!   // Local tenuring threshold.
!   uint _tenuring_threshold;
    G1ParScanClosure _scanner;
    size_t _alloc_buffer_waste;
    size_t _undo_waste;
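Reviewer note: the new _dest[] table and _tenuring_threshold field give each worker thread its own mapping from a source in-cset state to the destination it should copy into. The sketch below shows one plausible way such a table could be filled in; it is not code from this patch, and it assumes the InCSetState::NotInCSet/Young/Old constants referenced in the next hunk convert to in_cset_state_t.

// Sketch only (not from this patch): a possible per-thread _dest[] initialization.
static void example_init_dest_table(in_cset_state_t dest[InCSetState::Num]) {
  // Never handed out through dest(); kept only as a marker value.
  dest[InCSetState::NotInCSet] = InCSetState::NotInCSet;
  // A young object that cannot stay in the survivor space moves on to old.
  dest[InCSetState::Young]     = InCSetState::Old;
  // An old object being evacuated stays in the old generation.
  dest[InCSetState::Old]       = InCSetState::Old;
}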
*** 80,89 ****
--- 82,99 ----
    void add_to_undo_waste(size_t waste) { _undo_waste += waste; }
    DirtyCardQueue& dirty_card_queue() { return _dcq; }
    G1SATBCardTableModRefBS* ctbs() { return _ct_bs; }
+   in_cset_state_t dest(uint index) const {
+     assert(index < InCSetState::Num,
+            err_msg("dest index out-of-bounds: %u", index));
+     assert(_dest[index] != InCSetState::NotInCSet,
+            err_msg("dest is invalid: %u", index));
+     return _dest[index];
+   }
+ 
  public:
    G1ParScanThreadState(G1CollectedHeap* g1h, uint queue_num, ReferenceProcessor* rp);
    ~G1ParScanThreadState();
    ageTable* age_table() { return &_age_table; }
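Reviewer note: the new dest(uint index) accessor bounds-checks the index and rejects an entry that was never initialized. A minimal, purely hypothetical caller would look like the following; using InCSetState::Young as the index is only an example taken from the constants in the asserts.

// Hypothetical use of the accessor added above: look up where a young object
// should be copied to. The asserts guarantee the result is never NotInCSet.
in_cset_state_t dest_state = dest(InCSetState::Young);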
*** 110,120 ****
        if (ctbs()->mark_card_deferred(card_index)) {
          dirty_card_queue().enqueue((jbyte*)ctbs()->byte_for_index(card_index));
        }
      }
    }
-  public:
    void set_evac_failure_closure(OopsInHeapRegionClosure* evac_failure_cl) {
      _evac_failure_cl = evac_failure_cl;
    }
--- 120,129 ----
*** 191,203 ****
    template <class T> inline void do_oop_evac(T* p, HeapRegion* from);
    template <class T> inline void deal_with_reference(T* ref_to_scan);
    inline void dispatch_reference(StarTask ref);
   public:
!   oop copy_to_survivor_space(oop const obj, markOop const old_mark);
    void trim_queue();
    inline void steal_and_trim_queue(RefToScanQueueSet *task_queues);
  };
--- 200,223 ----
    template <class T> inline void do_oop_evac(T* p, HeapRegion* from);
    template <class T> inline void deal_with_reference(T* ref_to_scan);
    inline void dispatch_reference(StarTask ref);
+ 
+   // Tries to allocate word_sz in the PLAB of the next "generation" after trying to
+   // allocate into dest. State is the original (source) cset state for the object
+   // that is allocated for.
+   // Returns a non-NULL pointer if successful, and updates dest if required.
+   HeapWord* allocate_in_next_plab(in_cset_state_t const state,
+                                   in_cset_state_t* dest,
+                                   size_t word_sz,
+                                   AllocationContext_t const context);
+ 
+   inline in_cset_state_t next_state(in_cset_state_t const state, markOop const m, uint& age);
   public:
!   oop copy_to_survivor_space(in_cset_state_t const state, oop const obj, markOop const old_mark);
    void trim_queue();
    inline void steal_and_trim_queue(RefToScanQueueSet *task_queues);
  };
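Reviewer note: the comment on allocate_in_next_plab() and the new next_state() declaration suggest a retry scheme: pick a destination from the source state, try the PLAB for that destination, and on failure move on to the next "generation", updating the destination as a side effect. The sketch below only shows that shape under those assumptions; it is not the copy_to_survivor_space() body from this patch, and the member name, plab_allocate(), AllocationContext::current() and handle_copy_failure() are placeholders.

// Shape sketch only, derived from the comments and declarations above.
oop G1ParScanThreadState::example_copy(in_cset_state_t const state,
                                       oop const obj,
                                       markOop const old_mark) {
  const size_t word_sz = obj->size();
  uint age = 0;
  // Pick the destination (e.g. survivor vs. old) from the source state and mark.
  in_cset_state_t dest_state = next_state(state, old_mark, age);
  AllocationContext_t context = AllocationContext::current();       // placeholder

  HeapWord* obj_ptr = plab_allocate(dest_state, word_sz, context);  // placeholder fast path
  if (obj_ptr == NULL) {
    // Fast path failed: try the PLAB of the next "generation";
    // dest_state may be updated as a side effect.
    obj_ptr = allocate_in_next_plab(state, &dest_state, word_sz, context);
    if (obj_ptr == NULL) {
      return handle_copy_failure(obj, old_mark);                    // placeholder
    }
  }
  // ... copy the object into obj_ptr, install the forwarding pointer,
  // update the age table and push any work discovered ...
  return oop(obj_ptr);
}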