< prev index next >

src/share/vm/gc/parallel/psParallelCompact.cpp

Print this page
rev 9846 : [mq]: par-scav-patch
rev 9847 : 8146987: Improve Parallel GC Full GC by caching results of live_words_in_range()
Summary: A large part of time in the parallel scavenge collector is spent finding out the amount of live words within memory ranges to find out where to move an object to. Try to incrementally calculate this value.
Reviewed-by: tschatzl, mgerdin
Contributed-by: ray alex <sky1young@gmail.com>

*** 1,7 **** /* ! * Copyright (c) 2005, 2015, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. --- 1,7 ---- /* ! * Copyright (c) 2005, 2016, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation.
*** 749,759 **** *target_next = dest_addr; return true; } ! HeapWord* ParallelCompactData::calc_new_pointer(HeapWord* addr) { assert(addr != NULL, "Should detect NULL oop earlier"); assert(ParallelScavengeHeap::heap()->is_in(addr), "not in heap"); assert(PSParallelCompact::mark_bitmap()->is_marked(addr), "not marked"); // Region covering the object. --- 749,759 ---- *target_next = dest_addr; return true; } ! HeapWord* ParallelCompactData::calc_new_pointer(HeapWord* addr, ParCompactionManager* cm) { assert(addr != NULL, "Should detect NULL oop earlier"); assert(ParallelScavengeHeap::heap()->is_in(addr), "not in heap"); assert(PSParallelCompact::mark_bitmap()->is_marked(addr), "not marked"); // Region covering the object.
*** 786,796 **** HeapWord* const search_start = block_align_down(addr); const size_t block_offset = addr_to_block_ptr(addr)->offset(); const ParMarkBitMap* bitmap = PSParallelCompact::mark_bitmap(); ! const size_t live = bitmap->live_words_in_range(search_start, oop(addr)); result += block_offset + live; DEBUG_ONLY(PSParallelCompact::check_new_location(addr, result)); return result; } --- 786,796 ---- HeapWord* const search_start = block_align_down(addr); const size_t block_offset = addr_to_block_ptr(addr)->offset(); const ParMarkBitMap* bitmap = PSParallelCompact::mark_bitmap(); ! const size_t live = bitmap->live_words_in_range(cm, search_start, oop(addr)); result += block_offset + live; DEBUG_ONLY(PSParallelCompact::check_new_location(addr, result)); return result; }
*** 823,837 **** PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure; bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); } - PSParallelCompact::AdjustPointerClosure PSParallelCompact::_adjust_pointer_closure; - PSParallelCompact::AdjustKlassClosure PSParallelCompact::_adjust_klass_closure; - void PSParallelCompact::AdjustKlassClosure::do_klass(Klass* klass) { ! klass->oops_do(&PSParallelCompact::_adjust_pointer_closure); } void PSParallelCompact::post_initialize() { ParallelScavengeHeap* heap = ParallelScavengeHeap::heap(); MemRegion mr = heap->reserved_region(); --- 823,835 ---- PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure; bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); } void PSParallelCompact::AdjustKlassClosure::do_klass(Klass* klass) { ! PSParallelCompact::AdjustPointerClosure closure(_cm); ! klass->oops_do(&closure); } void PSParallelCompact::post_initialize() { ParallelScavengeHeap* heap = ParallelScavengeHeap::heap(); MemRegion mr = heap->reserved_region();
*** 975,984 **** --- 973,984 ---- DEBUG_ONLY(mark_bitmap()->verify_clear();) DEBUG_ONLY(summary_data().verify_clear();) // Have worker threads release resources the next time they run a task. gc_task_manager()->release_all_resources(); + + ParCompactionManager::reset_cache_for_bitmap(); } void PSParallelCompact::post_compact() { GCTraceTime(Trace, gc, phases) tm("Post Compact", &_gc_timer);
*** 1799,1809 **** DerivedPointerTable::set_active(false); #endif // adjust_roots() updates Universe::_intArrayKlassObj which is // needed by the compaction for filling holes in the dense prefix. ! adjust_roots(); compaction_start.update(); compact(); // Reset the mark bitmap, summary data, and do other bookkeeping. Must be --- 1799,1809 ---- DerivedPointerTable::set_active(false); #endif // adjust_roots() updates Universe::_intArrayKlassObj which is // needed by the compaction for filling holes in the dense prefix. ! adjust_roots(vmthread_cm); compaction_start.update(); compact(); // Reset the mark bitmap, summary data, and do other bookkeeping. Must be
*** 2140,2182 **** public: bool do_object_b(oop p) { return true; } }; static PSAlwaysTrueClosure always_true; ! void PSParallelCompact::adjust_roots() { // Adjust the pointers to reflect the new locations GCTraceTime(Trace, gc, phases) tm("Adjust Roots", &_gc_timer); // Need new claim bits when tracing through and adjusting pointers. ClassLoaderDataGraph::clear_claimed_marks(); // General strong roots. ! Universe::oops_do(adjust_pointer_closure()); ! JNIHandles::oops_do(adjust_pointer_closure()); // Global (strong) JNI handles ! CLDToOopClosure adjust_from_cld(adjust_pointer_closure()); ! Threads::oops_do(adjust_pointer_closure(), &adjust_from_cld, NULL); ! ObjectSynchronizer::oops_do(adjust_pointer_closure()); ! FlatProfiler::oops_do(adjust_pointer_closure()); ! Management::oops_do(adjust_pointer_closure()); ! JvmtiExport::oops_do(adjust_pointer_closure()); ! SystemDictionary::oops_do(adjust_pointer_closure()); ! ClassLoaderDataGraph::oops_do(adjust_pointer_closure(), adjust_klass_closure(), true); // Now adjust pointers in remaining weak roots. (All of which should // have been cleared if they pointed to non-surviving objects.) // Global (weak) JNI handles ! JNIHandles::weak_oops_do(&always_true, adjust_pointer_closure()); ! CodeBlobToOopClosure adjust_from_blobs(adjust_pointer_closure(), CodeBlobToOopClosure::FixRelocations); CodeCache::blobs_do(&adjust_from_blobs); ! StringTable::oops_do(adjust_pointer_closure()); ! ref_processor()->weak_oops_do(adjust_pointer_closure()); // Roots were visited so references into the young gen in roots // may have been scanned. Process them also. // Should the reference processor have a span that excludes // young gen objects? ! PSScavenge::reference_processor()->weak_oops_do(adjust_pointer_closure()); } // Helper class to print 8 region numbers per line and then print the total at the end. class FillableRegionLogger : public StackObj { private: --- 2140,2185 ---- public: bool do_object_b(oop p) { return true; } }; static PSAlwaysTrueClosure always_true; ! void PSParallelCompact::adjust_roots(ParCompactionManager* cm) { // Adjust the pointers to reflect the new locations GCTraceTime(Trace, gc, phases) tm("Adjust Roots", &_gc_timer); // Need new claim bits when tracing through and adjusting pointers. ClassLoaderDataGraph::clear_claimed_marks(); + PSParallelCompact::AdjustPointerClosure closure(cm); + PSParallelCompact::AdjustKlassClosure kclosure(cm); + // General strong roots. ! Universe::oops_do(&closure); ! JNIHandles::oops_do(&closure); // Global (strong) JNI handles ! CLDToOopClosure adjust_from_cld(&closure); ! Threads::oops_do(&closure, &adjust_from_cld, NULL); ! ObjectSynchronizer::oops_do(&closure); ! FlatProfiler::oops_do(&closure); ! Management::oops_do(&closure); ! JvmtiExport::oops_do(&closure); ! SystemDictionary::oops_do(&closure); ! ClassLoaderDataGraph::oops_do(&closure, &kclosure, true); // Now adjust pointers in remaining weak roots. (All of which should // have been cleared if they pointed to non-surviving objects.) // Global (weak) JNI handles ! JNIHandles::weak_oops_do(&always_true, &closure); ! CodeBlobToOopClosure adjust_from_blobs(&closure, CodeBlobToOopClosure::FixRelocations); CodeCache::blobs_do(&adjust_from_blobs); ! StringTable::oops_do(&closure); ! ref_processor()->weak_oops_do(&closure); // Roots were visited so references into the young gen in roots // may have been scanned. Process them also. // Should the reference processor have a span that excludes // young gen objects? ! PSScavenge::reference_processor()->weak_oops_do(&closure); } // Helper class to print 8 region numbers per line and then print the total at the end. class FillableRegionLogger : public StackObj { private:
*** 3060,3081 **** Copy::aligned_conjoint_words(source(), destination(), words); } update_state(words); } ! void InstanceKlass::oop_pc_update_pointers(oop obj) { ! oop_oop_iterate_oop_maps<true>(obj, PSParallelCompact::adjust_pointer_closure()); } ! void InstanceMirrorKlass::oop_pc_update_pointers(oop obj) { ! InstanceKlass::oop_pc_update_pointers(obj); ! oop_oop_iterate_statics<true>(obj, PSParallelCompact::adjust_pointer_closure()); } ! void InstanceClassLoaderKlass::oop_pc_update_pointers(oop obj) { ! InstanceKlass::oop_pc_update_pointers(obj); } #ifdef ASSERT template <class T> static void trace_reference_gc(const char *s, oop obj, T* referent_addr, --- 3063,3086 ---- Copy::aligned_conjoint_words(source(), destination(), words); } update_state(words); } ! void InstanceKlass::oop_pc_update_pointers(oop obj, ParCompactionManager* cm) { ! PSParallelCompact::AdjustPointerClosure closure(cm); ! oop_oop_iterate_oop_maps<true>(obj, &closure); } ! void InstanceMirrorKlass::oop_pc_update_pointers(oop obj, ParCompactionManager* cm) { ! InstanceKlass::oop_pc_update_pointers(obj, cm); ! PSParallelCompact::AdjustPointerClosure closure(cm); ! oop_oop_iterate_statics<true>(obj, &closure); } ! void InstanceClassLoaderKlass::oop_pc_update_pointers(oop obj, ParCompactionManager* cm) { ! InstanceKlass::oop_pc_update_pointers(obj, cm); } #ifdef ASSERT template <class T> static void trace_reference_gc(const char *s, oop obj, T* referent_addr,
*** 3090,3136 **** p2i(discovered_addr), discovered_addr ? p2i(oopDesc::load_decode_heap_oop(discovered_addr)) : NULL); } #endif template <class T> ! static void oop_pc_update_pointers_specialized(oop obj) { T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); ! PSParallelCompact::adjust_pointer(referent_addr); T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); ! PSParallelCompact::adjust_pointer(next_addr); T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); ! PSParallelCompact::adjust_pointer(discovered_addr); debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj, referent_addr, next_addr, discovered_addr);) } ! void InstanceRefKlass::oop_pc_update_pointers(oop obj) { ! InstanceKlass::oop_pc_update_pointers(obj); if (UseCompressedOops) { ! oop_pc_update_pointers_specialized<narrowOop>(obj); } else { ! oop_pc_update_pointers_specialized<oop>(obj); } } ! void ObjArrayKlass::oop_pc_update_pointers(oop obj) { assert(obj->is_objArray(), "obj must be obj array"); ! oop_oop_iterate_elements<true>(objArrayOop(obj), PSParallelCompact::adjust_pointer_closure()); } ! void TypeArrayKlass::oop_pc_update_pointers(oop obj) { assert(obj->is_typeArray(),"must be a type array"); } ParMarkBitMapClosure::IterationStatus MoveAndUpdateClosure::do_addr(HeapWord* addr, size_t words) { assert(destination() != NULL, "sanity"); assert(bitmap()->obj_size(addr) == words, "bad size"); _source = addr; ! assert(PSParallelCompact::summary_data().calc_new_pointer(source()) == destination(), "wrong destination"); if (words > words_remaining()) { return ParMarkBitMap::would_overflow; } --- 3095,3142 ---- p2i(discovered_addr), discovered_addr ? p2i(oopDesc::load_decode_heap_oop(discovered_addr)) : NULL); } #endif template <class T> ! static void oop_pc_update_pointers_specialized(oop obj, ParCompactionManager* cm) { T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); ! PSParallelCompact::adjust_pointer(referent_addr, cm); T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); ! PSParallelCompact::adjust_pointer(next_addr, cm); T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); ! PSParallelCompact::adjust_pointer(discovered_addr, cm); debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj, referent_addr, next_addr, discovered_addr);) } ! void InstanceRefKlass::oop_pc_update_pointers(oop obj, ParCompactionManager* cm) { ! InstanceKlass::oop_pc_update_pointers(obj, cm); if (UseCompressedOops) { ! oop_pc_update_pointers_specialized<narrowOop>(obj, cm); } else { ! oop_pc_update_pointers_specialized<oop>(obj, cm); } } ! void ObjArrayKlass::oop_pc_update_pointers(oop obj, ParCompactionManager* cm) { assert(obj->is_objArray(), "obj must be obj array"); ! PSParallelCompact::AdjustPointerClosure closure(cm); ! oop_oop_iterate_elements<true>(objArrayOop(obj), &closure); } ! void TypeArrayKlass::oop_pc_update_pointers(oop obj, ParCompactionManager* cm) { assert(obj->is_typeArray(),"must be a type array"); } ParMarkBitMapClosure::IterationStatus MoveAndUpdateClosure::do_addr(HeapWord* addr, size_t words) { assert(destination() != NULL, "sanity"); assert(bitmap()->obj_size(addr) == words, "bad size"); _source = addr; ! assert(PSParallelCompact::summary_data().calc_new_pointer(source(), compaction_manager()) == destination(), "wrong destination"); if (words > words_remaining()) { return ParMarkBitMap::would_overflow; }
< prev index next >