/*
 * Copyright (c) 2005, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc/parallel/gcTaskManager.hpp"
#include "gc/parallel/objectStartArray.hpp"
#include "gc/parallel/parMarkBitMap.hpp"
#include "gc/parallel/parallelScavengeHeap.hpp"
#include "gc/parallel/psCompactionManager.inline.hpp"
#include "gc/parallel/psOldGen.hpp"
#include "gc/parallel/psParallelCompact.inline.hpp"
#include "gc/shared/taskqueue.inline.hpp"
#include "logging/log.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/instanceMirrorKlass.inline.hpp"
#include "oops/objArrayKlass.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.inline.hpp"

PSOldGen*               ParCompactionManager::_old_gen = NULL;
ParCompactionManager**  ParCompactionManager::_manager_array = NULL;

RegionTaskQueue**       ParCompactionManager::_region_list = NULL;

OopTaskQueueSet*        ParCompactionManager::_stack_array = NULL;
ParCompactionManager::ObjArrayTaskQueueSet*
                        ParCompactionManager::_objarray_queues = NULL;
ObjectStartArray*       ParCompactionManager::_start_array = NULL;
ParMarkBitMap*          ParCompactionManager::_mark_bitmap = NULL;
RegionTaskQueueSet*     ParCompactionManager::_region_array = NULL;

uint*                   ParCompactionManager::_recycled_stack_index = NULL;
int                     ParCompactionManager::_recycled_top = -1;
int                     ParCompactionManager::_recycled_bottom = -1;

ParCompactionManager::ParCompactionManager() :
    _action(CopyAndUpdate),
    _region_stack(NULL),
    _region_stack_index((uint)max_uintx) {

  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();

  _old_gen = heap->old_gen();
  _start_array = old_gen()->start_array();

  marking_stack()->initialize();
  _objarray_stack.initialize();

  reset_bitmap_query_cache();
}

ParCompactionManager::~ParCompactionManager() {
  // _recycled_stack_index is allocated with NEW_C_HEAP_ARRAY, so release it
  // with the matching FREE_C_HEAP_ARRAY rather than operator delete.
  FREE_C_HEAP_ARRAY(uint, _recycled_stack_index);
}
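
// Set up the state shared by all compaction managers: the per-thread region
// lists, the recycled-index array, and the task queue sets used for work
// stealing.  One manager (and region list) is created per GC worker thread,
// plus one more for the VMThread, which does not take part in stealing.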
void ParCompactionManager::initialize(ParMarkBitMap* mbm) {
  assert(PSParallelCompact::gc_task_manager() != NULL,
    "Needed for initialization");

  _mark_bitmap = mbm;

  uint parallel_gc_threads = PSParallelCompact::gc_task_manager()->workers();

  assert(_manager_array == NULL, "Attempt to initialize twice");
  _manager_array = NEW_C_HEAP_ARRAY(ParCompactionManager*, parallel_gc_threads+1, mtGC);
  guarantee(_manager_array != NULL, "Could not allocate manager_array");

  _region_list = NEW_C_HEAP_ARRAY(RegionTaskQueue*,
                         parallel_gc_threads+1, mtGC);
  guarantee(_region_list != NULL, "Could not allocate region_list");

  _recycled_stack_index = NEW_C_HEAP_ARRAY(uint, parallel_gc_threads, mtGC);

  // parallel_gc_threads + 1 to be consistent with the number of
  // compaction managers.
  for(uint i=0; i<parallel_gc_threads + 1; i++) {
    _region_list[i] = new RegionTaskQueue();
    region_list(i)->initialize();
  }

  _stack_array = new OopTaskQueueSet(parallel_gc_threads);
  guarantee(_stack_array != NULL, "Could not allocate stack_array");
  _objarray_queues = new ObjArrayTaskQueueSet(parallel_gc_threads);
  guarantee(_objarray_queues != NULL, "Could not allocate objarray_queues");
  _region_array = new RegionTaskQueueSet(parallel_gc_threads);
  guarantee(_region_array != NULL, "Could not allocate region_array");

  // Create and register the ParCompactionManager(s) for the worker threads.
  for(uint i=0; i<parallel_gc_threads; i++) {
    _manager_array[i] = new ParCompactionManager();
    guarantee(_manager_array[i] != NULL, "Could not create ParCompactionManager");
    stack_array()->register_queue(i, _manager_array[i]->marking_stack());
    _objarray_queues->register_queue(i, &_manager_array[i]->_objarray_stack);
    region_array()->register_queue(i, region_list(i));
  }

  // The VMThread gets its own ParCompactionManager, which is not available
  // for work stealing.
  _manager_array[parallel_gc_threads] = new ParCompactionManager();
  guarantee(_manager_array[parallel_gc_threads] != NULL,
    "Could not create ParCompactionManager");
  assert(PSParallelCompact::gc_task_manager()->workers() != 0,
    "Not initialized?");
}

void ParCompactionManager::reset_all_bitmap_query_caches() {
  uint parallel_gc_threads = PSParallelCompact::gc_task_manager()->workers();
  for (uint i=0; i<=parallel_gc_threads; i++) {
    _manager_array[i]->reset_bitmap_query_cache();
  }
}
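
// The recycled list is a fixed-size array used as a queue of region stack
// indices that worker threads have given back.  _recycled_bottom and
// _recycled_top both start at -1 (empty); pushes bump _recycled_top with an
// atomic add, and pops claim the next slot by advancing _recycled_bottom with
// a compare-and-swap, so multiple threads can push and pop concurrently.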
int ParCompactionManager::pop_recycled_stack_index() {
  assert(_recycled_bottom <= _recycled_top, "list is empty");
  // Get the next available index
  if (_recycled_bottom < _recycled_top) {
    uint cur, next, last;
    do {
      cur = _recycled_bottom;
      next = cur + 1;
      last = Atomic::cmpxchg(next, &_recycled_bottom, cur);
    } while (cur != last);
    return _recycled_stack_index[next];
  } else {
    return -1;
  }
}

void ParCompactionManager::push_recycled_stack_index(uint v) {
  // Get the next available index
  int cur = Atomic::add(1, &_recycled_top);
  _recycled_stack_index[cur] = v;
  assert(_recycled_bottom <= _recycled_top, "list top and bottom are wrong");
}

bool ParCompactionManager::should_update() {
  assert(action() != NotValid, "Action is not set");
  return (action() == ParCompactionManager::Update) ||
         (action() == ParCompactionManager::CopyAndUpdate) ||
         (action() == ParCompactionManager::UpdateAndCopy);
}

bool ParCompactionManager::should_copy() {
  assert(action() != NotValid, "Action is not set");
  return (action() == ParCompactionManager::Copy) ||
         (action() == ParCompactionManager::CopyAndUpdate) ||
         (action() == ParCompactionManager::UpdateAndCopy);
}

void ParCompactionManager::region_list_push(uint list_index,
                                            size_t region_index) {
  region_list(list_index)->push(region_index);
}

void ParCompactionManager::verify_region_list_empty(uint list_index) {
  assert(region_list(list_index)->is_empty(), "Not empty");
}

ParCompactionManager*
ParCompactionManager::gc_thread_compaction_manager(uint index) {
  assert(index < ParallelGCThreads, "index out of range");
  assert(_manager_array != NULL, "Sanity");
  return _manager_array[index];
}

void InstanceKlass::oop_pc_follow_contents(oop obj, ParCompactionManager* cm) {
  assert(obj != NULL, "can't follow the content of NULL object");

  cm->follow_klass(this);
  // Only mark the header and let the scan of the meta-data mark
  // everything else.

  ParCompactionManager::MarkAndPushClosure cl(cm);
  InstanceKlass::oop_oop_iterate_oop_maps<true>(obj, &cl);
}

void InstanceMirrorKlass::oop_pc_follow_contents(oop obj, ParCompactionManager* cm) {
  InstanceKlass::oop_pc_follow_contents(obj, cm);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->is_instance_klass() && InstanceKlass::cast(klass)->is_anonymous()) {
      cm->follow_class_loader(klass->class_loader_data());
    } else {
      cm->follow_klass(klass);
    }
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  ParCompactionManager::MarkAndPushClosure cl(cm);
  oop_oop_iterate_statics<true>(obj, &cl);
}

void InstanceClassLoaderKlass::oop_pc_follow_contents(oop obj, ParCompactionManager* cm) {
  InstanceKlass::oop_pc_follow_contents(obj, cm);

  ClassLoaderData * const loader_data = java_lang_ClassLoader::loader_data(obj);
  if (loader_data != NULL) {
    cm->follow_class_loader(loader_data);
  }
}
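
// Specialized on the (possibly compressed) oop type so the referent, next and
// discovered fields of the Reference can be loaded with the right encoding.
// If the referent is still unmarked and the reference processor discovers the
// Reference, the referent itself is not pushed here and will be traversed
// later; otherwise it is treated as an ordinary field and marked/pushed.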
template <class T>
static void oop_pc_follow_contents_specialized(InstanceRefKlass* klass, oop obj, ParCompactionManager* cm) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  log_develop_trace(gc, ref)("InstanceRefKlass::oop_pc_follow_contents " PTR_FORMAT, p2i(obj));
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->discover_reference(obj, klass->reference_type())) {
      // reference already enqueued, referent will be traversed later
      klass->InstanceKlass::oop_pc_follow_contents(obj, cm);
      log_develop_trace(gc, ref)(" Non NULL enqueued " PTR_FORMAT, p2i(obj));
      return;
    } else {
      // treat referent as normal oop
      log_develop_trace(gc, ref)(" Non NULL normal " PTR_FORMAT, p2i(obj));
      cm->mark_and_push(referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T next_oop = oopDesc::load_heap_oop(next_addr);
  if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
    T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
    log_develop_trace(gc, ref)(" Process discovered as normal " PTR_FORMAT, p2i(discovered_addr));
    cm->mark_and_push(discovered_addr);
  }
  cm->mark_and_push(next_addr);
  klass->InstanceKlass::oop_pc_follow_contents(obj, cm);
}


void InstanceRefKlass::oop_pc_follow_contents(oop obj, ParCompactionManager* cm) {
  if (UseCompressedOops) {
    oop_pc_follow_contents_specialized<narrowOop>(this, obj, cm);
  } else {
    oop_pc_follow_contents_specialized<oop>(this, obj, cm);
  }
}

void ObjArrayKlass::oop_pc_follow_contents(oop obj, ParCompactionManager* cm) {
  cm->follow_klass(this);

  if (UseCompressedOops) {
    oop_pc_follow_contents_specialized<narrowOop>(objArrayOop(obj), 0, cm);
  } else {
    oop_pc_follow_contents_specialized<oop>(objArrayOop(obj), 0, cm);
  }
}

void TypeArrayKlass::oop_pc_follow_contents(oop obj, ParCompactionManager* cm) {
  assert(obj->is_typeArray(), "must be a type array");
  // Performance tweak: We skip iterating over the klass pointer since we
  // know that Universe::TypeArrayKlass never moves.
}

void ParCompactionManager::follow_marking_stacks() {
  do {
    // Drain the overflow stack first, to allow stealing from the marking stack.
    oop obj;
    while (marking_stack()->pop_overflow(obj)) {
      follow_contents(obj);
    }
    while (marking_stack()->pop_local(obj)) {
      follow_contents(obj);
    }

    // Process ObjArrays one at a time to avoid marking stack bloat.
    ObjArrayTask task;
    if (_objarray_stack.pop_overflow(task) || _objarray_stack.pop_local(task)) {
      follow_contents((objArrayOop)task.obj(), task.index());
    }
  } while (!marking_stacks_empty());

  assert(marking_stacks_empty(), "Sanity");
}

void ParCompactionManager::drain_region_stacks() {
  do {
    // Drain overflow stack first so other threads can steal.
    size_t region_index;
    while (region_stack()->pop_overflow(region_index)) {
      PSParallelCompact::fill_and_update_region(this, region_index);
    }

    while (region_stack()->pop_local(region_index)) {
      PSParallelCompact::fill_and_update_region(this, region_index);
    }
  } while (!region_stack()->is_empty());
}