/*
 * NOTE(review): This chunk is not compilable C++. It is a two-column
 * ("old | new") side-by-side diff of HotSpot G1 garbage-collector code
 * (G1ParScanThreadState) collapsed onto a handful of physical lines.
 * The original source line numbers (133-242 in the left column,
 * 133-235 in the right) are embedded in the text, and the '|' character
 * separates the columns. Newlines inside each column were lost, so the
 * '//' comments swallow the code that follows them on the same line.
 *
 * Left column (old version):
 *   - trim_queue(): drains the overflow stack first (so other threads
 *     can steal), then the local queue, looping until _refs is empty.
 *   - copy_to_survivor_space(oop const old): reads the mark word itself
 *     (markOop m = old->mark()), uses `int age`, ages the object via
 *     obj->incr_age() / m->incr_age(), and records the aged object with
 *     age_table()->add(obj, word_sz).
 *
 * Right column (new version):
 *   - the same trim_queue().
 *   - copy_to_survivor_space(oop const old, markOop const old_mark): the
 *     caller now supplies the mark word; `age` is uint; the object and
 *     age are passed through to _g1_par_allocator->allocate(); ageing is
 *     clamped at markOopDesc::max_age; the age table records the age
 *     value directly (age_table()->add(age, word_sz)).
 *
 * NOTE(review, possible defect in the right column): after
 * old->forward_to_atomic(obj), the word copy propagates old's mark word
 * -- which now "contains the forward pointer" (the code's own comment,
 * old lines 211-213) -- into the new object. In the new version's
 * non-displaced branch, obj->set_mark(...) executes only when
 * age < markOopDesc::max_age; when age has already reached max_age the
 * new object is left holding the copied forwarding mark. The old version
 * unconditionally installed the mark (obj->set_mark(m)). Confirm against
 * the upstream fix before relying on the right-hand column.
 *
 * Both columns' copy_to_survivor_space bodies are truncated here (each
 * ends at an opening "} else {"), so no code edit can be made safely
 * from this fragment alone; only this explanatory comment was added.
 */
133 } 134 } 135 #endif // ASSERT 136 137 void G1ParScanThreadState::trim_queue() { 138 assert(_evac_failure_cl != NULL, "not set"); 139 140 StarTask ref; 141 do { 142 // Drain the overflow stack first, so other threads can steal. 143 while (_refs->pop_overflow(ref)) { 144 dispatch_reference(ref); 145 } 146 147 while (_refs->pop_local(ref)) { 148 dispatch_reference(ref); 149 } 150 } while (!_refs->is_empty()); 151 } 152 153 oop G1ParScanThreadState::copy_to_survivor_space(oop const old) { 154 size_t word_sz = old->size(); 155 HeapRegion* from_region = _g1h->heap_region_containing_raw(old); 156 // +1 to make the -1 indexes valid... 157 int young_index = from_region->young_index_in_cset()+1; 158 assert( (from_region->is_young() && young_index > 0) || 159 (!from_region->is_young() && young_index == 0), "invariant" ); 160 G1CollectorPolicy* g1p = _g1h->g1_policy(); 161 markOop m = old->mark(); 162 int age = m->has_displaced_mark_helper() ? m->displaced_mark_helper()->age() 163 : m->age(); 164 GCAllocPurpose alloc_purpose = g1p->evacuation_destination(from_region, age, 165 word_sz); 166 AllocationContext_t context = from_region->allocation_context(); 167 HeapWord* obj_ptr = _g1_par_allocator->allocate(alloc_purpose, word_sz, context); 168 #ifndef PRODUCT 169 // Should this evacuation fail? 170 if (_g1h->evacuation_should_fail()) { 171 if (obj_ptr != NULL) { 172 _g1_par_allocator->undo_allocation(alloc_purpose, obj_ptr, word_sz, context); 173 obj_ptr = NULL; 174 } 175 } 176 #endif // !PRODUCT 177 178 if (obj_ptr == NULL) { 179 // This will either forward-to-self, or detect that someone else has 180 // installed a forwarding pointer. 181 return _g1h->handle_evacuation_failure_par(this, old); 182 } 183 184 oop obj = oop(obj_ptr); 185 186 // We're going to allocate linearly, so might as well prefetch ahead. 
187 Prefetch::write(obj_ptr, PrefetchCopyIntervalInBytes); 188 189 oop forward_ptr = old->forward_to_atomic(obj); 190 if (forward_ptr == NULL) { 191 Copy::aligned_disjoint_words((HeapWord*) old, obj_ptr, word_sz); 192 193 // alloc_purpose is just a hint to allocate() above, recheck the type of region 194 // we actually allocated from and update alloc_purpose accordingly 195 HeapRegion* to_region = _g1h->heap_region_containing_raw(obj_ptr); 196 alloc_purpose = to_region->is_young() ? GCAllocForSurvived : GCAllocForTenured; 197 198 if (g1p->track_object_age(alloc_purpose)) { 199 // We could simply do obj->incr_age(). However, this causes a 200 // performance issue. obj->incr_age() will first check whether 201 // the object has a displaced mark by checking its mark word; 202 // getting the mark word from the new location of the object 203 // stalls. So, given that we already have the mark word and we 204 // are about to install it anyway, it's better to increase the 205 // age on the mark word, when the object does not have a 206 // displaced mark word. We're not expecting many objects to have 207 // a displaced marked word, so that case is not optimized 208 // further (it could be...) and we simply call obj->incr_age(). 
209 210 if (m->has_displaced_mark_helper()) { 211 // in this case, we have to install the mark word first, 212 // otherwise obj looks to be forwarded (the old mark word, 213 // which contains the forward pointer, was copied) 214 obj->set_mark(m); 215 obj->incr_age(); 216 } else { 217 m = m->incr_age(); 218 obj->set_mark(m); 219 } 220 age_table()->add(obj, word_sz); 221 } else { 222 obj->set_mark(m); 223 } 224 225 if (G1StringDedup::is_enabled()) { 226 G1StringDedup::enqueue_from_evacuation(from_region->is_young(), 227 to_region->is_young(), 228 queue_num(), 229 obj); 230 } 231 232 size_t* surv_young_words = surviving_young_words(); 233 surv_young_words[young_index] += word_sz; 234 235 if (obj->is_objArray() && arrayOop(obj)->length() >= ParGCArrayScanChunk) { 236 // We keep track of the next start index in the length field of 237 // the to-space object. The actual length can be found in the 238 // length field of the from-space object. 239 arrayOop(obj)->set_length(0); 240 oop* old_p = set_partial_array_mask(old); 241 push_on_queue(old_p); 242 } else { | 133 } 134 } 135 #endif // ASSERT 136 137 void G1ParScanThreadState::trim_queue() { 138 assert(_evac_failure_cl != NULL, "not set"); 139 140 StarTask ref; 141 do { 142 // Drain the overflow stack first, so other threads can steal. 143 while (_refs->pop_overflow(ref)) { 144 dispatch_reference(ref); 145 } 146 147 while (_refs->pop_local(ref)) { 148 dispatch_reference(ref); 149 } 150 } while (!_refs->is_empty()); 151 } 152 153 oop G1ParScanThreadState::copy_to_survivor_space(oop const old, 154 markOop const old_mark) { 155 size_t word_sz = old->size(); 156 HeapRegion* from_region = _g1h->heap_region_containing_raw(old); 157 // +1 to make the -1 indexes valid... 
158 int young_index = from_region->young_index_in_cset()+1; 159 assert( (from_region->is_young() && young_index > 0) || 160 (!from_region->is_young() && young_index == 0), "invariant" ); 161 G1CollectorPolicy* g1p = _g1h->g1_policy(); 162 uint age = old_mark->has_displaced_mark_helper() ? old_mark->displaced_mark_helper()->age() 163 : old_mark->age(); 164 GCAllocPurpose alloc_purpose = g1p->evacuation_destination(from_region, age, 165 word_sz); 166 AllocationContext_t context = from_region->allocation_context(); 167 HeapWord* obj_ptr = _g1_par_allocator->allocate(alloc_purpose, word_sz, old, age, context); 168 #ifndef PRODUCT 169 // Should this evacuation fail? 170 if (_g1h->evacuation_should_fail()) { 171 if (obj_ptr != NULL) { 172 _g1_par_allocator->undo_allocation(alloc_purpose, obj_ptr, word_sz, context); 173 obj_ptr = NULL; 174 } 175 } 176 #endif // !PRODUCT 177 178 if (obj_ptr == NULL) { 179 // This will either forward-to-self, or detect that someone else has 180 // installed a forwarding pointer. 181 return _g1h->handle_evacuation_failure_par(this, old); 182 } 183 184 oop obj = oop(obj_ptr); 185 186 // We're going to allocate linearly, so might as well prefetch ahead. 187 Prefetch::write(obj_ptr, PrefetchCopyIntervalInBytes); 188 189 oop forward_ptr = old->forward_to_atomic(obj); 190 if (forward_ptr == NULL) { 191 Copy::aligned_disjoint_words((HeapWord*) old, obj_ptr, word_sz); 192 193 // alloc_purpose is just a hint to allocate() above, recheck the type of region 194 // we actually allocated from and update alloc_purpose accordingly 195 HeapRegion* to_region = _g1h->heap_region_containing_raw(obj_ptr); 196 alloc_purpose = to_region->is_young() ? 
GCAllocForSurvived : GCAllocForTenured; 197 198 if (g1p->track_object_age(alloc_purpose)) { 199 if (old_mark->has_displaced_mark_helper()) { 200 // in this case, we have to install the mark word first, 201 // otherwise obj looks to be forwarded (the old mark word, 202 // which contains the forward pointer, was copied) 203 obj->set_mark(old_mark); 204 if (age < markOopDesc::max_age) { 205 markOop new_mark = old_mark->displaced_mark_helper()->set_age(++age); 206 old_mark->set_displaced_mark_helper(new_mark); 207 } 208 } else { 209 if (age < markOopDesc::max_age) { 210 obj->set_mark(old_mark->set_age(++age)); 211 } 212 } 213 age_table()->add(age, word_sz); 214 } else { 215 obj->set_mark(old_mark); 216 } 217 218 if (G1StringDedup::is_enabled()) { 219 G1StringDedup::enqueue_from_evacuation(from_region->is_young(), 220 to_region->is_young(), 221 queue_num(), 222 obj); 223 } 224 225 size_t* surv_young_words = surviving_young_words(); 226 surv_young_words[young_index] += word_sz; 227 228 if (obj->is_objArray() && arrayOop(obj)->length() >= ParGCArrayScanChunk) { 229 // We keep track of the next start index in the length field of 230 // the to-space object. The actual length can be found in the 231 // length field of the from-space object. 232 arrayOop(obj)->set_length(0); 233 oop* old_p = set_partial_array_mask(old); 234 push_on_queue(old_p); 235 } else { |