84 // Native load reference barrier is only needed for concurrent root processing
85 if (!ShenandoahConcurrentRoots::can_do_concurrent_roots()) {
86 return false;
87 }
88
89 return (decorators & IN_NATIVE) != 0;
90 }
91
92 bool ShenandoahBarrierSet::need_keep_alive_barrier(DecoratorSet decorators,BasicType type) {
93 if (!ShenandoahKeepAliveBarrier) return false;
94 // Only needed for references
95 if (!is_reference_type(type)) return false;
96
97 bool keep_alive = (decorators & AS_NO_KEEPALIVE) == 0;
98 bool unknown = (decorators & ON_UNKNOWN_OOP_REF) != 0;
99 bool is_traversal_mode = ShenandoahHeap::heap()->is_traversal_mode();
100 bool on_weak_ref = (decorators & (ON_WEAK_OOP_REF | ON_PHANTOM_OOP_REF)) != 0;
101 return (on_weak_ref || unknown) && (keep_alive || is_traversal_mode);
102 }
103
104 template <class T>
105 inline void ShenandoahBarrierSet::inline_write_ref_field_pre(T* field, oop new_val) {
106 shenandoah_assert_not_in_cset_loc_except(field, _heap->cancelled_gc());
107 if (_heap->is_concurrent_mark_in_progress()) {
108 T heap_oop = RawAccess<>::oop_load(field);
109 if (!CompressedOops::is_null(heap_oop)) {
110 enqueue(CompressedOops::decode(heap_oop));
111 }
112 }
113 }
114
115 // These are the more general virtual versions.
// Virtual entry point for the SATB pre-write barrier on uncompressed oop
// fields; delegates to the shared template implementation above.
116 void ShenandoahBarrierSet::write_ref_field_pre_work(oop* field, oop new_val) {
117 inline_write_ref_field_pre(field, new_val);
118 }
119
// Virtual entry point for the SATB pre-write barrier on compressed
// (narrowOop) fields; delegates to the shared template implementation.
120 void ShenandoahBarrierSet::write_ref_field_pre_work(narrowOop* field, oop new_val) {
121 inline_write_ref_field_pre(field, new_val);
122 }
123
// The untyped overload must never be reached: callers are expected to
// dispatch through the typed (oop* / narrowOop*) overloads above.
124 void ShenandoahBarrierSet::write_ref_field_pre_work(void* field, oop new_val) {
125 guarantee(false, "Not needed");
126 }
127
// Post-write barrier hook: Shenandoah needs no post-write work, but debug
// builds verify the store location and stored value are sane with respect
// to the collection set and object forwarding (no-ops in product builds).
128 void ShenandoahBarrierSet::write_ref_field_work(void* v, oop o, bool release) {
129 shenandoah_assert_not_in_cset_loc_except(v, _heap->cancelled_gc());
130 shenandoah_assert_not_forwarded_except (v, o, o == NULL || _heap->cancelled_gc() || !_heap->is_concurrent_mark_in_progress());
131 shenandoah_assert_not_in_cset_except (v, o, o == NULL || _heap->cancelled_gc() || !_heap->is_concurrent_mark_in_progress());
132 }
133
134 oop ShenandoahBarrierSet::load_reference_barrier_not_null(oop obj) {
135 if (ShenandoahLoadRefBarrier && _heap->has_forwarded_objects()) {
136 return load_reference_barrier_impl(obj);
137 } else {
138 return obj;
139 }
140 }
141
142 oop ShenandoahBarrierSet::load_reference_barrier(oop obj) {
143 if (obj != NULL) {
144 return load_reference_barrier_not_null(obj);
145 } else {
146 return obj;
147 }
148 }
149
// Mutator-facing load-reference barrier; forwards to the work routine.
// load_addr is the field address the oop was loaded from.
150 oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, oop* load_addr) {
151 return load_reference_barrier_mutator_work(obj, load_addr);
152 }
153
215 assert(ShenandoahLoadRefBarrier, "should be enabled");
216 if (!CompressedOops::is_null(obj)) {
217 bool evac_in_progress = _heap->is_gc_in_progress_mask(ShenandoahHeap::EVACUATION | ShenandoahHeap::TRAVERSAL);
218 oop fwd = resolve_forwarded_not_null(obj);
219 if (evac_in_progress &&
220 _heap->in_collection_set(obj) &&
221 obj == fwd) {
222 Thread *t = Thread::current();
223 if (t->is_GC_task_thread()) {
224 return _heap->evacuate_object(obj, t);
225 } else {
226 ShenandoahEvacOOMScope oom_evac_scope;
227 return _heap->evacuate_object(obj, t);
228 }
229 } else {
230 return fwd;
231 }
232 } else {
233 return obj;
234 }
235 }
236
237 void ShenandoahBarrierSet::storeval_barrier(oop obj) {
238 if (ShenandoahStoreValEnqueueBarrier && !CompressedOops::is_null(obj) && _heap->is_concurrent_traversal_in_progress()) {
239 enqueue(obj);
240 }
241 }
242
243 void ShenandoahBarrierSet::keep_alive_barrier(oop obj) {
244 if (ShenandoahKeepAliveBarrier && _heap->is_concurrent_mark_in_progress()) {
245 enqueue(obj);
246 }
247 }
248
249 void ShenandoahBarrierSet::enqueue(oop obj) {
250 shenandoah_assert_not_forwarded_if(NULL, obj, _heap->is_concurrent_traversal_in_progress());
251 assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");
252
253 // Filter marked objects before hitting the SATB queues. The same predicate would
254 // be used by SATBMQ::filter to eliminate already marked objects downstream, but
255 // filtering here helps to avoid wasteful SATB queueing work to begin with.
256 if (!_heap->requires_marking<false>(obj)) return;
257
258 ShenandoahThreadLocalData::satb_mark_queue(Thread::current()).enqueue_known_active(obj);
259 }
260
// BarrierSet hook invoked when a thread is created: sets up the
// Shenandoah-specific thread-local data (e.g. its SATB queue storage).
261 void ShenandoahBarrierSet::on_thread_create(Thread* thread) {
262 // Create thread local data
263 ShenandoahThreadLocalData::create(thread);
264 }
265
// BarrierSet hook invoked when a thread dies: tears down the
// Shenandoah-specific thread-local data created in on_thread_create().
266 void ShenandoahBarrierSet::on_thread_destroy(Thread* thread) {
267 // Destroy thread local data
268 ShenandoahThreadLocalData::destroy(thread);
269 }
270
271 void ShenandoahBarrierSet::on_thread_attach(Thread *thread) {
272 assert(!thread->is_Java_thread() || !SafepointSynchronize::is_at_safepoint(),
273 "We should not be at a safepoint");
274 SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
275 assert(!queue.is_active(), "SATB queue should not be active");
276 assert( queue.is_empty(), "SATB queue should be empty");
277 queue.set_active(_satb_mark_queue_set.is_active());
278 if (thread->is_Java_thread()) {
|
84 // Native load reference barrier is only needed for concurrent root processing
85 if (!ShenandoahConcurrentRoots::can_do_concurrent_roots()) {
86 return false;
87 }
88
89 return (decorators & IN_NATIVE) != 0;
90 }
91
92 bool ShenandoahBarrierSet::need_keep_alive_barrier(DecoratorSet decorators,BasicType type) {
93 if (!ShenandoahKeepAliveBarrier) return false;
94 // Only needed for references
95 if (!is_reference_type(type)) return false;
96
97 bool keep_alive = (decorators & AS_NO_KEEPALIVE) == 0;
98 bool unknown = (decorators & ON_UNKNOWN_OOP_REF) != 0;
99 bool is_traversal_mode = ShenandoahHeap::heap()->is_traversal_mode();
100 bool on_weak_ref = (decorators & (ON_WEAK_OOP_REF | ON_PHANTOM_OOP_REF)) != 0;
101 return (on_weak_ref || unknown) && (keep_alive || is_traversal_mode);
102 }
103
104 oop ShenandoahBarrierSet::load_reference_barrier_not_null(oop obj) {
105 if (ShenandoahLoadRefBarrier && _heap->has_forwarded_objects()) {
106 return load_reference_barrier_impl(obj);
107 } else {
108 return obj;
109 }
110 }
111
112 oop ShenandoahBarrierSet::load_reference_barrier(oop obj) {
113 if (obj != NULL) {
114 return load_reference_barrier_not_null(obj);
115 } else {
116 return obj;
117 }
118 }
119
// Mutator-facing load-reference barrier; forwards to the work routine.
// load_addr is the field address the oop was loaded from.
120 oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, oop* load_addr) {
121 return load_reference_barrier_mutator_work(obj, load_addr);
122 }
123
185 assert(ShenandoahLoadRefBarrier, "should be enabled");
186 if (!CompressedOops::is_null(obj)) {
187 bool evac_in_progress = _heap->is_gc_in_progress_mask(ShenandoahHeap::EVACUATION | ShenandoahHeap::TRAVERSAL);
188 oop fwd = resolve_forwarded_not_null(obj);
189 if (evac_in_progress &&
190 _heap->in_collection_set(obj) &&
191 obj == fwd) {
192 Thread *t = Thread::current();
193 if (t->is_GC_task_thread()) {
194 return _heap->evacuate_object(obj, t);
195 } else {
196 ShenandoahEvacOOMScope oom_evac_scope;
197 return _heap->evacuate_object(obj, t);
198 }
199 } else {
200 return fwd;
201 }
202 } else {
203 return obj;
204 }
205 }
206
// BarrierSet hook invoked when a thread is created: sets up the
// Shenandoah-specific thread-local data (e.g. its SATB queue storage).
207 void ShenandoahBarrierSet::on_thread_create(Thread* thread) {
208 // Create thread local data
209 ShenandoahThreadLocalData::create(thread);
210 }
211
// BarrierSet hook invoked when a thread dies: tears down the
// Shenandoah-specific thread-local data created in on_thread_create().
212 void ShenandoahBarrierSet::on_thread_destroy(Thread* thread) {
213 // Destroy thread local data
214 ShenandoahThreadLocalData::destroy(thread);
215 }
216
217 void ShenandoahBarrierSet::on_thread_attach(Thread *thread) {
218 assert(!thread->is_Java_thread() || !SafepointSynchronize::is_at_safepoint(),
219 "We should not be at a safepoint");
220 SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
221 assert(!queue.is_active(), "SATB queue should not be active");
222 assert( queue.is_empty(), "SATB queue should be empty");
223 queue.set_active(_satb_mark_queue_set.is_active());
224 if (thread->is_Java_thread()) {
|