127 }
128 }
129
130 oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, oop* load_addr) {
131 return load_reference_barrier_mutator_work(obj, load_addr);
132 }
133
134 oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, narrowOop* load_addr) {
135 return load_reference_barrier_mutator_work(obj, load_addr);
136 }
137
// Slow path of the load-reference barrier taken by Java mutator threads.
// If the loaded object has not been forwarded yet, evacuates it (and
// opportunistically a few neighboring objects), then heals the load
// address so subsequent loads see the to-space copy.
//
// obj       - object loaded from the heap; asserted to be in the collection set.
// load_addr - address the reference was loaded from; may be NULL when the
//             caller cannot provide it (then no healing CAS is attempted).
// Returns the forwarded (to-space) copy of obj, or obj itself when it is
// not forwarded at exit.
template <class T>
oop ShenandoahBarrierSet::load_reference_barrier_mutator_work(oop obj, T* load_addr) {
  assert(ShenandoahLoadRefBarrier, "should be enabled");
  shenandoah_assert_in_cset(load_addr, obj);

  oop fwd = resolve_forwarded_not_null_mutator(obj);
  if (obj == fwd) {
    // Object is still in from-space: we must evacuate it ourselves. This can
    // only happen while evacuation or traversal (which coalesces mark+evac)
    // is in progress.
    assert(_heap->is_gc_in_progress_mask(ShenandoahHeap::EVACUATION | ShenandoahHeap::TRAVERSAL),
           "evac should be in progress");

    // RAII scope coordinating with the evacuation-OOM protocol; must cover
    // every evacuate_object() call below.
    ShenandoahEvacOOMScope oom_evac_scope;

    Thread* thread = Thread::current();
    oop res_oop = _heap->evacuate_object(obj, thread);

    // Since we are already here and paid the price of getting through runtime call adapters
    // and acquiring oom-scope, it makes sense to try and evacuate more adjacent objects,
    // thus amortizing the overhead. For sparsely live heaps, scan costs easily dominate
    // total assist costs, and can introduce a lot of evacuation latency. This is why we
    // only scan for _nearest_ N objects, regardless if they are eligible for evac or not.
    // The scan itself should also avoid touching the non-marked objects below TAMS, because
    // their metadata (notably, klasses) may be incorrect already.

    size_t max = ShenandoahEvacAssist;
    if (max > 0) {
      // Traversal is special: it uses incomplete marking context, because it coalesces evac with mark.
      // Other code uses complete marking context, because evac happens after the mark.
      ShenandoahMarkingContext* ctx = _heap->is_concurrent_traversal_in_progress() ?
                                      _heap->marking_context() : _heap->complete_marking_context();

      ShenandoahHeapRegion* r = _heap->heap_region_containing(obj);
      assert(r->is_cset(), "sanity");

      // Start the assist scan immediately past the object we just evacuated.
      HeapWord* cur = cast_from_oop<HeapWord*>(obj) + obj->size();

      size_t count = 0;
      // Walk at most `max` marked objects up to the region's top; the
      // is_marked() check keeps us off unmarked objects below TAMS whose
      // metadata may already be stale.
      while ((cur < r->top()) && ctx->is_marked(oop(cur)) && (count++ < max)) {
        oop cur_oop = oop(cur);
        if (cur_oop == resolve_forwarded_not_null_mutator(cur_oop)) {
          // Not forwarded yet: evacuate it too. The result is intentionally
          // ignored — losing the evacuation race to another thread is fine.
          _heap->evacuate_object(cur_oop, thread);
        }
        cur = cur + cur_oop->size();
      }
    }

    fwd = res_oop;
  }

  if (load_addr != NULL && fwd != obj) {
    // Since we are here and we know the load address, update the reference.
    ShenandoahHeap::cas_oop(fwd, load_addr, obj);
  }

  return fwd;
}
193
194 oop ShenandoahBarrierSet::load_reference_barrier_impl(oop obj) {
195 assert(ShenandoahLoadRefBarrier, "should be enabled");
196 if (!CompressedOops::is_null(obj)) {
197 bool evac_in_progress = _heap->is_gc_in_progress_mask(ShenandoahHeap::EVACUATION | ShenandoahHeap::TRAVERSAL);
198 oop fwd = resolve_forwarded_not_null(obj);
199 if (evac_in_progress &&
200 _heap->in_collection_set(obj) &&
201 obj == fwd) {
202 Thread *t = Thread::current();
203 ShenandoahEvacOOMScope oom_evac_scope;
|
127 }
128 }
129
130 oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, oop* load_addr) {
131 return load_reference_barrier_mutator_work(obj, load_addr);
132 }
133
134 oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, narrowOop* load_addr) {
135 return load_reference_barrier_mutator_work(obj, load_addr);
136 }
137
// Slow path of the load-reference barrier taken by Java mutator threads.
// If the loaded object is still in from-space (not yet forwarded), evacuates
// it under the evacuation-OOM protocol, then heals the load address so
// subsequent loads observe the to-space copy.
//
// obj       - object loaded from the heap; asserted to be in the collection set.
// load_addr - address the reference was loaded from; may be NULL when the
//             caller cannot provide it (then no healing CAS is attempted).
// Returns the forwarded (to-space) copy of obj, or obj itself when it is
// not forwarded at exit.
template <class T>
oop ShenandoahBarrierSet::load_reference_barrier_mutator_work(oop obj, T* load_addr) {
  assert(ShenandoahLoadRefBarrier, "should be enabled");
  shenandoah_assert_in_cset(load_addr, obj);

  oop fwd = resolve_forwarded_not_null_mutator(obj);
  if (obj == fwd) {
    // Not forwarded yet: evacuation (or traversal, which coalesces mark+evac)
    // must be in progress, and we evacuate the object ourselves.
    assert(_heap->is_gc_in_progress_mask(ShenandoahHeap::EVACUATION | ShenandoahHeap::TRAVERSAL),
           "evac should be in progress");
    // RAII scope coordinating with the evacuation-OOM protocol; must cover
    // the evacuate_object() call below.
    ShenandoahEvacOOMScope scope;
    fwd = _heap->evacuate_object(obj, Thread::current());
  }

  if (load_addr != NULL && fwd != obj) {
    // Since we are here and we know the load address, update the reference.
    ShenandoahHeap::cas_oop(fwd, load_addr, obj);
  }

  return fwd;
}
158
159 oop ShenandoahBarrierSet::load_reference_barrier_impl(oop obj) {
160 assert(ShenandoahLoadRefBarrier, "should be enabled");
161 if (!CompressedOops::is_null(obj)) {
162 bool evac_in_progress = _heap->is_gc_in_progress_mask(ShenandoahHeap::EVACUATION | ShenandoahHeap::TRAVERSAL);
163 oop fwd = resolve_forwarded_not_null(obj);
164 if (evac_in_progress &&
165 _heap->in_collection_set(obj) &&
166 obj == fwd) {
167 Thread *t = Thread::current();
168 ShenandoahEvacOOMScope oom_evac_scope;
|