140 // ResetNoHandleMark and HandleMark were removed from it. The actual reallocation
141 // of previously eliminated objects occurs in realloc_objects, which is
142 // called from the method fetch_unroll_info_helper below.
// Entry point from the deopt blob: collect the information needed to unroll
// the compiled frame into interpreter frames. The real work is done in
// fetch_unroll_info_helper().
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
  // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
  // but makes the entry a little slower. There is however a little dance we have to
  // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro

  // fetch_unroll_info() is called at the beginning of the deoptimization
  // handler. Note this fact before we start generating temporary frames
  // that can confuse an asynchronous stack walker. This counter is
  // decremented at the end of unpack_frames().
  if (TraceDeoptimization) {
    tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
  }
  thread->inc_in_deopt_handler();

  return fetch_unroll_info_helper(thread, exec_mode);
JRT_END
159
160
161 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
162 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
163
164 // Note: there is a safepoint safety issue here. No matter whether we enter
165 // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
166 // the vframeArray is created.
167 //
168
169 // Allocate our special deoptimization ResourceMark
170 DeoptResourceMark* dmark = new DeoptResourceMark(thread);
171 assert(thread->deopt_mark() == NULL, "Pending deopt!");
172 thread->set_deopt_mark(dmark);
173
174 frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
175 RegisterMap map(thread, true);
176 RegisterMap dummy_map(thread, false);
177 // Now get the deoptee with a valid map
178 frame deoptee = stub_frame.sender(&map);
179 // Set the deoptee nmethod
180 assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
181 CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
182 thread->set_deopt_compiled_method(cm);
183
184 if (VerifyStack) {
185 thread->validate_frame_layout();
186 }
187
188 // Create a growable array of VFrames where each VFrame represents an inlined
189 // Java frame. This storage is allocated with the usual system arena.
190 assert(deoptee.is_compiled_frame(), "Wrong frame type");
191 GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
192 vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
193 while (!vf->is_top()) {
194 assert(vf->is_compiled_frame(), "Wrong frame type");
195 chunk->push(compiledVFrame::cast(vf));
196 vf = vf->sender();
197 }
198 assert(vf->is_compiled_frame(), "Wrong frame type");
199 chunk->push(compiledVFrame::cast(vf));
200
201 bool realloc_failures = false;
202
203 #if COMPILER2_OR_JVMCI
204 // Reallocate the non-escaping objects and restore their fields. Then
205 // relock objects if synchronization on them was eliminated.
206 #if !INCLUDE_JVMCI
207 if (DoEscapeAnalysis || EliminateNestedLocks) {
208 if (EliminateAllocations) {
209 #endif // INCLUDE_JVMCI
210 assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
211 GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
212
213 // The flag return_oop() indicates call sites which return oop
214 // in compiled code. Such sites include java method calls,
215 // runtime calls (for example, used to allocate new objects/arrays
216 // on slow code path) and any other calls generated in compiled code.
217 // It is not guaranteed that we can get such information here only
218 // by analyzing bytecode in deoptimized frames. This is why this flag
219 // is set during method compilation (see Compile::Process_OopMap_Node()).
220 // If the previous frame was popped or if we are dispatching an exception,
221 // we don't have an oop result.
222 bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
223 Handle return_value;
224 if (save_oop_result) {
225 // Reallocation may trigger GC. If deoptimization happened on return from
226 // call which returns oop we need to save it since it is not in oopmap.
227 oop result = deoptee.saved_oop_result(&map);
228 assert(oopDesc::is_oop_or_null(result), "must be oop");
229 return_value = Handle(thread, result);
230 assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
231 if (TraceDeoptimization) {
232 ttyLocker ttyl;
233 tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
234 }
235 }
236 if (objects != NULL) {
237 JRT_BLOCK
238 realloc_failures = realloc_objects(thread, &deoptee, &map, objects, THREAD);
239 JRT_END
240 bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
241 reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
242 #ifndef PRODUCT
243 if (TraceDeoptimization) {
244 ttyLocker ttyl;
245 tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
246 print_objects(objects, realloc_failures);
247 }
248 #endif
249 }
250 if (save_oop_result) {
251 // Restore result.
252 deoptee.set_saved_oop_result(&map, return_value());
253 }
254 #if !INCLUDE_JVMCI
255 }
256 if (EliminateLocks) {
257 #endif // INCLUDE_JVMCI
258 #ifndef PRODUCT
259 bool first = true;
260 #endif
261 for (int i = 0; i < chunk->length(); i++) {
262 compiledVFrame* cvf = chunk->at(i);
263 assert (cvf->scope() != NULL,"expect only compiled java frames");
264 GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
265 if (monitors->is_nonempty()) {
266 relock_objects(monitors, thread, realloc_failures);
267 #ifndef PRODUCT
268 if (PrintDeoptimizationDetails) {
269 ttyLocker ttyl;
270 for (int j = 0; j < monitors->length(); j++) {
271 MonitorInfo* mi = monitors->at(j);
272 if (mi->eliminated()) {
273 if (first) {
274 first = false;
275 tty->print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
276 }
277 if (mi->owner_is_scalar_replaced()) {
278 Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
279 tty->print_cr(" failed reallocation for klass %s", k->external_name());
280 } else {
281 tty->print_cr(" object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
282 }
283 }
284 }
285 }
286 #endif // !PRODUCT
287 }
288 }
289 #if !INCLUDE_JVMCI
290 }
291 }
292 #endif // INCLUDE_JVMCI
293 #endif // COMPILER2_OR_JVMCI
294
295 ScopeDesc* trap_scope = chunk->at(0)->scope();
296 Handle exceptionObject;
297 if (trap_scope->rethrow_exception()) {
298 if (PrintDeoptimizationDetails) {
299 tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_scope->method()->method_holder()->name()->as_C_string(), trap_scope->method()->name()->as_C_string(), trap_scope->bci());
300 }
301 GrowableArray<ScopeValue*>* expressions = trap_scope->expressions();
302 guarantee(expressions != NULL && expressions->length() > 0, "must have exception to throw");
303 ScopeValue* topOfStack = expressions->top();
304 exceptionObject = StackValue::create_stack_value(&deoptee, &map, topOfStack)->get_obj();
305 guarantee(exceptionObject() != NULL, "exception oop can not be null");
306 }
307
308 // Ensure that no safepoint is taken after pointers have been stored
309 // in fields of rematerialized objects. If a safepoint occurs from here on
310 // out the java state residing in the vframeArray will be missed.
311 NoSafepointVerifier no_safepoint;
312
313 vframeArray* array = create_vframeArray(thread, deoptee, &map, chunk, realloc_failures);
314 #if COMPILER2_OR_JVMCI
315 if (realloc_failures) {
316 pop_frames_failed_reallocs(thread, array);
317 }
318 #endif
319
320 assert(thread->vframe_array_head() == NULL, "Pending deopt!");
321 thread->set_vframe_array_head(array);
322
323 // Now that the vframeArray has been created if we have any deferred local writes
324 // added by jvmti then we can free up that structure as the data is now in the
325 // vframeArray
326
327 if (thread->deferred_locals() != NULL) {
328 GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
329 int i = 0;
330 do {
331 // Because of inlining we could have multiple vframes for a single frame
332 // and several of the vframes could have deferred writes. Find them all.
762 vframeArrayElement* el = cur_array->element(k);
763 tty->print_cr(" %s (bci %d)", el->method()->name_and_sig_as_C_string(), el->bci());
764 }
765 cur_array->print_on_2(tty);
766 } // release tty lock before calling guarantee
767 guarantee(false, "wrong number of expression stack elements during deopt");
768 }
769 VerifyOopClosure verify;
770 iframe->oops_interpreted_do(&verify, &rm, false);
771 callee_size_of_parameters = mh->size_of_parameters();
772 callee_max_locals = mh->max_locals();
773 is_top_frame = false;
774 }
775 }
776 #endif /* !PRODUCT */
777
778
779 return bt;
780 JRT_END
781
782
// Deoptimize all frames whose nmethods have been marked for deoptimization
// (see Threads::deoptimized_wrt_marked_nmethods). Always returns 0.
int Deoptimization::deoptimize_dependents() {
  Threads::deoptimized_wrt_marked_nmethods();
  return 0;
}
787
// Default action taken for traps whose target class is not loaded:
// re-execute the bytecode in the interpreter.
Deoptimization::DeoptAction Deoptimization::_unloaded_action
  = Deoptimization::Action_reinterpret;
790
791
792
793 #if INCLUDE_JVMCI || INCLUDE_AOT
794 template<typename CacheType>
795 class BoxCacheBase : public CHeapObj<mtCompiler> {
796 protected:
797 static InstanceKlass* find_cache_klass(Symbol* klass_name, TRAPS) {
798 ResourceMark rm;
799 char* klass_name_str = klass_name->as_C_string();
800 Klass* k = SystemDictionary::find(klass_name, Handle(), Handle(), THREAD);
801 guarantee(k != NULL, "%s must be loaded", klass_name_str);
802 InstanceKlass* ik = InstanceKlass::cast(k);
803 guarantee(ik->is_initialized(), "%s must be initialized", klass_name_str);
804 CacheType::compute_offsets(ik);
805 return ik;
1380 array->element(i)->free_monitors(thread);
1381 #ifdef ASSERT
1382 array->element(i)->set_removed_monitors();
1383 #endif
1384 }
1385 }
1386 }
1387 #endif
1388
1389 static void collect_monitors(compiledVFrame* cvf, GrowableArray<Handle>* objects_to_revoke) {
1390 GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
1391 Thread* thread = Thread::current();
1392 for (int i = 0; i < monitors->length(); i++) {
1393 MonitorInfo* mon_info = monitors->at(i);
1394 if (!mon_info->eliminated() && mon_info->owner() != NULL) {
1395 objects_to_revoke->append(Handle(thread, mon_info->owner()));
1396 }
1397 }
1398 }
1399
1400
// Revoke the biases of all objects locked by monitors in frame fr (including
// all inlined scopes) of the given thread. If the supplied RegisterMap is
// absent or not update-capable, the stack is re-walked to obtain one.
void Deoptimization::revoke_biases_of_monitors(JavaThread* thread, frame fr, RegisterMap* map) {
  if (!UseBiasedLocking) {
    return;
  }

  GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();

  // Unfortunately we don't have a RegisterMap available in most of
  // the places we want to call this routine so we need to walk the
  // stack again to update the register map.
  if (map == NULL || !map->update_map()) {
    StackFrameStream sfs(thread, true);
    bool found = false;
    while (!found && !sfs.is_done()) {
      frame* cur = sfs.current();
      sfs.next();
      found = cur->id() == fr.id();
    }
    assert(found, "frame to be deoptimized not found on target thread's stack");
    // NOTE(review): sfs.register_map() points into sfs, which goes out of
    // scope at the end of this block, yet `map` is dereferenced below.
    // Confirm the RegisterMap storage is still valid here (stack slot not
    // reused), or hoist the StackFrameStream to function scope.
    map = sfs.register_map();
  }

  vframe* vf = vframe::new_vframe(&fr, map, thread);
  compiledVFrame* cvf = compiledVFrame::cast(vf);
  // Revoke monitors' biases in all scopes
  while (!cvf->is_top()) {
    collect_monitors(cvf, objects_to_revoke);
    cvf = compiledVFrame::cast(cvf->sender());
  }
  collect_monitors(cvf, objects_to_revoke);

  // At a safepoint we may revoke directly; otherwise go through the
  // cooperative revocation path on behalf of the target thread.
  if (SafepointSynchronize::is_at_safepoint()) {
    BiasedLocking::revoke_at_safepoint(objects_to_revoke);
  } else {
    BiasedLocking::revoke(objects_to_revoke, thread);
  }
}
1438
1439
// Patch a single compiled frame so that when control returns to it, it will
// deopt into the interpreter. Also updates trap statistics and, with
// -XX:+LogCompilation, emits a <deoptimized> element describing the frame's
// inlining tree.
void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
  assert(fr.can_be_deoptimized(), "checking frame type");

  gather_statistics(reason, Action_none, Bytecodes::_illegal);

  if (LogCompilation && xtty != NULL) {
    CompiledMethod* cm = fr.cb()->as_compiled_method_or_null();
    assert(cm != NULL, "only compiled methods can deopt");

    ttyLocker ttyl;
    xtty->begin_head("deoptimized thread='" UINTX_FORMAT "' reason='%s' pc='" INTPTR_FORMAT "'",(uintx)thread->osthread()->thread_id(), trap_reason_name(reason), p2i(fr.pc()));
    cm->log_identity(xtty);
    xtty->end_head();
    // One <jvms> element per scope, walking from the innermost scope at
    // fr.pc() out to the top of the inlining tree.
    for (ScopeDesc* sd = cm->scope_desc_at(fr.pc()); ; sd = sd->sender()) {
      xtty->begin_elem("jvms bci='%d'", sd->bci());
      xtty->method(sd->method());
      xtty->end_elem();
      if (sd->is_top()) break;
    }
    xtty->tail("deoptimized");
  }

  // Patch the compiled method so that when execution returns to it we will
  // deopt the execution state and return to the interpreter.
  fr.deoptimize(thread);
}
1466
// Convenience overload: deoptimize frame fr with the generic
// Reason_constraint.
void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map) {
  deoptimize(thread, fr, map, Reason_constraint);
}
1470
1471 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map, DeoptReason reason) {
1472 // Deoptimize only if the frame comes from compile code.
1473 // Do not deoptimize the frame which is already patched
1474 // during the execution of the loops below.
1475 if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) {
1476 return;
1477 }
1478 ResourceMark rm;
1479 DeoptimizationMarker dm;
1480 if (UseBiasedLocking) {
1481 revoke_biases_of_monitors(thread, fr, map);
1482 }
1483 deoptimize_single_frame(thread, fr, reason);
1484
1485 }
1486
1487 #if INCLUDE_JVMCI
1488 address Deoptimization::deoptimize_for_missing_exception_handler(CompiledMethod* cm) {
1489 // there is no exception handler for this pc => deoptimize
1490 cm->make_not_entrant();
1491
1492 // Use Deoptimization::deoptimize for all of its side-effects:
1493 // revoking biases of monitors, gathering traps statistics, logging...
1494 // it also patches the return pc but we do not care about that
1495 // since we return a continuation to the deopt_blob below.
1496 JavaThread* thread = JavaThread::current();
1497 RegisterMap reg_map(thread, UseBiasedLocking);
1498 frame runtime_frame = thread->last_frame();
1499 frame caller_frame = runtime_frame.sender(®_map);
1500 assert(caller_frame.cb()->as_compiled_method_or_null() == cm, "expect top frame compiled method");
1501 Deoptimization::deoptimize(thread, caller_frame, ®_map, Deoptimization::Reason_not_compiled_exception_handler);
1502
1503 MethodData* trap_mdo = get_method_data(thread, cm->method(), true);
1504 if (trap_mdo != NULL) {
1624
1625 // We need to update the map if we have biased locking.
1626 #if INCLUDE_JVMCI
1627 // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
1628 RegisterMap reg_map(thread, true);
1629 #else
1630 RegisterMap reg_map(thread, UseBiasedLocking);
1631 #endif
1632 frame stub_frame = thread->last_frame();
1633 frame fr = stub_frame.sender(®_map);
1634 // Make sure the calling nmethod is not getting deoptimized and removed
1635 // before we are done with it.
1636 nmethodLocker nl(fr.pc());
1637
1638 // Log a message
1639 Events::log_deopt_message(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
1640 trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
1641
1642 {
1643 ResourceMark rm;
1644
1645 // Revoke biases of any monitors in the frame to ensure we can migrate them
1646 revoke_biases_of_monitors(thread, fr, ®_map);
1647
1648 DeoptReason reason = trap_request_reason(trap_request);
1649 DeoptAction action = trap_request_action(trap_request);
1650 #if INCLUDE_JVMCI
1651 int debug_id = trap_request_debug_id(trap_request);
1652 #endif
1653 jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
1654
1655 vframe* vf = vframe::new_vframe(&fr, ®_map, thread);
1656 compiledVFrame* cvf = compiledVFrame::cast(vf);
1657
1658 CompiledMethod* nm = cvf->code();
1659
1660 ScopeDesc* trap_scope = cvf->scope();
1661
1662 if (TraceDeoptimization) {
1663 ttyLocker ttyl;
1664 tty->print_cr(" bci=%d pc=" INTPTR_FORMAT ", relative_pc=" INTPTR_FORMAT ", method=%s" JVMCI_ONLY(", debug_id=%d"), trap_scope->bci(), p2i(fr.pc()), fr.pc() - nm->code_begin(), trap_scope->method()->name_and_sig_as_C_string()
1665 #if INCLUDE_JVMCI
1666 , debug_id
|
140 // ResetNoHandleMark and HandleMark were removed from it. The actual reallocation
141 // of previously eliminated objects occurs in realloc_objects, which is
142 // called from the method fetch_unroll_info_helper below.
// Entry point from the deopt blob: collect the information needed to unroll
// the compiled frame into interpreter frames. The real work is done in
// fetch_unroll_info_helper().
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
  // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
  // but makes the entry a little slower. There is however a little dance we have to
  // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro

  // fetch_unroll_info() is called at the beginning of the deoptimization
  // handler. Note this fact before we start generating temporary frames
  // that can confuse an asynchronous stack walker. This counter is
  // decremented at the end of unpack_frames().
  if (TraceDeoptimization) {
    tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
  }
  thread->inc_in_deopt_handler();

  return fetch_unroll_info_helper(thread, exec_mode);
JRT_END
159
160 #if COMPILER2_OR_JVMCI
// Reallocate the scalar-replaced (non-escaping) objects described by the
// innermost scope of the deoptee and restore their fields. Returns true if
// any reallocation failed, in which case the caller must handle the failure
// (e.g. via pop_frames_failed_reallocs).
static bool eliminate_allocations(JavaThread* thread, int exec_mode, CompiledMethod* compiled_method,
                                  frame& deoptee, RegisterMap& map, GrowableArray<compiledVFrame*>* chunk) {
  bool realloc_failures = false;
  assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");

  GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();

  // The flag return_oop() indicates call sites which return oop
  // in compiled code. Such sites include java method calls,
  // runtime calls (for example, used to allocate new objects/arrays
  // on slow code path) and any other calls generated in compiled code.
  // It is not guaranteed that we can get such information here only
  // by analyzing bytecode in deoptimized frames. This is why this flag
  // is set during method compilation (see Compile::Process_OopMap_Node()).
  // If the previous frame was popped or if we are dispatching an exception,
  // we don't have an oop result.
  bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Deoptimization::Unpack_deopt);
  Handle return_value;
  if (save_oop_result) {
    // Reallocation may trigger GC. If deoptimization happened on return from
    // call which returns oop we need to save it since it is not in oopmap.
    oop result = deoptee.saved_oop_result(&map);
    assert(oopDesc::is_oop_or_null(result), "must be oop");
    return_value = Handle(thread, result);
    assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
    if (TraceDeoptimization) {
      ttyLocker ttyl;
      tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
    }
  }
  if (objects != NULL) {
    // JRT_BLOCK/JRT_END: reallocation may allocate and thus safepoint;
    // run it in a proper runtime-transition block.
    JRT_BLOCK
      realloc_failures = Deoptimization::realloc_objects(thread, &deoptee, &map, objects, THREAD);
    JRT_END
    // JVMCI-compiled code stores internal (non-Java-visible) fields that
    // must be reassigned as well.
    bool skip_internal = (compiled_method != NULL) && !compiled_method->is_compiled_by_jvmci();
    Deoptimization::reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
#ifndef PRODUCT
    if (TraceDeoptimization) {
      ttyLocker ttyl;
      tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
      Deoptimization::print_objects(objects, realloc_failures);
    }
#endif
  }
  if (save_oop_result) {
    // Restore result.
    deoptee.set_saved_oop_result(&map, return_value());
  }
  return realloc_failures;
}
211
// For every frame in the chunk, relock objects whose synchronization was
// eliminated by the compiler, so the interpreter sees consistent monitor
// state after unpacking. In non-product builds, optionally trace each
// eliminated monitor that is handled.
static void eliminate_locks(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures) {
#ifndef PRODUCT
  bool first = true;  // print the "RELOCK OBJECTS" banner only once
#endif
  for (int i = 0; i < chunk->length(); i++) {
    compiledVFrame* cvf = chunk->at(i);
    assert (cvf->scope() != NULL,"expect only compiled java frames");
    GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
    if (monitors->is_nonempty()) {
      Deoptimization::relock_objects(monitors, thread, realloc_failures);
#ifndef PRODUCT
      if (PrintDeoptimizationDetails) {
        ttyLocker ttyl;
        for (int j = 0; j < monitors->length(); j++) {
          MonitorInfo* mi = monitors->at(j);
          if (mi->eliminated()) {
            if (first) {
              first = false;
              tty->print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
            }
            if (mi->owner_is_scalar_replaced()) {
              // Owner itself was scalar-replaced and its reallocation failed.
              Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
              tty->print_cr(" failed reallocation for klass %s", k->external_name());
            } else {
              tty->print_cr(" object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
            }
          }
        }
      }
#endif // !PRODUCT
    }
  }
}
245 #endif // COMPILER2_OR_JVMCI
246
247 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
248 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
249
250 // Note: there is a safepoint safety issue here. No matter whether we enter
251 // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
252 // the vframeArray is created.
253 //
254
255 // Allocate our special deoptimization ResourceMark
256 DeoptResourceMark* dmark = new DeoptResourceMark(thread);
257 assert(thread->deopt_mark() == NULL, "Pending deopt!");
258 thread->set_deopt_mark(dmark);
259
260 frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
261 RegisterMap map(thread, true);
262 RegisterMap dummy_map(thread, false);
263 // Now get the deoptee with a valid map
264 frame deoptee = stub_frame.sender(&map);
265 // Set the deoptee nmethod
266 assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
267 CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
268 thread->set_deopt_compiled_method(cm);
269
270 if (VerifyStack) {
271 thread->validate_frame_layout();
272 }
273
274 // Create a growable array of VFrames where each VFrame represents an inlined
275 // Java frame. This storage is allocated with the usual system arena.
276 assert(deoptee.is_compiled_frame(), "Wrong frame type");
277 GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
278 vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
279 while (!vf->is_top()) {
280 assert(vf->is_compiled_frame(), "Wrong frame type");
281 chunk->push(compiledVFrame::cast(vf));
282 vf = vf->sender();
283 }
284 assert(vf->is_compiled_frame(), "Wrong frame type");
285 chunk->push(compiledVFrame::cast(vf));
286
287 bool realloc_failures = false;
288
289 #if COMPILER2_OR_JVMCI
290 #if INCLUDE_JVMCI
291 bool jvmci_enabled = true;
292 #else
293 bool jvmci_enabled = false;
294 #endif
295
296 // Reallocate the non-escaping objects and restore their fields. Then
297 // relock objects if synchronization on them was eliminated.
298 if (jvmci_enabled || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateAllocations)) {
299 realloc_failures = eliminate_allocations(thread, exec_mode, cm, deoptee, map, chunk);
300 }
301 #endif // COMPILER2_OR_JVMCI
302
303 // Revoke biases, done with in java state.
304 // No safepoints allowed after this
305 revoke_from_deopt_handler(thread, deoptee, &map);
306
307 // Ensure that no safepoint is taken after pointers have been stored
308 // in fields of rematerialized objects. If a safepoint occurs from here on
309 // out the java state residing in the vframeArray will be missed.
310 // Locks may be rebaised in a safepoint.
311 NoSafepointVerifier no_safepoint;
312
313 #if COMPILER2_OR_JVMCI
314 if (jvmci_enabled || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks)) {
315 eliminate_locks(thread, chunk, realloc_failures);
316 }
317 #endif // COMPILER2_OR_JVMCI
318
319 ScopeDesc* trap_scope = chunk->at(0)->scope();
320 Handle exceptionObject;
321 if (trap_scope->rethrow_exception()) {
322 if (PrintDeoptimizationDetails) {
323 tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_scope->method()->method_holder()->name()->as_C_string(), trap_scope->method()->name()->as_C_string(), trap_scope->bci());
324 }
325 GrowableArray<ScopeValue*>* expressions = trap_scope->expressions();
326 guarantee(expressions != NULL && expressions->length() > 0, "must have exception to throw");
327 ScopeValue* topOfStack = expressions->top();
328 exceptionObject = StackValue::create_stack_value(&deoptee, &map, topOfStack)->get_obj();
329 guarantee(exceptionObject() != NULL, "exception oop can not be null");
330 }
331
332 vframeArray* array = create_vframeArray(thread, deoptee, &map, chunk, realloc_failures);
333 #if COMPILER2_OR_JVMCI
334 if (realloc_failures) {
335 pop_frames_failed_reallocs(thread, array);
336 }
337 #endif
338
339 assert(thread->vframe_array_head() == NULL, "Pending deopt!");
340 thread->set_vframe_array_head(array);
341
342 // Now that the vframeArray has been created if we have any deferred local writes
343 // added by jvmti then we can free up that structure as the data is now in the
344 // vframeArray
345
346 if (thread->deferred_locals() != NULL) {
347 GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
348 int i = 0;
349 do {
350 // Because of inlining we could have multiple vframes for a single frame
351 // and several of the vframes could have deferred writes. Find them all.
781 vframeArrayElement* el = cur_array->element(k);
782 tty->print_cr(" %s (bci %d)", el->method()->name_and_sig_as_C_string(), el->bci());
783 }
784 cur_array->print_on_2(tty);
785 } // release tty lock before calling guarantee
786 guarantee(false, "wrong number of expression stack elements during deopt");
787 }
788 VerifyOopClosure verify;
789 iframe->oops_interpreted_do(&verify, &rm, false);
790 callee_size_of_parameters = mh->size_of_parameters();
791 callee_max_locals = mh->max_locals();
792 is_top_frame = false;
793 }
794 }
795 #endif /* !PRODUCT */
796
797
798 return bt;
799 JRT_END
800
801 class DeoptimizeMarkedTC : public ThreadClosure {
802 public:
803 virtual void do_thread(Thread* thread) {
804 assert(thread->is_Java_thread(), "must be");
805 JavaThread* jt = (JavaThread*)thread;
806 jt->deoptimize_marked_methods();
807 }
808 };
809
// Deoptimize, across all Java threads, every frame belonging to an nmethod
// that has been marked for deoptimization. Works both at a safepoint (direct
// thread iteration) and outside one (handshake with each thread).
void Deoptimization::deoptimize_all_marked() {
  ResourceMark rm;
  DeoptimizationMarker dm;

  if (SafepointSynchronize::is_at_safepoint()) {
    DeoptimizeMarkedTC deopt;
    // Make the dependent methods not entrant
    // NOTE(review): no CodeCache_lock taken on this branch — presumably safe
    // because all mutators are stopped at the safepoint; confirm.
    CodeCache::make_marked_nmethods_not_entrant();
    Threads::java_threads_do(&deopt);
  } else {
    // Make the dependent methods not entrant
    {
      MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      CodeCache::make_marked_nmethods_not_entrant();
    }
    DeoptimizeMarkedTC deopt;
    Handshake::execute(&deopt);
  }
}
829
// Default action taken for traps whose target class is not loaded:
// re-execute the bytecode in the interpreter.
Deoptimization::DeoptAction Deoptimization::_unloaded_action
  = Deoptimization::Action_reinterpret;
832
833
834
835 #if INCLUDE_JVMCI || INCLUDE_AOT
836 template<typename CacheType>
837 class BoxCacheBase : public CHeapObj<mtCompiler> {
838 protected:
839 static InstanceKlass* find_cache_klass(Symbol* klass_name, TRAPS) {
840 ResourceMark rm;
841 char* klass_name_str = klass_name->as_C_string();
842 Klass* k = SystemDictionary::find(klass_name, Handle(), Handle(), THREAD);
843 guarantee(k != NULL, "%s must be loaded", klass_name_str);
844 InstanceKlass* ik = InstanceKlass::cast(k);
845 guarantee(ik->is_initialized(), "%s must be initialized", klass_name_str);
846 CacheType::compute_offsets(ik);
847 return ik;
1422 array->element(i)->free_monitors(thread);
1423 #ifdef ASSERT
1424 array->element(i)->set_removed_monitors();
1425 #endif
1426 }
1427 }
1428 }
1429 #endif
1430
1431 static void collect_monitors(compiledVFrame* cvf, GrowableArray<Handle>* objects_to_revoke) {
1432 GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
1433 Thread* thread = Thread::current();
1434 for (int i = 0; i < monitors->length(); i++) {
1435 MonitorInfo* mon_info = monitors->at(i);
1436 if (!mon_info->eliminated() && mon_info->owner() != NULL) {
1437 objects_to_revoke->append(Handle(thread, mon_info->owner()));
1438 }
1439 }
1440 }
1441
// Collect, into objects_to_revoke, handles to all objects locked by monitors
// in frame fr (including inlined scopes). If the supplied RegisterMap is
// absent or not update-capable, the stack is re-walked to obtain one.
static void get_monitors_from_stack(GrowableArray<Handle>* objects_to_revoke, JavaThread* thread, frame fr, RegisterMap* map) {
  // Unfortunately we don't have a RegisterMap available in most of
  // the places we want to call this routine so we need to walk the
  // stack again to update the register map.
  if (map == NULL || !map->update_map()) {
    StackFrameStream sfs(thread, true);
    bool found = false;
    while (!found && !sfs.is_done()) {
      frame* cur = sfs.current();
      sfs.next();
      found = cur->id() == fr.id();
    }
    assert(found, "frame to be deoptimized not found on target thread's stack");
    // NOTE(review): sfs.register_map() points into sfs, which goes out of
    // scope at the end of this block, yet `map` is dereferenced below.
    // Confirm the RegisterMap storage is still valid here, or hoist the
    // StackFrameStream to function scope.
    map = sfs.register_map();
  }

  vframe* vf = vframe::new_vframe(&fr, map, thread);
  compiledVFrame* cvf = compiledVFrame::cast(vf);
  // Revoke monitors' biases in all scopes
  while (!cvf->is_top()) {
    collect_monitors(cvf, objects_to_revoke);
    cvf = compiledVFrame::cast(cvf->sender());
  }
  collect_monitors(cvf, objects_to_revoke);
}
1467
// Called from the deopt handler: revoke the biases of all objects locked by
// monitors in frame fr. Uses BiasedLocking::revoke_own_lock, which — judging
// by its name — assumes the lock owner is the current thread; confirm that
// callers only pass the executing thread here.
void Deoptimization::revoke_from_deopt_handler(JavaThread* thread, frame fr, RegisterMap* map) {
  if (!UseBiasedLocking) {
    return;
  }
  GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();
  get_monitors_from_stack(objects_to_revoke, thread, fr, map);

  int len = objects_to_revoke->length();
  for (int i = 0; i < len; i++) {
    // Keep the raw oop around only to verify (in debug builds) that the
    // bias really was revoked.
    oop obj = (objects_to_revoke->at(i))();
    BiasedLocking::revoke_own_lock(objects_to_revoke->at(i), thread);
    assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");
  }
}
1482
1483
// Patch a single compiled frame so that when control returns to it, it will
// deopt into the interpreter. Also updates trap statistics and, with
// -XX:+LogCompilation, emits a <deoptimized> element describing the frame's
// inlining tree.
void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
  assert(fr.can_be_deoptimized(), "checking frame type");

  gather_statistics(reason, Action_none, Bytecodes::_illegal);

  if (LogCompilation && xtty != NULL) {
    CompiledMethod* cm = fr.cb()->as_compiled_method_or_null();
    assert(cm != NULL, "only compiled methods can deopt");

    ttyLocker ttyl;
    xtty->begin_head("deoptimized thread='" UINTX_FORMAT "' reason='%s' pc='" INTPTR_FORMAT "'",(uintx)thread->osthread()->thread_id(), trap_reason_name(reason), p2i(fr.pc()));
    cm->log_identity(xtty);
    xtty->end_head();
    // One <jvms> element per scope, walking from the innermost scope at
    // fr.pc() out to the top of the inlining tree.
    for (ScopeDesc* sd = cm->scope_desc_at(fr.pc()); ; sd = sd->sender()) {
      xtty->begin_elem("jvms bci='%d'", sd->bci());
      xtty->method(sd->method());
      xtty->end_elem();
      if (sd->is_top()) break;
    }
    xtty->tail("deoptimized");
  }

  // Patch the compiled method so that when execution returns to it we will
  // deopt the execution state and return to the interpreter.
  fr.deoptimize(thread);
}
1510
1511 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map, DeoptReason reason) {
1512 // Deoptimize only if the frame comes from compile code.
1513 // Do not deoptimize the frame which is already patched
1514 // during the execution of the loops below.
1515 if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) {
1516 return;
1517 }
1518 ResourceMark rm;
1519 DeoptimizationMarker dm;
1520 deoptimize_single_frame(thread, fr, reason);
1521 }
1522
1523 #if INCLUDE_JVMCI
1524 address Deoptimization::deoptimize_for_missing_exception_handler(CompiledMethod* cm) {
1525 // there is no exception handler for this pc => deoptimize
1526 cm->make_not_entrant();
1527
1528 // Use Deoptimization::deoptimize for all of its side-effects:
1529 // revoking biases of monitors, gathering traps statistics, logging...
1530 // it also patches the return pc but we do not care about that
1531 // since we return a continuation to the deopt_blob below.
1532 JavaThread* thread = JavaThread::current();
1533 RegisterMap reg_map(thread, UseBiasedLocking);
1534 frame runtime_frame = thread->last_frame();
1535 frame caller_frame = runtime_frame.sender(®_map);
1536 assert(caller_frame.cb()->as_compiled_method_or_null() == cm, "expect top frame compiled method");
1537 Deoptimization::deoptimize(thread, caller_frame, ®_map, Deoptimization::Reason_not_compiled_exception_handler);
1538
1539 MethodData* trap_mdo = get_method_data(thread, cm->method(), true);
1540 if (trap_mdo != NULL) {
1660
1661 // We need to update the map if we have biased locking.
1662 #if INCLUDE_JVMCI
1663 // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
1664 RegisterMap reg_map(thread, true);
1665 #else
1666 RegisterMap reg_map(thread, UseBiasedLocking);
1667 #endif
1668 frame stub_frame = thread->last_frame();
1669 frame fr = stub_frame.sender(®_map);
1670 // Make sure the calling nmethod is not getting deoptimized and removed
1671 // before we are done with it.
1672 nmethodLocker nl(fr.pc());
1673
1674 // Log a message
1675 Events::log_deopt_message(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
1676 trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
1677
1678 {
1679 ResourceMark rm;
1680
1681 DeoptReason reason = trap_request_reason(trap_request);
1682 DeoptAction action = trap_request_action(trap_request);
1683 #if INCLUDE_JVMCI
1684 int debug_id = trap_request_debug_id(trap_request);
1685 #endif
1686 jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
1687
1688 vframe* vf = vframe::new_vframe(&fr, ®_map, thread);
1689 compiledVFrame* cvf = compiledVFrame::cast(vf);
1690
1691 CompiledMethod* nm = cvf->code();
1692
1693 ScopeDesc* trap_scope = cvf->scope();
1694
1695 if (TraceDeoptimization) {
1696 ttyLocker ttyl;
1697 tty->print_cr(" bci=%d pc=" INTPTR_FORMAT ", relative_pc=" INTPTR_FORMAT ", method=%s" JVMCI_ONLY(", debug_id=%d"), trap_scope->bci(), p2i(fr.pc()), fr.pc() - nm->code_begin(), trap_scope->method()->name_and_sig_as_C_string()
1698 #if INCLUDE_JVMCI
1699 , debug_id
|