140 // ResetNoHandleMark and HandleMark were removed from it. The actual reallocation
141 // of previously eliminated objects occurs in realloc_objects, which is
142 // called from the method fetch_unroll_info_helper below.
// Entry point called from the deopt blob at the start of deoptimization:
// records that this thread is in the deopt handler, then builds and returns
// the UnrollBlock via fetch_unroll_info_helper().
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
  // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
  // but makes the entry a little slower. There is however a little dance we have to
  // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro

  // fetch_unroll_info() is called at the beginning of the deoptimization
  // handler. Note this fact before we start generating temporary frames
  // that can confuse an asynchronous stack walker. This counter is
  // decremented at the end of unpack_frames().
  if (TraceDeoptimization) {
    tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
  }
  thread->inc_in_deopt_handler();

  return fetch_unroll_info_helper(thread, exec_mode);
JRT_END
159
160
161 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
162 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
163
164 // Note: there is a safepoint safety issue here. No matter whether we enter
165 // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
166 // the vframeArray is created.
167 //
168
169 // Allocate our special deoptimization ResourceMark
170 DeoptResourceMark* dmark = new DeoptResourceMark(thread);
171 assert(thread->deopt_mark() == NULL, "Pending deopt!");
172 thread->set_deopt_mark(dmark);
173
174 frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
175 RegisterMap map(thread, true);
176 RegisterMap dummy_map(thread, false);
177 // Now get the deoptee with a valid map
178 frame deoptee = stub_frame.sender(&map);
179 // Set the deoptee nmethod
180 assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
181 CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
182 thread->set_deopt_compiled_method(cm);
183
184 if (VerifyStack) {
185 thread->validate_frame_layout();
186 }
187
188 // Create a growable array of VFrames where each VFrame represents an inlined
189 // Java frame. This storage is allocated with the usual system arena.
190 assert(deoptee.is_compiled_frame(), "Wrong frame type");
191 GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
192 vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
193 while (!vf->is_top()) {
194 assert(vf->is_compiled_frame(), "Wrong frame type");
195 chunk->push(compiledVFrame::cast(vf));
196 vf = vf->sender();
197 }
198 assert(vf->is_compiled_frame(), "Wrong frame type");
199 chunk->push(compiledVFrame::cast(vf));
200
201 bool realloc_failures = false;
202
203 #if COMPILER2_OR_JVMCI
204 // Reallocate the non-escaping objects and restore their fields. Then
205 // relock objects if synchronization on them was eliminated.
206 #if !INCLUDE_JVMCI
207 if (DoEscapeAnalysis || EliminateNestedLocks) {
208 if (EliminateAllocations) {
209 #endif // INCLUDE_JVMCI
210 assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
211 GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
212
213 // The flag return_oop() indicates call sites which return oop
214 // in compiled code. Such sites include java method calls,
215 // runtime calls (for example, used to allocate new objects/arrays
216 // on slow code path) and any other calls generated in compiled code.
217 // It is not guaranteed that we can get such information here only
218 // by analyzing bytecode in deoptimized frames. This is why this flag
219 // is set during method compilation (see Compile::Process_OopMap_Node()).
220 // If the previous frame was popped or if we are dispatching an exception,
221 // we don't have an oop result.
222 bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
223 Handle return_value;
224 if (save_oop_result) {
225 // Reallocation may trigger GC. If deoptimization happened on return from
226 // call which returns oop we need to save it since it is not in oopmap.
227 oop result = deoptee.saved_oop_result(&map);
228 assert(oopDesc::is_oop_or_null(result), "must be oop");
229 return_value = Handle(thread, result);
230 assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
231 if (TraceDeoptimization) {
232 ttyLocker ttyl;
233 tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
234 }
235 }
236 if (objects != NULL) {
237 JRT_BLOCK
238 realloc_failures = realloc_objects(thread, &deoptee, &map, objects, THREAD);
239 JRT_END
240 bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
241 reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
242 #ifndef PRODUCT
243 if (TraceDeoptimization) {
244 ttyLocker ttyl;
245 tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
246 print_objects(objects, realloc_failures);
247 }
248 #endif
249 }
250 if (save_oop_result) {
251 // Restore result.
252 deoptee.set_saved_oop_result(&map, return_value());
253 }
254 #if !INCLUDE_JVMCI
255 }
256 if (EliminateLocks) {
257 #endif // INCLUDE_JVMCI
258 #ifndef PRODUCT
259 bool first = true;
260 #endif
261 for (int i = 0; i < chunk->length(); i++) {
262 compiledVFrame* cvf = chunk->at(i);
263 assert (cvf->scope() != NULL,"expect only compiled java frames");
264 GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
265 if (monitors->is_nonempty()) {
266 relock_objects(monitors, thread, realloc_failures);
267 #ifndef PRODUCT
268 if (PrintDeoptimizationDetails) {
269 ttyLocker ttyl;
270 for (int j = 0; j < monitors->length(); j++) {
271 MonitorInfo* mi = monitors->at(j);
272 if (mi->eliminated()) {
273 if (first) {
274 first = false;
275 tty->print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
276 }
277 if (mi->owner_is_scalar_replaced()) {
278 Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
279 tty->print_cr(" failed reallocation for klass %s", k->external_name());
280 } else {
281 tty->print_cr(" object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
282 }
283 }
284 }
285 }
286 #endif // !PRODUCT
287 }
288 }
289 #if !INCLUDE_JVMCI
290 }
291 }
292 #endif // INCLUDE_JVMCI
293 #endif // COMPILER2_OR_JVMCI
294
295 ScopeDesc* trap_scope = chunk->at(0)->scope();
296 Handle exceptionObject;
297 if (trap_scope->rethrow_exception()) {
298 if (PrintDeoptimizationDetails) {
299 tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_scope->method()->method_holder()->name()->as_C_string(), trap_scope->method()->name()->as_C_string(), trap_scope->bci());
300 }
301 GrowableArray<ScopeValue*>* expressions = trap_scope->expressions();
302 guarantee(expressions != NULL && expressions->length() > 0, "must have exception to throw");
303 ScopeValue* topOfStack = expressions->top();
304 exceptionObject = StackValue::create_stack_value(&deoptee, &map, topOfStack)->get_obj();
305 guarantee(exceptionObject() != NULL, "exception oop can not be null");
306 }
307
308 // Ensure that no safepoint is taken after pointers have been stored
309 // in fields of rematerialized objects. If a safepoint occurs from here on
310 // out the java state residing in the vframeArray will be missed.
311 NoSafepointVerifier no_safepoint;
312
313 vframeArray* array = create_vframeArray(thread, deoptee, &map, chunk, realloc_failures);
314 #if COMPILER2_OR_JVMCI
315 if (realloc_failures) {
316 pop_frames_failed_reallocs(thread, array);
317 }
318 #endif
319
320 assert(thread->vframe_array_head() == NULL, "Pending deopt!");
321 thread->set_vframe_array_head(array);
322
323 // Now that the vframeArray has been created if we have any deferred local writes
324 // added by jvmti then we can free up that structure as the data is now in the
325 // vframeArray
326
327 if (thread->deferred_locals() != NULL) {
328 GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
329 int i = 0;
330 do {
331 // Because of inlining we could have multiple vframes for a single frame
332 // and several of the vframes could have deferred writes. Find them all.
762 vframeArrayElement* el = cur_array->element(k);
763 tty->print_cr(" %s (bci %d)", el->method()->name_and_sig_as_C_string(), el->bci());
764 }
765 cur_array->print_on_2(tty);
766 } // release tty lock before calling guarantee
767 guarantee(false, "wrong number of expression stack elements during deopt");
768 }
769 VerifyOopClosure verify;
770 iframe->oops_interpreted_do(&verify, &rm, false);
771 callee_size_of_parameters = mh->size_of_parameters();
772 callee_max_locals = mh->max_locals();
773 is_top_frame = false;
774 }
775 }
776 #endif /* !PRODUCT */
777
778
779 return bt;
780 JRT_END
781
782
// Deoptimize frames with respect to nmethods that have been marked for
// deoptimization. Delegates to Threads::deoptimized_wrt_marked_nmethods();
// always returns 0.
int Deoptimization::deoptimize_dependents() {
  Threads::deoptimized_wrt_marked_nmethods();
  return 0;
}
787
// Default trap action for "unloaded" cases (judging by the name — the uses
// are outside this chunk): reinterpret, i.e. continue in the interpreter.
Deoptimization::DeoptAction Deoptimization::_unloaded_action
  = Deoptimization::Action_reinterpret;
790
791
792
793 #if INCLUDE_JVMCI || INCLUDE_AOT
794 template<typename CacheType>
795 class BoxCacheBase : public CHeapObj<mtCompiler> {
796 protected:
797 static InstanceKlass* find_cache_klass(Symbol* klass_name, TRAPS) {
798 ResourceMark rm;
799 char* klass_name_str = klass_name->as_C_string();
800 Klass* k = SystemDictionary::find(klass_name, Handle(), Handle(), THREAD);
801 guarantee(k != NULL, "%s must be loaded", klass_name_str);
802 InstanceKlass* ik = InstanceKlass::cast(k);
803 guarantee(ik->is_initialized(), "%s must be initialized", klass_name_str);
804 CacheType::compute_offsets(ik);
805 return ik;
1380 array->element(i)->free_monitors(thread);
1381 #ifdef ASSERT
1382 array->element(i)->set_removed_monitors();
1383 #endif
1384 }
1385 }
1386 }
1387 #endif
1388
1389 static void collect_monitors(compiledVFrame* cvf, GrowableArray<Handle>* objects_to_revoke) {
1390 GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
1391 Thread* thread = Thread::current();
1392 for (int i = 0; i < monitors->length(); i++) {
1393 MonitorInfo* mon_info = monitors->at(i);
1394 if (!mon_info->eliminated() && mon_info->owner() != NULL) {
1395 objects_to_revoke->append(Handle(thread, mon_info->owner()));
1396 }
1397 }
1398 }
1399
1400
// Walk all scopes of the compiled frame 'fr' on 'thread's stack, collect the
// owner objects of all non-eliminated monitors, and revoke their biases.
// Uses the safepoint or non-safepoint revocation path depending on whether
// we are currently at a safepoint. No-op when biased locking is disabled.
void Deoptimization::revoke_biases_of_monitors(JavaThread* thread, frame fr, RegisterMap* map) {
  if (!UseBiasedLocking) {
    return;
  }

  GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();

  // Unfortunately we don't have a RegisterMap available in most of
  // the places we want to call this routine so we need to walk the
  // stack again to update the register map.
  if (map == NULL || !map->update_map()) {
    StackFrameStream sfs(thread, true);
    bool found = false;
    // Advance the stream until the frame just passed matches fr's id; the
    // stream's register map then describes 'fr'.
    while (!found && !sfs.is_done()) {
      frame* cur = sfs.current();
      sfs.next();
      found = cur->id() == fr.id();
    }
    assert(found, "frame to be deoptimized not found on target thread's stack");
    // NOTE(review): 'sfs' goes out of scope at the end of this block while
    // 'map' is still used below — confirm the RegisterMap returned by
    // register_map() remains valid after the stream is destroyed.
    map = sfs.register_map();
  }

  vframe* vf = vframe::new_vframe(&fr, map, thread);
  compiledVFrame* cvf = compiledVFrame::cast(vf);
  // Revoke monitors' biases in all scopes
  while (!cvf->is_top()) {
    collect_monitors(cvf, objects_to_revoke);
    cvf = compiledVFrame::cast(cvf->sender());
  }
  collect_monitors(cvf, objects_to_revoke);

  if (SafepointSynchronize::is_at_safepoint()) {
    BiasedLocking::revoke_at_safepoint(objects_to_revoke);
  } else {
    BiasedLocking::revoke(objects_to_revoke, thread);
  }
}
1438
1439
// Patch one compiled frame so that when execution returns to it, the
// execution state is deopted and control continues in the interpreter.
// Also records deopt statistics and, with -XX:+LogCompilation, emits a
// <deoptimized> log element describing the inlining tree at fr.pc().
void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
  assert(fr.can_be_deoptimized(), "checking frame type");

  gather_statistics(reason, Action_none, Bytecodes::_illegal);

  if (LogCompilation && xtty != NULL) {
    CompiledMethod* cm = fr.cb()->as_compiled_method_or_null();
    assert(cm != NULL, "only compiled methods can deopt");

    ttyLocker ttyl;
    xtty->begin_head("deoptimized thread='" UINTX_FORMAT "' reason='%s' pc='" INTPTR_FORMAT "'",(uintx)thread->osthread()->thread_id(), trap_reason_name(reason), p2i(fr.pc()));
    cm->log_identity(xtty);
    xtty->end_head();
    // One <jvms> element per scope, innermost first, up to the outermost
    // (top) scope of the inlining tree at this pc.
    for (ScopeDesc* sd = cm->scope_desc_at(fr.pc()); ; sd = sd->sender()) {
      xtty->begin_elem("jvms bci='%d'", sd->bci());
      xtty->method(sd->method());
      xtty->end_elem();
      if (sd->is_top()) break;
    }
    xtty->tail("deoptimized");
  }

  // Patch the compiled method so that when execution returns to it we will
  // deopt the execution state and return to the interpreter.
  fr.deoptimize(thread);
}
1466
// Convenience overload: deoptimize 'fr' with the generic Reason_constraint.
void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map) {
  deoptimize(thread, fr, map, Reason_constraint);
}
1470
1471 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map, DeoptReason reason) {
1472 // Deoptimize only if the frame comes from compile code.
1473 // Do not deoptimize the frame which is already patched
1474 // during the execution of the loops below.
1475 if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) {
1476 return;
1477 }
1478 ResourceMark rm;
1479 DeoptimizationMarker dm;
1480 if (UseBiasedLocking) {
1481 revoke_biases_of_monitors(thread, fr, map);
1482 }
1483 deoptimize_single_frame(thread, fr, reason);
1484
1485 }
1486
1487 #if INCLUDE_JVMCI
1488 address Deoptimization::deoptimize_for_missing_exception_handler(CompiledMethod* cm) {
1489 // there is no exception handler for this pc => deoptimize
1490 cm->make_not_entrant();
1491
1492 // Use Deoptimization::deoptimize for all of its side-effects:
1493 // revoking biases of monitors, gathering traps statistics, logging...
1494 // it also patches the return pc but we do not care about that
1495 // since we return a continuation to the deopt_blob below.
1496 JavaThread* thread = JavaThread::current();
1497 RegisterMap reg_map(thread, UseBiasedLocking);
1498 frame runtime_frame = thread->last_frame();
1499 frame caller_frame = runtime_frame.sender(®_map);
1500 assert(caller_frame.cb()->as_compiled_method_or_null() == cm, "expect top frame compiled method");
1501 Deoptimization::deoptimize(thread, caller_frame, ®_map, Deoptimization::Reason_not_compiled_exception_handler);
1502
1503 MethodData* trap_mdo = get_method_data(thread, cm->method(), true);
1504 if (trap_mdo != NULL) {
1624
1625 // We need to update the map if we have biased locking.
1626 #if INCLUDE_JVMCI
1627 // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
1628 RegisterMap reg_map(thread, true);
1629 #else
1630 RegisterMap reg_map(thread, UseBiasedLocking);
1631 #endif
1632 frame stub_frame = thread->last_frame();
1633 frame fr = stub_frame.sender(®_map);
1634 // Make sure the calling nmethod is not getting deoptimized and removed
1635 // before we are done with it.
1636 nmethodLocker nl(fr.pc());
1637
1638 // Log a message
1639 Events::log_deopt_message(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
1640 trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
1641
1642 {
1643 ResourceMark rm;
1644
1645 // Revoke biases of any monitors in the frame to ensure we can migrate them
1646 revoke_biases_of_monitors(thread, fr, ®_map);
1647
1648 DeoptReason reason = trap_request_reason(trap_request);
1649 DeoptAction action = trap_request_action(trap_request);
1650 #if INCLUDE_JVMCI
1651 int debug_id = trap_request_debug_id(trap_request);
1652 #endif
1653 jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
1654
1655 vframe* vf = vframe::new_vframe(&fr, ®_map, thread);
1656 compiledVFrame* cvf = compiledVFrame::cast(vf);
1657
1658 CompiledMethod* nm = cvf->code();
1659
1660 ScopeDesc* trap_scope = cvf->scope();
1661
1662 if (TraceDeoptimization) {
1663 ttyLocker ttyl;
1664 tty->print_cr(" bci=%d pc=" INTPTR_FORMAT ", relative_pc=" INTPTR_FORMAT ", method=%s" JVMCI_ONLY(", debug_id=%d"), trap_scope->bci(), p2i(fr.pc()), fr.pc() - nm->code_begin(), trap_scope->method()->name_and_sig_as_C_string()
1665 #if INCLUDE_JVMCI
1666 , debug_id
|
140 // ResetNoHandleMark and HandleMark were removed from it. The actual reallocation
141 // of previously eliminated objects occurs in realloc_objects, which is
142 // called from the method fetch_unroll_info_helper below.
// Entry point called from the deopt blob at the start of deoptimization:
// records that this thread is in the deopt handler, then builds and returns
// the UnrollBlock via fetch_unroll_info_helper().
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
  // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
  // but makes the entry a little slower. There is however a little dance we have to
  // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro

  // fetch_unroll_info() is called at the beginning of the deoptimization
  // handler. Note this fact before we start generating temporary frames
  // that can confuse an asynchronous stack walker. This counter is
  // decremented at the end of unpack_frames().
  if (TraceDeoptimization) {
    tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
  }
  thread->inc_in_deopt_handler();

  return fetch_unroll_info_helper(thread, exec_mode);
JRT_END
159
160 #if COMPILER2_OR_JVMCI
// Reallocate the objects whose allocations were eliminated by the compiler
// (scalar-replaced objects) in the youngest deoptee scope and restore their
// field values from the debug info. If the deoptee stopped at a call that
// returns an oop, that result is saved across the (possibly GC-triggering)
// reallocation and restored afterwards. Returns true if any reallocation
// failed, so the caller can pop the affected frames.
static bool eliminate_allocations(JavaThread* thread, int exec_mode, CompiledMethod* compiled_method,
                                  frame& deoptee, RegisterMap& map, GrowableArray<compiledVFrame*>* chunk) {
  bool realloc_failures = false;
  assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");

  GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();

  // The flag return_oop() indicates call sites which return oop
  // in compiled code. Such sites include java method calls,
  // runtime calls (for example, used to allocate new objects/arrays
  // on slow code path) and any other calls generated in compiled code.
  // It is not guaranteed that we can get such information here only
  // by analyzing bytecode in deoptimized frames. This is why this flag
  // is set during method compilation (see Compile::Process_OopMap_Node()).
  // If the previous frame was popped or if we are dispatching an exception,
  // we don't have an oop result.
  bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Deoptimization::Unpack_deopt);
  Handle return_value;
  if (save_oop_result) {
    // Reallocation may trigger GC. If deoptimization happened on return from
    // call which returns oop we need to save it since it is not in oopmap.
    oop result = deoptee.saved_oop_result(&map);
    assert(oopDesc::is_oop_or_null(result), "must be oop");
    return_value = Handle(thread, result);
    assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
    if (TraceDeoptimization) {
      ttyLocker ttyl;
      tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
    }
  }
  if (objects != NULL) {
    // realloc_objects is a VM call taking THREAD, so run it inside a
    // JRT_BLOCK.
    JRT_BLOCK
      realloc_failures = Deoptimization::realloc_objects(thread, &deoptee, &map, objects, THREAD);
    JRT_END
    // JVMCI-compiled code gets its internal (compiler-private) fields
    // reassigned as well; for other compilers they are skipped.
    bool skip_internal = (compiled_method != NULL) && !compiled_method->is_compiled_by_jvmci();
    Deoptimization::reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
#ifndef PRODUCT
    if (TraceDeoptimization) {
      ttyLocker ttyl;
      tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
      Deoptimization::print_objects(objects, realloc_failures);
    }
#endif
  }
  if (save_oop_result) {
    // Restore result.
    deoptee.set_saved_oop_result(&map, return_value());
  }
  return realloc_failures;
}
211
// Relock objects whose synchronization was eliminated by the compiler, for
// every scope of the deoptee; 'realloc_failures' is passed through to
// relock_objects. With -XX:+PrintDeoptimizationDetails the eliminated
// monitors are printed, with a banner emitted once per call.
static void eliminate_locks(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures) {
#ifndef PRODUCT
  bool first = true;  // print the RELOCK OBJECTS banner only once
#endif
  for (int i = 0; i < chunk->length(); i++) {
    compiledVFrame* cvf = chunk->at(i);
    assert (cvf->scope() != NULL,"expect only compiled java frames");
    GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
    if (monitors->is_nonempty()) {
      Deoptimization::relock_objects(monitors, thread, realloc_failures);
#ifndef PRODUCT
      if (PrintDeoptimizationDetails) {
        ttyLocker ttyl;
        for (int j = 0; j < monitors->length(); j++) {
          MonitorInfo* mi = monitors->at(j);
          if (mi->eliminated()) {
            if (first) {
              first = false;
              tty->print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
            }
            if (mi->owner_is_scalar_replaced()) {
              // Owner was scalar replaced and could not be reallocated;
              // report its klass instead of an object address.
              Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
              tty->print_cr(" failed reallocation for klass %s", k->external_name());
            } else {
              tty->print_cr(" object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
            }
          }
        }
      }
#endif // !PRODUCT
    }
  }
}
245 #endif // COMPILER2_OR_JVMCI
246
247 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
248 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
249
250 // Note: there is a safepoint safety issue here. No matter whether we enter
251 // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
252 // the vframeArray is created.
253 //
254
255 // Allocate our special deoptimization ResourceMark
256 DeoptResourceMark* dmark = new DeoptResourceMark(thread);
257 assert(thread->deopt_mark() == NULL, "Pending deopt!");
258 thread->set_deopt_mark(dmark);
259
260 frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
261 RegisterMap map(thread, true);
262 RegisterMap dummy_map(thread, false);
263 // Now get the deoptee with a valid map
264 frame deoptee = stub_frame.sender(&map);
265 // Set the deoptee nmethod
266 assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
267 CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
268 thread->set_deopt_compiled_method(cm);
269
270 if (VerifyStack) {
271 thread->validate_frame_layout();
272 }
273
274 // Create a growable array of VFrames where each VFrame represents an inlined
275 // Java frame. This storage is allocated with the usual system arena.
276 assert(deoptee.is_compiled_frame(), "Wrong frame type");
277 GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
278 vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
279 while (!vf->is_top()) {
280 assert(vf->is_compiled_frame(), "Wrong frame type");
281 chunk->push(compiledVFrame::cast(vf));
282 vf = vf->sender();
283 }
284 assert(vf->is_compiled_frame(), "Wrong frame type");
285 chunk->push(compiledVFrame::cast(vf));
286
287 bool realloc_failures = false;
288
289 #if COMPILER2_OR_JVMCI
290 #if INCLUDE_JVMCI
291 bool jvmci_enabled = true;
292 #else
293 bool jvmci_enabled = false;
294 #endif
295
296 // Reallocate the non-escaping objects and restore their fields. Then
297 // relock objects if synchronization on them was eliminated.
298 if (jvmci_enabled || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateAllocations)) {
299 realloc_failures = eliminate_allocations(thread, exec_mode, cm, deoptee, map, chunk);
300 }
301
302 // Revoke biases, done with in java state.
303 // No safepoints allowed after this
304 revoke_from_deopt_handler(thread, deoptee, &map);
305
306 // Ensure that no safepoint is taken after pointers have been stored
307 // in fields of rematerialized objects. If a safepoint occurs from here on
308 // out the java state residing in the vframeArray will be missed.
309 // Locks may be rebaised in a safepoint.
310 NoSafepointVerifier no_safepoint;
311
312 if (jvmci_enabled || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks)) {
313 eliminate_locks(thread, chunk, realloc_failures);
314 }
315 #endif // COMPILER2_OR_JVMCI
316
317 ScopeDesc* trap_scope = chunk->at(0)->scope();
318 Handle exceptionObject;
319 if (trap_scope->rethrow_exception()) {
320 if (PrintDeoptimizationDetails) {
321 tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_scope->method()->method_holder()->name()->as_C_string(), trap_scope->method()->name()->as_C_string(), trap_scope->bci());
322 }
323 GrowableArray<ScopeValue*>* expressions = trap_scope->expressions();
324 guarantee(expressions != NULL && expressions->length() > 0, "must have exception to throw");
325 ScopeValue* topOfStack = expressions->top();
326 exceptionObject = StackValue::create_stack_value(&deoptee, &map, topOfStack)->get_obj();
327 guarantee(exceptionObject() != NULL, "exception oop can not be null");
328 }
329
330 vframeArray* array = create_vframeArray(thread, deoptee, &map, chunk, realloc_failures);
331 #if COMPILER2_OR_JVMCI
332 if (realloc_failures) {
333 pop_frames_failed_reallocs(thread, array);
334 }
335 #endif
336
337 assert(thread->vframe_array_head() == NULL, "Pending deopt!");
338 thread->set_vframe_array_head(array);
339
340 // Now that the vframeArray has been created if we have any deferred local writes
341 // added by jvmti then we can free up that structure as the data is now in the
342 // vframeArray
343
344 if (thread->deferred_locals() != NULL) {
345 GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
346 int i = 0;
347 do {
348 // Because of inlining we could have multiple vframes for a single frame
349 // and several of the vframes could have deferred writes. Find them all.
779 vframeArrayElement* el = cur_array->element(k);
780 tty->print_cr(" %s (bci %d)", el->method()->name_and_sig_as_C_string(), el->bci());
781 }
782 cur_array->print_on_2(tty);
783 } // release tty lock before calling guarantee
784 guarantee(false, "wrong number of expression stack elements during deopt");
785 }
786 VerifyOopClosure verify;
787 iframe->oops_interpreted_do(&verify, &rm, false);
788 callee_size_of_parameters = mh->size_of_parameters();
789 callee_max_locals = mh->max_locals();
790 is_top_frame = false;
791 }
792 }
793 #endif /* !PRODUCT */
794
795
796 return bt;
797 JRT_END
798
799 class DeoptimizeMarkedTC : public ThreadClosure {
800 public:
801 virtual void do_thread(Thread* thread) {
802 assert(thread->is_Java_thread(), "must be");
803 JavaThread* jt = (JavaThread*)thread;
804 jt->deoptimize_marked_methods();
805 }
806 };
807
// Deoptimize all Java threads' frames for methods that have been marked for
// deoptimization. At a safepoint the marked nmethods are made not entrant
// and every thread is visited directly; otherwise the not-entrant
// transition is done under the CodeCache_lock and each thread processes its
// own frames via a handshake.
void Deoptimization::deoptimize_all_marked() {
  ResourceMark rm;
  DeoptimizationMarker dm;

  if (SafepointSynchronize::is_at_safepoint()) {
    DeoptimizeMarkedTC deopt;
    // Make the dependent methods not entrant
    CodeCache::make_marked_nmethods_not_entrant();
    Threads::java_threads_do(&deopt);
  } else {
    // Make the dependent methods not entrant
    {
      MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      CodeCache::make_marked_nmethods_not_entrant();
    }
    DeoptimizeMarkedTC deopt;
    Handshake::execute(&deopt);
  }
}
827
// Default trap action for "unloaded" cases (judging by the name — the uses
// are outside this chunk): reinterpret, i.e. continue in the interpreter.
Deoptimization::DeoptAction Deoptimization::_unloaded_action
  = Deoptimization::Action_reinterpret;
830
831
832
833 #if INCLUDE_JVMCI || INCLUDE_AOT
834 template<typename CacheType>
835 class BoxCacheBase : public CHeapObj<mtCompiler> {
836 protected:
837 static InstanceKlass* find_cache_klass(Symbol* klass_name, TRAPS) {
838 ResourceMark rm;
839 char* klass_name_str = klass_name->as_C_string();
840 Klass* k = SystemDictionary::find(klass_name, Handle(), Handle(), THREAD);
841 guarantee(k != NULL, "%s must be loaded", klass_name_str);
842 InstanceKlass* ik = InstanceKlass::cast(k);
843 guarantee(ik->is_initialized(), "%s must be initialized", klass_name_str);
844 CacheType::compute_offsets(ik);
845 return ik;
1420 array->element(i)->free_monitors(thread);
1421 #ifdef ASSERT
1422 array->element(i)->set_removed_monitors();
1423 #endif
1424 }
1425 }
1426 }
1427 #endif
1428
1429 static void collect_monitors(compiledVFrame* cvf, GrowableArray<Handle>* objects_to_revoke) {
1430 GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
1431 Thread* thread = Thread::current();
1432 for (int i = 0; i < monitors->length(); i++) {
1433 MonitorInfo* mon_info = monitors->at(i);
1434 if (!mon_info->eliminated() && mon_info->owner() != NULL) {
1435 objects_to_revoke->append(Handle(thread, mon_info->owner()));
1436 }
1437 }
1438 }
1439
// Collect, into 'objects_to_revoke', handles to the owners of all
// non-eliminated monitors in every scope of the compiled frame 'fr'. If no
// valid (updating) RegisterMap is supplied, the stack is walked again to
// build one for 'fr'.
static void get_monitors_from_stack(GrowableArray<Handle>* objects_to_revoke, JavaThread* thread, frame fr, RegisterMap* map) {
  // Unfortunately we don't have a RegisterMap available in most of
  // the places we want to call this routine so we need to walk the
  // stack again to update the register map.
  if (map == NULL || !map->update_map()) {
    StackFrameStream sfs(thread, true);
    bool found = false;
    // Advance the stream until the frame just passed matches fr's id; the
    // stream's register map then describes 'fr'.
    while (!found && !sfs.is_done()) {
      frame* cur = sfs.current();
      sfs.next();
      found = cur->id() == fr.id();
    }
    assert(found, "frame to be deoptimized not found on target thread's stack");
    // NOTE(review): 'sfs' goes out of scope at the end of this block while
    // 'map' is still used below — confirm the RegisterMap returned by
    // register_map() remains valid after the stream is destroyed.
    map = sfs.register_map();
  }

  vframe* vf = vframe::new_vframe(&fr, map, thread);
  compiledVFrame* cvf = compiledVFrame::cast(vf);
  // Revoke monitors' biases in all scopes
  while (!cvf->is_top()) {
    collect_monitors(cvf, objects_to_revoke);
    cvf = compiledVFrame::cast(cvf->sender());
  }
  collect_monitors(cvf, objects_to_revoke);
}
1465
1466 void Deoptimization::revoke_from_deopt_handler(JavaThread* thread, frame fr, RegisterMap* map) {
1467 if (!UseBiasedLocking) {
1468 return;
1469 }
1470 GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();
1471 get_monitors_from_stack(objects_to_revoke, thread, fr, map);
1472
1473 int len = objects_to_revoke->length();
1474 for (int i = 0; i < len; i++) {
1475 oop obj = (objects_to_revoke->at(i))();
1476 BiasedLocking::revoke_own_locks(objects_to_revoke->at(i), thread);
1477 assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");
1478 }
1479 }
1480
1481
// Patch one compiled frame so that when execution returns to it, the
// execution state is deopted and control continues in the interpreter.
// Also records deopt statistics and, with -XX:+LogCompilation, emits a
// <deoptimized> log element describing the inlining tree at fr.pc().
void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
  assert(fr.can_be_deoptimized(), "checking frame type");

  gather_statistics(reason, Action_none, Bytecodes::_illegal);

  if (LogCompilation && xtty != NULL) {
    CompiledMethod* cm = fr.cb()->as_compiled_method_or_null();
    assert(cm != NULL, "only compiled methods can deopt");

    ttyLocker ttyl;
    xtty->begin_head("deoptimized thread='" UINTX_FORMAT "' reason='%s' pc='" INTPTR_FORMAT "'",(uintx)thread->osthread()->thread_id(), trap_reason_name(reason), p2i(fr.pc()));
    cm->log_identity(xtty);
    xtty->end_head();
    // One <jvms> element per scope, innermost first, up to the outermost
    // (top) scope of the inlining tree at this pc.
    for (ScopeDesc* sd = cm->scope_desc_at(fr.pc()); ; sd = sd->sender()) {
      xtty->begin_elem("jvms bci='%d'", sd->bci());
      xtty->method(sd->method());
      xtty->end_elem();
      if (sd->is_top()) break;
    }
    xtty->tail("deoptimized");
  }

  // Patch the compiled method so that when execution returns to it we will
  // deopt the execution state and return to the interpreter.
  fr.deoptimize(thread);
}
1508
1509 void Deoptimization::deoptimize(JavaThread* thread, frame fr, RegisterMap *map, DeoptReason reason) {
1510 // Deoptimize only if the frame comes from compile code.
1511 // Do not deoptimize the frame which is already patched
1512 // during the execution of the loops below.
1513 if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) {
1514 return;
1515 }
1516 ResourceMark rm;
1517 DeoptimizationMarker dm;
1518 deoptimize_single_frame(thread, fr, reason);
1519 }
1520
1521 #if INCLUDE_JVMCI
1522 address Deoptimization::deoptimize_for_missing_exception_handler(CompiledMethod* cm) {
1523 // there is no exception handler for this pc => deoptimize
1524 cm->make_not_entrant();
1525
1526 // Use Deoptimization::deoptimize for all of its side-effects:
1527 // revoking biases of monitors, gathering traps statistics, logging...
1528 // it also patches the return pc but we do not care about that
1529 // since we return a continuation to the deopt_blob below.
1530 JavaThread* thread = JavaThread::current();
1531 RegisterMap reg_map(thread, UseBiasedLocking);
1532 frame runtime_frame = thread->last_frame();
1533 frame caller_frame = runtime_frame.sender(®_map);
1534 assert(caller_frame.cb()->as_compiled_method_or_null() == cm, "expect top frame compiled method");
1535 Deoptimization::deoptimize(thread, caller_frame, ®_map, Deoptimization::Reason_not_compiled_exception_handler);
1536
1537 MethodData* trap_mdo = get_method_data(thread, cm->method(), true);
1538 if (trap_mdo != NULL) {
1658
1659 // We need to update the map if we have biased locking.
1660 #if INCLUDE_JVMCI
1661 // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
1662 RegisterMap reg_map(thread, true);
1663 #else
1664 RegisterMap reg_map(thread, UseBiasedLocking);
1665 #endif
1666 frame stub_frame = thread->last_frame();
1667 frame fr = stub_frame.sender(®_map);
1668 // Make sure the calling nmethod is not getting deoptimized and removed
1669 // before we are done with it.
1670 nmethodLocker nl(fr.pc());
1671
1672 // Log a message
1673 Events::log_deopt_message(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
1674 trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
1675
1676 {
1677 ResourceMark rm;
1678
1679 DeoptReason reason = trap_request_reason(trap_request);
1680 DeoptAction action = trap_request_action(trap_request);
1681 #if INCLUDE_JVMCI
1682 int debug_id = trap_request_debug_id(trap_request);
1683 #endif
1684 jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
1685
1686 vframe* vf = vframe::new_vframe(&fr, ®_map, thread);
1687 compiledVFrame* cvf = compiledVFrame::cast(vf);
1688
1689 CompiledMethod* nm = cvf->code();
1690
1691 ScopeDesc* trap_scope = cvf->scope();
1692
1693 if (TraceDeoptimization) {
1694 ttyLocker ttyl;
1695 tty->print_cr(" bci=%d pc=" INTPTR_FORMAT ", relative_pc=" INTPTR_FORMAT ", method=%s" JVMCI_ONLY(", debug_id=%d"), trap_scope->bci(), p2i(fr.pc()), fr.pc() - nm->code_begin(), trap_scope->method()->name_and_sig_as_C_string()
1696 #if INCLUDE_JVMCI
1697 , debug_id
|