322 do {
323 // Because of inlining we could have multiple vframes for a single frame
324 // and several of the vframes could have deferred writes. Find them all.
325 if (list->at(i)->id() == array->original().id()) {
326 jvmtiDeferredLocalVariableSet* dlv = list->at(i);
327 list->remove_at(i);
328 // individual jvmtiDeferredLocalVariableSet are CHeapObj's
329 delete dlv;
330 } else {
331 i++;
332 }
333 } while ( i < list->length() );
334 if (list->length() == 0) {
335 thread->set_deferred_locals(NULL);
336 // free the list and elements back to C heap.
337 delete list;
338 }
339
340 }
341
342 #ifndef SHARK
343 // Compute the caller frame based on the sender sp of stub_frame and stored frame sizes info.
344 CodeBlob* cb = stub_frame.cb();
345 // Verify we have the right vframeArray
346 assert(cb->frame_size() >= 0, "Unexpected frame size");
347 intptr_t* unpack_sp = stub_frame.sp() + cb->frame_size();
348
349 // If the deopt call site is a MethodHandle invoke call site we have
350 // to adjust the unpack_sp.
351 nmethod* deoptee_nm = deoptee.cb()->as_nmethod_or_null();
352 if (deoptee_nm != NULL && deoptee_nm->is_method_handle_return(deoptee.pc()))
353 unpack_sp = deoptee.unextended_sp();
354
355 #ifdef ASSERT
356 assert(cb->is_deoptimization_stub() ||
357 cb->is_uncommon_trap_stub() ||
358 strcmp("Stub<DeoptimizationStub.deoptimizationHandler>", cb->name()) == 0 ||
359 strcmp("Stub<UncommonTrapStub.uncommonTrapHandler>", cb->name()) == 0,
360 "unexpected code blob: %s", cb->name());
361 #endif
362 #else
363 intptr_t* unpack_sp = stub_frame.sender(&dummy_map).unextended_sp();
364 #endif // !SHARK
365
366 // This is a guarantee instead of an assert because if vframe doesn't match
367 // we will unpack the wrong deoptimized frame and wind up in strange places
368 // where it will be very difficult to figure out what went wrong. Better
369 // to die an early death here than some very obscure death later when the
370 // trail is cold.
371 // Note: on ia64 this guarantee can be fooled by frames with no memory stack
372 // in that it will fail to detect a problem when there is one. This needs
373 // more work in tiger timeframe.
374 guarantee(array->unextended_sp() == unpack_sp, "vframe_array_head must contain the vframeArray to unpack");
375
376 int number_of_frames = array->frames();
377
378 // Compute the vframes' sizes. Note that frame_sizes[] entries are ordered from outermost to innermost
379 // virtual activation, which is the reverse of the elements in the vframes array.
380 intptr_t* frame_sizes = NEW_C_HEAP_ARRAY(intptr_t, number_of_frames, mtCompiler);
381 // +1 because we always have an interpreter return address for the final slot.
382 address* frame_pcs = NEW_C_HEAP_ARRAY(address, number_of_frames + 1, mtCompiler);
383 int popframe_extra_args = 0;
384 // Create an interpreter return address for the stub to use as its return
471 // may not even be enough space).
472
473 // QQQ I'd rather see this pushed down into last_frame_adjust
474 // and have it take the sender (aka caller).
475
476 if (deopt_sender.is_compiled_frame() || caller_was_method_handle) {
477 caller_adjustment = last_frame_adjust(0, callee_locals);
478 } else if (callee_locals > callee_parameters) {
479 // The caller frame may need extending to accommodate
480 // non-parameter locals of the first unpacked interpreted frame.
481 // Compute that adjustment.
482 caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
483 }
484
485 // If the sender is deoptimized then we must retrieve the address of the handler
486 // since the frame will "magically" show the original pc before the deopt
487 // and we'd undo the deopt.
488
489 frame_pcs[0] = deopt_sender.raw_pc();
490
491 #ifndef SHARK
492 assert(CodeCache::find_blob_unsafe(frame_pcs[0]) != NULL, "bad pc");
493 #endif // SHARK
494
495 #ifdef INCLUDE_JVMCI
496 if (exceptionObject() != NULL) {
497 thread->set_exception_oop(exceptionObject());
498 exec_mode = Unpack_exception;
499 }
500 #endif
501
502 if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
503 assert(thread->has_pending_exception(), "should have thrown OOME");
504 thread->set_exception_oop(thread->pending_exception());
505 thread->clear_pending_exception();
506 exec_mode = Unpack_exception;
507 }
508
509 #if INCLUDE_JVMCI
510 if (thread->frames_to_pop_failed_realloc() > 0) {
511 thread->set_pending_monitorenter(false);
512 }
513 #endif
1432 JRT_END
1433
1434 MethodData*  // Return the MethodData for m, optionally building one on demand (best-effort).
1435 Deoptimization::get_method_data(JavaThread* thread, const methodHandle& m,
1436 bool create_if_missing) {
1437 Thread* THREAD = thread;  // conventional alias; the exception macros below presumably expand in terms of THREAD — TODO confirm
1438 MethodData* mdo = m()->method_data();
1439 if (mdo == NULL && create_if_missing && !HAS_PENDING_EXCEPTION) {
1440 // Build an MDO. Ignore errors like OutOfMemory;
1441 // that simply means we won't have an MDO to update.
1442 Method::build_interpreter_method_data(m, THREAD);
1443 if (HAS_PENDING_EXCEPTION) {
1444 assert((PENDING_EXCEPTION->is_a(SystemDictionary::OutOfMemoryError_klass())), "we expect only an OOM error here");
1445 CLEAR_PENDING_EXCEPTION;
1446 }
1447 mdo = m()->method_data();  // re-read: the build call may have installed an MDO (or left NULL on failure)
1448 }
1449 return mdo;  // may be NULL if creation was not requested, failed, or an exception was already pending
1450 }
1451
1452 #if defined(COMPILER2) || defined(SHARK) || INCLUDE_JVMCI
1453 void Deoptimization::load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS) {
1454 // in case of an unresolved klass entry, load the class.
1455 if (constant_pool->tag_at(index).is_unresolved_klass()) {
1456 Klass* tk = constant_pool->klass_at_ignore_error(index, CHECK);
1457 return;
1458 }
1459
1460 if (!constant_pool->tag_at(index).is_symbol()) return;
1461
1462 Handle class_loader (THREAD, constant_pool->pool_holder()->class_loader());
1463 Symbol* symbol = constant_pool->symbol_at(index);
1464
1465 // class name?
1466 if (symbol->byte_at(0) != '(') {
1467 Handle protection_domain (THREAD, constant_pool->pool_holder()->protection_domain());
1468 SystemDictionary::resolve_or_null(symbol, class_loader, protection_domain, CHECK);
1469 return;
1470 }
1471
1472 // then it must be a signature!
2349 if (bc_case == BC_CASE_LIMIT && (int)bc == 0)
2350 bc = Bytecodes::_illegal;
2351 sprintf(name, "%s/%s/%s",
2352 trap_reason_name(reason),
2353 trap_action_name(action),
2354 Bytecodes::is_defined(bc)? Bytecodes::name(bc): "other");
2355 juint r = counter >> LSB_BITS;
2356 tty->print_cr(" %40s: " UINT32_FORMAT " (%.1f%%)", name, r, (r * 100.0) / total);
2357 account -= r;
2358 }
2359 }
2360 }
2361 }
2362 if (account != 0) {
2363 PRINT_STAT_LINE("unaccounted", account);
2364 }
2365 #undef PRINT_STAT_LINE
2366 if (xtty != NULL) xtty->tail("statistics");
2367 }
2368 }
2369 #else // COMPILER2 || SHARK || INCLUDE_JVMCI
2370
2371
2372 // Stubs for C1 only system.
2373 bool Deoptimization::trap_state_is_recompiled(int trap_state) {
2374 return false;  // C1-only stub: trap state is not tracked, so nothing is ever marked recompiled
2375 }
2376
2377 const char* Deoptimization::trap_reason_name(int reason) {
2378 return "unknown";  // C1-only stub: reason codes are not decoded in this build
2379 }
2380
2381 void Deoptimization::print_statistics() {
2382 // no output: deopt statistics are only gathered with COMPILER2/SHARK/JVMCI built in
2383 }
2384
2385 void
2386 Deoptimization::update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason) {
2387 // no update: without a profiling compiler there is no trap history to record
2388 }
2389
2390 int Deoptimization::trap_state_has_reason(int trap_state, int reason) {
2391 return 0;  // C1-only stub: 0 means "reason not present" since trap states are not tracked
2392 }
2393
2394 void Deoptimization::gather_statistics(DeoptReason reason, DeoptAction action,
2395 Bytecodes::Code bc) {
2396 // no update: statistics are only collected when a profiling compiler is present
2397 }
2398
2399 const char* Deoptimization::format_trap_state(char* buf, size_t buflen,
2400 int trap_state) {
2401 jio_snprintf(buf, buflen, "#%d", trap_state);  // no decoding available; print the raw state number
2402 return buf;  // returns the caller-supplied buffer for convenience
2403 }
2404
2405 #endif // COMPILER2 || SHARK || INCLUDE_JVMCI
|
322 do {
323 // Because of inlining we could have multiple vframes for a single frame
324 // and several of the vframes could have deferred writes. Find them all.
325 if (list->at(i)->id() == array->original().id()) {
326 jvmtiDeferredLocalVariableSet* dlv = list->at(i);
327 list->remove_at(i);
328 // individual jvmtiDeferredLocalVariableSet are CHeapObj's
329 delete dlv;
330 } else {
331 i++;
332 }
333 } while ( i < list->length() );
334 if (list->length() == 0) {
335 thread->set_deferred_locals(NULL);
336 // free the list and elements back to C heap.
337 delete list;
338 }
339
340 }
341
342 // Compute the caller frame based on the sender sp of stub_frame and stored frame sizes info.
343 CodeBlob* cb = stub_frame.cb();
344 // Verify we have the right vframeArray
345 assert(cb->frame_size() >= 0, "Unexpected frame size");
346 intptr_t* unpack_sp = stub_frame.sp() + cb->frame_size();
347
348 // If the deopt call site is a MethodHandle invoke call site we have
349 // to adjust the unpack_sp.
350 nmethod* deoptee_nm = deoptee.cb()->as_nmethod_or_null();
351 if (deoptee_nm != NULL && deoptee_nm->is_method_handle_return(deoptee.pc()))
352 unpack_sp = deoptee.unextended_sp();
353
354 #ifdef ASSERT
355 assert(cb->is_deoptimization_stub() ||
356 cb->is_uncommon_trap_stub() ||
357 strcmp("Stub<DeoptimizationStub.deoptimizationHandler>", cb->name()) == 0 ||
358 strcmp("Stub<UncommonTrapStub.uncommonTrapHandler>", cb->name()) == 0,
359 "unexpected code blob: %s", cb->name());
360 #endif
361
362 // This is a guarantee instead of an assert because if vframe doesn't match
363 // we will unpack the wrong deoptimized frame and wind up in strange places
364 // where it will be very difficult to figure out what went wrong. Better
365 // to die an early death here than some very obscure death later when the
366 // trail is cold.
367 // Note: on ia64 this guarantee can be fooled by frames with no memory stack
368 // in that it will fail to detect a problem when there is one. This needs
369 // more work in tiger timeframe.
370 guarantee(array->unextended_sp() == unpack_sp, "vframe_array_head must contain the vframeArray to unpack");
371
372 int number_of_frames = array->frames();
373
374 // Compute the vframes' sizes. Note that frame_sizes[] entries are ordered from outermost to innermost
375 // virtual activation, which is the reverse of the elements in the vframes array.
376 intptr_t* frame_sizes = NEW_C_HEAP_ARRAY(intptr_t, number_of_frames, mtCompiler);
377 // +1 because we always have an interpreter return address for the final slot.
378 address* frame_pcs = NEW_C_HEAP_ARRAY(address, number_of_frames + 1, mtCompiler);
379 int popframe_extra_args = 0;
380 // Create an interpreter return address for the stub to use as its return
467 // may not even be enough space).
468
469 // QQQ I'd rather see this pushed down into last_frame_adjust
470 // and have it take the sender (aka caller).
471
472 if (deopt_sender.is_compiled_frame() || caller_was_method_handle) {
473 caller_adjustment = last_frame_adjust(0, callee_locals);
474 } else if (callee_locals > callee_parameters) {
475 // The caller frame may need extending to accommodate
476 // non-parameter locals of the first unpacked interpreted frame.
477 // Compute that adjustment.
478 caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
479 }
480
481 // If the sender is deoptimized then we must retrieve the address of the handler
482 // since the frame will "magically" show the original pc before the deopt
483 // and we'd undo the deopt.
484
485 frame_pcs[0] = deopt_sender.raw_pc();
486
487 assert(CodeCache::find_blob_unsafe(frame_pcs[0]) != NULL, "bad pc");
488
489 #ifdef INCLUDE_JVMCI
490 if (exceptionObject() != NULL) {
491 thread->set_exception_oop(exceptionObject());
492 exec_mode = Unpack_exception;
493 }
494 #endif
495
496 if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
497 assert(thread->has_pending_exception(), "should have thrown OOME");
498 thread->set_exception_oop(thread->pending_exception());
499 thread->clear_pending_exception();
500 exec_mode = Unpack_exception;
501 }
502
503 #if INCLUDE_JVMCI
504 if (thread->frames_to_pop_failed_realloc() > 0) {
505 thread->set_pending_monitorenter(false);
506 }
507 #endif
1426 JRT_END
1427
1428 MethodData*  // Return the MethodData for m, optionally building one on demand (best-effort).
1429 Deoptimization::get_method_data(JavaThread* thread, const methodHandle& m,
1430 bool create_if_missing) {
1431 Thread* THREAD = thread;  // conventional alias; the exception macros below presumably expand in terms of THREAD — TODO confirm
1432 MethodData* mdo = m()->method_data();
1433 if (mdo == NULL && create_if_missing && !HAS_PENDING_EXCEPTION) {
1434 // Build an MDO. Ignore errors like OutOfMemory;
1435 // that simply means we won't have an MDO to update.
1436 Method::build_interpreter_method_data(m, THREAD);
1437 if (HAS_PENDING_EXCEPTION) {
1438 assert((PENDING_EXCEPTION->is_a(SystemDictionary::OutOfMemoryError_klass())), "we expect only an OOM error here");
1439 CLEAR_PENDING_EXCEPTION;
1440 }
1441 mdo = m()->method_data();  // re-read: the build call may have installed an MDO (or left NULL on failure)
1442 }
1443 return mdo;  // may be NULL if creation was not requested, failed, or an exception was already pending
1444 }
1445
1446 #if defined(COMPILER2) || INCLUDE_JVMCI
1447 void Deoptimization::load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS) {
1448 // in case of an unresolved klass entry, load the class.
1449 if (constant_pool->tag_at(index).is_unresolved_klass()) {
1450 Klass* tk = constant_pool->klass_at_ignore_error(index, CHECK);
1451 return;
1452 }
1453
1454 if (!constant_pool->tag_at(index).is_symbol()) return;
1455
1456 Handle class_loader (THREAD, constant_pool->pool_holder()->class_loader());
1457 Symbol* symbol = constant_pool->symbol_at(index);
1458
1459 // class name?
1460 if (symbol->byte_at(0) != '(') {
1461 Handle protection_domain (THREAD, constant_pool->pool_holder()->protection_domain());
1462 SystemDictionary::resolve_or_null(symbol, class_loader, protection_domain, CHECK);
1463 return;
1464 }
1465
1466 // then it must be a signature!
2343 if (bc_case == BC_CASE_LIMIT && (int)bc == 0)
2344 bc = Bytecodes::_illegal;
2345 sprintf(name, "%s/%s/%s",
2346 trap_reason_name(reason),
2347 trap_action_name(action),
2348 Bytecodes::is_defined(bc)? Bytecodes::name(bc): "other");
2349 juint r = counter >> LSB_BITS;
2350 tty->print_cr(" %40s: " UINT32_FORMAT " (%.1f%%)", name, r, (r * 100.0) / total);
2351 account -= r;
2352 }
2353 }
2354 }
2355 }
2356 if (account != 0) {
2357 PRINT_STAT_LINE("unaccounted", account);
2358 }
2359 #undef PRINT_STAT_LINE
2360 if (xtty != NULL) xtty->tail("statistics");
2361 }
2362 }
2363 #else // COMPILER2 || INCLUDE_JVMCI
2364
2365
2366 // Stubs for C1 only system.
2367 bool Deoptimization::trap_state_is_recompiled(int trap_state) {
2368 return false;  // C1-only stub: trap state is not tracked, so nothing is ever marked recompiled
2369 }
2370
2371 const char* Deoptimization::trap_reason_name(int reason) {
2372 return "unknown";  // C1-only stub: reason codes are not decoded in this build
2373 }
2374
2375 void Deoptimization::print_statistics() {
2376 // no output: deopt statistics are only gathered with COMPILER2/JVMCI built in
2377 }
2378
2379 void
2380 Deoptimization::update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason) {
2381 // no update: without a profiling compiler there is no trap history to record
2382 }
2383
2384 int Deoptimization::trap_state_has_reason(int trap_state, int reason) {
2385 return 0;  // C1-only stub: 0 means "reason not present" since trap states are not tracked
2386 }
2387
2388 void Deoptimization::gather_statistics(DeoptReason reason, DeoptAction action,
2389 Bytecodes::Code bc) {
2390 // no update: statistics are only collected when a profiling compiler is present
2391 }
2392
2393 const char* Deoptimization::format_trap_state(char* buf, size_t buflen,
2394 int trap_state) {
2395 jio_snprintf(buf, buflen, "#%d", trap_state);  // no decoding available; print the raw state number
2396 return buf;  // returns the caller-supplied buffer for convenience
2397 }
2398
2399 #endif // COMPILER2 || INCLUDE_JVMCI
|