src/hotspot/share/runtime/sharedRuntime.cpp

1297     // which may happen when multiple alive nmethods (tiered compilation)
1298     // will be supported.
1299     if (!callee_method->is_old() &&
1300         (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1301 #ifdef ASSERT
1302       // We must not try to patch to jump to an already unloaded method.
1303       if (dest_entry_point != 0) {
1304         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
1305         assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
1306                "should not call unloaded nmethod");
1307       }
1308 #endif
1309       if (is_virtual) {
1310         CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1311         if (inline_cache->is_clean()) {
1312           if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
1313             return false;
1314           }
1315         }
1316       } else {





1317         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1318         if (ssc->is_clean()) ssc->set(static_call_info);
1319       }
1320     }
1321   } // unlock CompiledICLocker
1322   return true;
1323 }
1324 
1325 // Resolves a call.  The compilers generate code for calls that go here
1326 // and are patched with the real destination of the call.
1327 methodHandle SharedRuntime::resolve_sub_helper(JavaThread *thread,
1328                                                bool is_virtual,
1329                                                bool is_optimized, TRAPS) {
1330 
1331   ResourceMark rm(thread);
1332   RegisterMap cbl_map(thread, false);
1333   frame caller_frame = thread->last_frame().sender(&cbl_map);
1334 
1335   CodeBlob* caller_cb = caller_frame.cb();
1336   guarantee(caller_cb != NULL && caller_cb->is_compiled(), "must be called from compiled method");


1359   assert(caller_nm->is_alive() && !caller_nm->is_unloading(), "It should be alive");
1360 
1361 #ifndef PRODUCT
1362   // tracing/debugging/statistics
1363   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1364                 (is_virtual) ? (&_resolve_virtual_ctr) :
1365                                (&_resolve_static_ctr);
1366   Atomic::inc(addr);
1367 
1368   if (TraceCallFixup) {
1369     ResourceMark rm(thread);
1370     tty->print("resolving %s%s (%s) call to",
1371       (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1372       Bytecodes::name(invoke_code));
1373     callee_method->print_short_name(tty);
1374     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1375                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1376   }
1377 #endif
1378 


1379   // Do not patch call site for static call to another class
1380   // when the class is not fully initialized.
1381   if (invoke_code == Bytecodes::_invokestatic) {
1382     if (!callee_method->method_holder()->is_initialized() &&
1383         callee_method->method_holder() != caller_nm->method()->method_holder()) {
1384       assert(callee_method->method_holder()->is_linked(), "must be");
1385       return callee_method;
1386     } else {
1387       assert(callee_method->method_holder()->is_initialized() ||
1388              callee_method->method_holder()->is_reentrant_initialization(thread),
1389              "invalid class initialization state for invoke_static");
1390     }
1391   }
1392 
1393   // JSR 292 key invariant:
1394   // If the resolved method is a MethodHandle invoke target, the call
1395   // site must be a MethodHandle call site, because the lambda form might tail-call
1396   // leaving the stack in a state unknown to either caller or callee
1397   // TODO detune for now but we might need it again
1398 //  assert(!callee_method->is_compiled_lambda_form() ||
1399 //         caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1400 
1401   // Compute entry points. This might require generation of C2I converter
1402   // frames, so we cannot be holding any locks here. Furthermore, the
1403   // computation of the entry points is independent of patching the call.  We
1404   // always return the entry-point, but we only patch the stub if the call has
1405   // not been deoptimized.  Return values: For a virtual call this is a
1406   // (cached_oop, destination address) pair. For a static call/optimized
1407   // virtual this is just a destination address.
1408 
1409   // Patching IC caches may fail if we run out of transition stubs.
1410   // We then refill the IC stubs and try again.
1411   for (;;) {




1297     // which may happen when multiple alive nmethods (tiered compilation)
1298     // will be supported.
1299     if (!callee_method->is_old() &&
1300         (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1301 #ifdef ASSERT
1302       // We must not try to patch to jump to an already unloaded method.
1303       if (dest_entry_point != 0) {
1304         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
1305         assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
1306                "should not call unloaded nmethod");
1307       }
1308 #endif
1309       if (is_virtual) {
1310         CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1311         if (inline_cache->is_clean()) {
1312           if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
1313             return false;
1314           }
1315         }
1316       } else {
1317         if (UseFastClassInitChecks && invoke_code == Bytecodes::_invokestatic &&
1318             callee_method->needs_clinit_barrier() &&
1319             callee != NULL && (callee->is_compiled_by_jvmci() || callee->is_aot())) {
1320           return true; // skip patching for JVMCI or AOT code
1321         }
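             // [Editorial note, not part of the change] The early return above appears to
             // exist because JVMCI- and AOT-compiled code does not carry the fast
             // class-init barrier, so such static call sites must keep re-resolving
             // until the holder class finishes initialization.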
1322         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1323         if (ssc->is_clean()) ssc->set(static_call_info);
1324       }
1325     }
1326   } // unlock CompiledICLocker
1327   return true;
1328 }
1329 
1330 // Resolves a call.  The compilers generate code for calls that go here
1331 // and are patched with the real destination of the call.
1332 methodHandle SharedRuntime::resolve_sub_helper(JavaThread *thread,
1333                                                bool is_virtual,
1334                                                bool is_optimized, TRAPS) {
1335 
1336   ResourceMark rm(thread);
1337   RegisterMap cbl_map(thread, false);
1338   frame caller_frame = thread->last_frame().sender(&cbl_map);
1339 
1340   CodeBlob* caller_cb = caller_frame.cb();
1341   guarantee(caller_cb != NULL && caller_cb->is_compiled(), "must be called from compiled method");


1364   assert(caller_nm->is_alive() && !caller_nm->is_unloading(), "It should be alive");
1365 
1366 #ifndef PRODUCT
1367   // tracing/debugging/statistics
1368   int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1369                 (is_virtual) ? (&_resolve_virtual_ctr) :
1370                                (&_resolve_static_ctr);
1371   Atomic::inc(addr);
1372 
1373   if (TraceCallFixup) {
1374     ResourceMark rm(thread);
1375     tty->print("resolving %s%s (%s) call to",
1376       (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1377       Bytecodes::name(invoke_code));
1378     callee_method->print_short_name(tty);
1379     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1380                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1381   }
1382 #endif
1383 
1384   if (invoke_code == Bytecodes::_invokestatic) {
1385     if (!UseFastClassInitChecks && callee_method->needs_clinit_barrier()) {
1386       // Do not patch call site for static call to another class
1387       // when the class is not fully initialized.



1388       assert(callee_method->method_holder()->is_linked(), "must be");
1389       return callee_method;
1390     }
1391     assert(callee_method->method_holder()->is_initialized() ||
1392            callee_method->method_holder()->is_reentrant_initialization(thread),
1393            "invalid class initialization state for invoke_static");
1394   }
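         // [Editorial sketch, not part of the change] The assert above captures the only
         // states in which a compiled invokestatic may reach resolution here: the holder
         // has finished <clinit>, or the resolving thread is itself running it. As an
         // illustrative predicate (hypothetical helper name, not a HotSpot API):
         //
         //   static bool invokestatic_resolution_allowed(InstanceKlass* holder, JavaThread* thread) {
         //     return holder->is_initialized() ||                   // <clinit> has completed
         //            holder->is_reentrant_initialization(thread);  // or this thread is running <clinit>
         //   }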

1395 
1396   // JSR 292 key invariant:
1397   // If the resolved method is a MethodHandle invoke target, the call
1398   // site must be a MethodHandle call site, because the lambda form might tail-call
1399   // leaving the stack in a state unknown to either caller or callee
1400   // TODO detune for now but we might need it again
1401 //  assert(!callee_method->is_compiled_lambda_form() ||
1402 //         caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1403 
1404   // Compute entry points. This might require generation of C2I converter
1405   // frames, so we cannot be holding any locks here. Furthermore, the
1406   // computation of the entry points is independent of patching the call.  We
1407   // always return the entry-point, but we only patch the stub if the call has
1408   // not been deoptimized.  Return values: For a virtual call this is a
1409   // (cached_oop, destination address) pair. For a static call/optimized
1410   // virtual this is just a destination address.
1411 
1412   // Patching IC caches may fail if we run out of transition stubs.
1413   // We then refill the IC stubs and try again.
1414   for (;;) {
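
The hunk ends just inside the retry loop that the preceding comment introduces: patching can fail when the inline-cache transition stubs are exhausted (the helper at the top of this hunk returns false when set_to_monomorphic() cannot complete), in which case the stubs are refilled and resolution is retried. A minimal sketch of that shape, using hypothetical helper names rather than the actual HotSpot calls:

    // Sketch only: the retry-until-patched pattern described in the comment above.
    // try_patch_call_site() and refill_transition_stubs() are placeholder names,
    // not HotSpot APIs.
    for (;;) {
      bool patched = try_patch_call_site(callee_method, is_virtual, is_optimized);
      if (patched) {
        return callee_method;       // the call site now points at the resolved code
      }
      refill_transition_stubs();    // out of IC transition stubs: replenish and retry
    }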

