src/hotspot/share/runtime/sharedRuntime.cpp

1230     RegisterMap reg_map(thread, false);
1231     frame fr = thread->last_frame();
1232     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1233     fr = fr.sender(&reg_map);
1234     assert(fr.is_entry_frame(), "must be");
1235     // fr is now pointing to the entry frame.
1236     callee_method = methodHandle(THREAD, fr.entry_frame_call_wrapper()->callee_method());
1237   } else {
1238     Bytecodes::Code bc;
1239     CallInfo callinfo;
1240     find_callee_info_helper(thread, vfst, bc, callinfo, CHECK_(methodHandle()));
1241     callee_method = callinfo.selected_method();
1242   }
1243   assert(callee_method()->is_method(), "must be");
1244   return callee_method;
1245 }
1246 
1247 // Resolves a call.
1248 methodHandle SharedRuntime::resolve_helper(JavaThread *thread,
1249                                            bool is_virtual,
1250                                            bool is_optimized, TRAPS) {

1251   methodHandle callee_method;
1252   callee_method = resolve_sub_helper(thread, is_virtual, is_optimized, THREAD);
1253   if (JvmtiExport::can_hotswap_or_post_breakpoint()) {
1254     int retry_count = 0;
1255     while (!HAS_PENDING_EXCEPTION && callee_method->is_old() &&
1256            callee_method->method_holder() != SystemDictionary::Object_klass()) {
1257       // If there is a pending exception then there is no need to re-try
1258       // resolving this method.
1259       // If the method has been redefined, we need to try again.
1260       // Hack: we have no way to update the vtables of arrays, so don't
1261       // require that java.lang.Object has been updated.
1262 
1263       // It is very unlikely that a method is redefined more than 100 times
1264       // in the middle of resolve. If we loop here more than 100 times,
1265       // there is probably a bug.
1266       guarantee((retry_count++ < 100),
1267                 "Could not resolve to latest version of redefined method");
1268       // method is redefined in the middle of resolve so re-try.
1269       callee_method = resolve_sub_helper(thread, is_virtual, is_optimized, THREAD);
1270     }
1271   }
1272   return callee_method;
1273 }
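
A minimal standalone sketch (not HotSpot code) of the bounded-retry pattern resolve_helper uses above: re-resolve while the result is stale, with a hard cap that turns an unexpected livelock into a failed check. The Method struct and resolve_once() stand-in below are hypothetical.

#include <cassert>

struct Method { bool is_old; };

// Stand-in for the real resolve step, which may return a stale method if a
// redefinition raced with us.
static Method* resolve_once(Method* latest) { return latest; }

static Method* resolve_with_retry(Method* latest) {
  Method* m = resolve_once(latest);
  int retry_count = 0;
  while (m != nullptr && m->is_old) {
    // A method is very unlikely to be redefined more than 100 times while one
    // resolve is in flight; treat a longer loop as a bug.
    assert(retry_count++ < 100 && "could not resolve to latest version of redefined method");
    m = resolve_once(latest);  // redefined mid-resolve, try again
  }
  return m;
}

int main() {
  Method fresh{false};
  return resolve_with_retry(&fresh) == &fresh ? 0 : 1;
}
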
1274 
1275 // This fails if resolution required refilling of IC stubs
1276 bool SharedRuntime::resolve_sub_helper_internal(methodHandle callee_method, const frame& caller_frame,
1277                                                 CompiledMethod* caller_nm, bool is_virtual, bool is_optimized,
1278                                                 Handle receiver, CallInfo& call_info, Bytecodes::Code invoke_code, TRAPS) {
1279   StaticCallInfo static_call_info;
1280   CompiledICInfo virtual_call_info;
1281 
1282   // Make sure the callee nmethod does not get deoptimized and removed before
1283   // we are done patching the code.
1284   CompiledMethod* callee = callee_method->code();
1285 
1286   if (callee != NULL) {
1287     assert(callee->is_compiled(), "must be nmethod for patching");
1288   }
1289 
1290   if (callee != NULL && !callee->is_in_use()) {
1291     // Patch call site to C2I adapter if callee nmethod is deoptimized or unloaded.
1292     callee = NULL;
1293   }
1294   nmethodLocker nl_callee(callee);
1295 #ifdef ASSERT
1296   address dest_entry_point = callee == NULL ? 0 : callee->entry_point(); // used below
1297 #endif
1298 
1299   bool is_nmethod = caller_nm->is_nmethod();

1300 
1301   if (is_virtual) {
1302     Klass* receiver_klass = NULL;
1303     if (ValueTypePassFieldsAsArgs && callee_method->method_holder()->is_value()) {
1304       // If the receiver is a value type that is passed as fields, no oop is available
1305       receiver_klass = callee_method->method_holder();
1306     } else {
1307       assert(receiver.not_null() || invoke_code == Bytecodes::_invokehandle, "sanity check");
1308       receiver_klass = invoke_code == Bytecodes::_invokehandle ? NULL : receiver->klass();
1309     }
1310     bool static_bound = call_info.resolved_method()->can_be_statically_bound();
1311     CompiledIC::compute_monomorphic_entry(callee_method, receiver_klass,
1312                      is_optimized, static_bound, is_nmethod, virtual_call_info,
1313                      CHECK_false);
1314   } else {
1315     // static call
1316     CompiledStaticCall::compute_entry(callee_method, is_nmethod, static_call_info);
1317   }
1318 
1319   // grab lock, check for deoptimization and potentially patch caller
1320   {
1321     CompiledICLocker ml(caller_nm);
1322 
1323     // Lock blocks for safepoint during which both nmethods can change state.
1324 
1325     // Now that we are ready to patch: if the Method* was redefined, don't
1326     // update the call site and let the caller retry.
1327     // Don't update the call site if the callee nmethod was unloaded or deoptimized.
1328     // Don't update the call site if the callee nmethod was replaced by another
1329     // nmethod, which may happen once multiple alive nmethods per method
1330     // (tiered compilation) are supported.
1331     if (!callee_method->is_old() &&
1332         (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1333 #ifdef ASSERT
1334       // We must not try to patch to jump to an already unloaded method.
1335       if (dest_entry_point != 0) {
1336         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);


1341       if (is_virtual) {
1342         CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1343         if (inline_cache->is_clean()) {
1344           if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
1345             return false;
1346           }
1347         }
1348       } else {
1349         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1350         if (ssc->is_clean()) ssc->set(static_call_info);
1351       }
1352     }
1353   } // unlock CompiledICLocker
1354   return true;
1355 }
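
A standalone sketch (types are hypothetical, not HotSpot's) of the pattern in resolve_sub_helper_internal above: compute the new call info outside the lock, then re-check the callee's state under the lock and only patch if nothing changed; otherwise report failure and let the caller re-resolve.

#include <mutex>

struct CallSite {
  std::mutex patch_lock;
  const void* destination = nullptr;
  bool stale = false;            // e.g. callee redefined, unloaded or deoptimized
};

static bool try_patch(CallSite& site, const void* new_dest) {
  std::lock_guard<std::mutex> guard(site.patch_lock);
  if (site.stale) {
    return false;                // state changed (e.g. at a safepoint); caller retries
  }
  site.destination = new_dest;   // safe to patch: state re-validated under the lock
  return true;
}

int main() {
  CallSite site;
  int target = 0;
  return try_patch(site, &target) ? 0 : 1;
}
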
1356 
1357 // Resolves a call.  The compilers generate code for calls that go here
1358 // and are patched with the real destination of the call.
1359 methodHandle SharedRuntime::resolve_sub_helper(JavaThread *thread,
1360                                                bool is_virtual,
1361                                                bool is_optimized, TRAPS) {

1362 
1363   ResourceMark rm(thread);
1364   RegisterMap cbl_map(thread, false);
1365   frame caller_frame = thread->last_frame().sender(&cbl_map);
1366 
1367   CodeBlob* caller_cb = caller_frame.cb();
1368   guarantee(caller_cb != NULL && caller_cb->is_compiled(), "must be called from compiled method");
1369   CompiledMethod* caller_nm = caller_cb->as_compiled_method_or_null();

1370 
1371   // make sure caller is not getting deoptimized
1372   // and removed before we are done with it.
1373   // CLEANUP - with lazy deopt shouldn't need this lock
1374   nmethodLocker caller_lock(caller_nm);
1375 
1376   if (!is_virtual && !is_optimized) {
1377     SimpleScopeDesc ssd(caller_nm, caller_frame.pc());
1378     Bytecode bc(ssd.method(), ssd.method()->bcp_from(ssd.bci()));
1379     // Substitutability test implementation piggybacks on static call resolution
1380     if (bc.code() == Bytecodes::_if_acmpeq || bc.code() == Bytecodes::_if_acmpne) {
1381       SystemDictionary::ValueBootstrapMethods_klass()->initialize(CHECK_NULL);
1382       return SystemDictionary::ValueBootstrapMethods_klass()->find_method(vmSymbols::isSubstitutable_name(), vmSymbols::object_object_boolean_signature());
1383     }
1384   }
1385 
1386   // determine call info & receiver
1387   // note: a) receiver is NULL for static calls
1388   //       b) an exception is thrown if receiver is NULL for non-static calls
1389   CallInfo call_info;


1460     } else {
1461       InlineCacheBuffer::refill_ic_stubs();
1462     }
1463   }
1464 
1465 }
1466 
1467 
1468 // Inline caches exist only in compiled code
1469 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* thread))
1470 #ifdef ASSERT
1471   RegisterMap reg_map(thread, false);
1472   frame stub_frame = thread->last_frame();
1473   assert(stub_frame.is_runtime_frame(), "sanity check");
1474   frame caller_frame = stub_frame.sender(&reg_map);
1475   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame(), "unexpected frame");
1476 #endif /* ASSERT */
1477 
1478   methodHandle callee_method;
1479   bool is_optimized = false;

1480   JRT_BLOCK
1481     callee_method = SharedRuntime::handle_ic_miss_helper(thread, is_optimized, CHECK_NULL);
1482     // Return Method* through TLS
1483     thread->set_vm_result_2(callee_method());
1484   JRT_BLOCK_END
1485   // return compiled code entry point after potential safepoints
1486   assert(callee_method->verified_code_entry() != NULL, "Jump to zero!");
1487   assert(callee_method->verified_value_ro_code_entry() != NULL, "Jump to zero!");
1488   return is_optimized ? callee_method->verified_code_entry() : callee_method->verified_value_ro_code_entry();
1489 JRT_END
1490 
1491 
1492 // Handle call site that has been made non-entrant
1493 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* thread))
1494   // 6243940 We might end up in here if the callee is deoptimized
1495   // as we race to call it.  We don't want to take a safepoint if
1496   // the caller was interpreted because the caller frame will look
1497   // interpreted to the stack walkers and arguments are now
1498   // "compiled" so it is much better to make this transition
1499   // invisible to the stack walking code. The i2c path will
1500   // place the callee method in the callee_target. It is stashed
1501   // there because if we tried to find the callee by normal means a
1502   // safepoint would be possible and we could have trouble GC'ing the compiled args.
1503   RegisterMap reg_map(thread, false);
1504   frame stub_frame = thread->last_frame();
1505   assert(stub_frame.is_runtime_frame(), "sanity check");
1506   frame caller_frame = stub_frame.sender(&reg_map);
1507 
1508   if (caller_frame.is_interpreted_frame() ||
1509       caller_frame.is_entry_frame()) {
1510     Method* callee = thread->callee_target();
1511     guarantee(callee != NULL && callee->is_method(), "bad handshake");
1512     thread->set_vm_result_2(callee);
1513     thread->set_callee_target(NULL);
1514     return callee->get_c2i_entry();
1515   }
1516 
1517   // Must be compiled to compiled path which is safe to stackwalk
1518   methodHandle callee_method;
1519   bool is_optimized = false;

1520   JRT_BLOCK
1521     // Force resolving of caller (if we called from compiled frame)
1522     callee_method = SharedRuntime::reresolve_call_site(thread, is_optimized, CHECK_NULL);
1523     thread->set_vm_result_2(callee_method());
1524   JRT_BLOCK_END
1525   // return compiled code entry point after potential safepoints
1526   assert(callee_method->verified_code_entry() != NULL, "Jump to zero!");
1527   assert(callee_method->verified_value_ro_code_entry() != NULL, "Jump to zero!");
1528   return is_optimized ? callee_method->verified_code_entry() : callee_method->verified_value_ro_code_entry();
1529 JRT_END
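
A standalone sketch of the thread-local handshake described in the comment at the top of handle_wrong_method: the i2c path stashes the callee in a per-thread slot and the handler consumes it instead of re-walking the stack (which could safepoint). ThreadShim and its fields are hypothetical stand-ins.

#include <cassert>

struct ThreadShim {
  void* callee_target = nullptr;   // written by the i2c adapter path
  void* vm_result_2   = nullptr;   // result handed back to generated code
};

static void* take_callee_from_tls(ThreadShim& t) {
  void* callee = t.callee_target;
  assert(callee != nullptr && "bad handshake");
  t.vm_result_2   = callee;        // publish the callee for the returning stub
  t.callee_target = nullptr;       // consume the handshake slot
  return callee;
}

int main() {
  ThreadShim t;
  int m = 0;
  t.callee_target = &m;
  return take_callee_from_tls(t) == &m ? 0 : 1;
}
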
1530 
1531 // Handle abstract method call
1532 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* thread))
1533   // Verbose error message for AbstractMethodError.
1534   // Get the called method from the invoke bytecode.
1535   vframeStream vfst(thread, true);
1536   assert(!vfst.at_end(), "Java frame must exist");
1537   methodHandle caller(vfst.method());
1538   Bytecode_invoke invoke(caller, vfst.bci());
1539   DEBUG_ONLY( invoke.verify(); )
1540 
1541   // Find the compiled caller frame.
1542   RegisterMap reg_map(thread);
1543   frame stubFrame = thread->last_frame();
1544   assert(stubFrame.is_runtime_frame(), "must be");
1545   frame callerFrame = stubFrame.sender(&reg_map);
1546   assert(callerFrame.is_compiled_frame(), "must be");
1547 
1548   // Install exception and return forward entry.
1549   address res = StubRoutines::throw_AbstractMethodError_entry();
1550   JRT_BLOCK
1551     methodHandle callee = invoke.static_target(thread);
1552     if (!callee.is_null()) {
1553       oop recv = callerFrame.retrieve_receiver(&reg_map);
1554       Klass *recv_klass = (recv != NULL) ? recv->klass() : NULL;
1555       LinkResolver::throw_abstract_method_error(callee, recv_klass, thread);
1556       res = StubRoutines::forward_exception_entry();
1557     }
1558   JRT_BLOCK_END
1559   return res;
1560 JRT_END
1561 
1562 
1563 // resolve a static call and patch code
1564 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread *thread ))
1565   methodHandle callee_method;

1566   JRT_BLOCK
1567     callee_method = SharedRuntime::resolve_helper(thread, false, false, CHECK_NULL);
1568     thread->set_vm_result_2(callee_method());
1569   JRT_BLOCK_END
1570   // return compiled code entry point after potential safepoints
1571   assert(callee_method->verified_code_entry() != NULL, "Jump to zero!");
1572   return callee_method->verified_code_entry();


1573 JRT_END
1574 
1575 
1576 // resolve virtual call and update inline cache to monomorphic
1577 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread *thread ))
1578   methodHandle callee_method;

1579   JRT_BLOCK
1580     callee_method = SharedRuntime::resolve_helper(thread, true, false, CHECK_NULL);
1581     thread->set_vm_result_2(callee_method());
1582   JRT_BLOCK_END
1583   // return compiled code entry point after potential safepoints
1584   assert(callee_method->verified_value_ro_code_entry() != NULL, "Jump to zero!");
1585   return callee_method->verified_value_ro_code_entry();


1586 JRT_END
1587 
1588 
1589 // Resolve a virtual call that can be statically bound (e.g., always
1590 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1591 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread *thread))
1592   methodHandle callee_method;

1593   JRT_BLOCK
1594     callee_method = SharedRuntime::resolve_helper(thread, true, true, CHECK_NULL);
1595     thread->set_vm_result_2(callee_method());
1596   JRT_BLOCK_END
1597   // return compiled code entry point after potential safepoints
1598   assert(callee_method->verified_code_entry() != NULL, "Jump to zero!");
1599   return callee_method->verified_code_entry();


1600 JRT_END
1601 
1602 // The handle_ic_miss_helper_internal function returns false if it failed, either
1603 // because it ran out of vtable stubs or because it ran out of IC stubs while
1604 // transitioning an IC to a transitional state. The needs_ic_stub_refill value is
1605 // set if the failure was due to running out of IC stubs, in which case
1606 // handle_ic_miss_helper refills the IC stubs and tries again.
1607 bool SharedRuntime::handle_ic_miss_helper_internal(Handle receiver, CompiledMethod* caller_nm,
1608                                                    const frame& caller_frame, methodHandle callee_method,
1609                                                    Bytecodes::Code bc, CallInfo& call_info,
1610                                                    bool& needs_ic_stub_refill, bool& is_optimized, TRAPS) {
1611   CompiledICLocker ml(caller_nm);
1612   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1613   bool should_be_mono = false;
1614   if (inline_cache->is_optimized()) {
1615     if (TraceCallFixup) {
1616       ResourceMark rm(THREAD);
1617       tty->print("OPTIMIZED IC miss (%s) call to", Bytecodes::name(bc));
1618       callee_method->print_short_name(tty);
1619       tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1620     }
1621     is_optimized = true;
1622     should_be_mono = true;
1623   } else if (inline_cache->is_icholder_call()) {
1624     CompiledICHolder* ic_oop = inline_cache->cached_icholder();
1625     if (ic_oop != NULL) {
1626       if (!ic_oop->is_loader_alive()) {
1627         // Deferred IC cleaning due to concurrent class unloading
1628         if (!inline_cache->set_to_clean()) {
1629           needs_ic_stub_refill = true;
1630           return false;


1639           ResourceMark rm(THREAD);
1640           tty->print("FALSE IC miss (%s) converting to compiled call to", Bytecodes::name(bc));
1641           callee_method->print_short_name(tty);
1642           tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1643         }
1644         should_be_mono = true;
1645       }
1646     }
1647   }
1648 
1649   if (should_be_mono) {
1650     // We have a path that was monomorphic but was going interpreted
1651     // and now we have (or had) a compiled entry. We correct the IC
1652     // by using a new icBuffer.
1653     CompiledICInfo info;
1654     Klass* receiver_klass = receiver()->klass();
1655     inline_cache->compute_monomorphic_entry(callee_method,
1656                                             receiver_klass,
1657                                             inline_cache->is_optimized(),
1658                                             false, caller_nm->is_nmethod(),

1659                                             info, CHECK_false);
1660     if (!inline_cache->set_to_monomorphic(info)) {
1661       needs_ic_stub_refill = true;
1662       return false;
1663     }
1664   } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1665     // Potential change to megamorphic
1666 
1667     bool successful = inline_cache->set_to_megamorphic(&call_info, bc, needs_ic_stub_refill, CHECK_false);
1668     if (needs_ic_stub_refill) {
1669       return false;
1670     }
1671     if (!successful) {
1672       if (!inline_cache->set_to_clean()) {
1673         needs_ic_stub_refill = true;
1674         return false;
1675       }
1676     }
1677   } else {
1678     // Either clean or megamorphic
1679   }
1680   return true;
1681 }
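
A standalone sketch of the inline-cache transitions that handle_ic_miss_helper_internal drives: clean -> monomorphic -> megamorphic, where a transition can fail when transition (IC) stubs are exhausted and must be retried after a refill. The types below are hypothetical, not HotSpot's.

enum class ICState { clean, monomorphic, megamorphic };

struct InlineCache {
  ICState state = ICState::clean;
};

// Returns false and sets needs_refill when no transition stub is available.
static bool transition(InlineCache& ic, ICState target, bool stub_available,
                       bool& needs_refill) {
  if (!stub_available) {
    needs_refill = true;
    return false;
  }
  ic.state = target;
  return true;
}

int main() {
  InlineCache ic;
  bool needs_refill = false;
  bool ok = transition(ic, ICState::monomorphic, /*stub_available=*/true, needs_refill);
  return (ok && ic.state == ICState::monomorphic) ? 0 : 1;
}
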
1682 
1683 methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, bool& is_optimized, TRAPS) {
1684   ResourceMark rm(thread);
1685   CallInfo call_info;
1686   Bytecodes::Code bc;
1687 
1688   // receiver is NULL for static calls. An exception is thrown for NULL
1689   // receivers for non-static calls
1690   Handle receiver = find_callee_info(thread, bc, call_info,
1691                                      CHECK_(methodHandle()));
1692   // Compiler1 can produce virtual call sites that can actually be statically bound.
1693   // If we fell through to the code below we would think that the site was going
1694   // megamorphic when in fact the site can never miss. Worse, because we'd think it
1695   // was megamorphic we'd try to do a vtable dispatch; however, methods that can be
1696   // statically bound don't have vtable entries (vtable_index < 0) and we'd blow up.
1697   // So we force a reresolution of the call site (as if we did a handle_wrong_method
1698   // and not a plain ic_miss) and the site will be converted to an optimized virtual
1699   // call site that never misses again. I don't believe C2 will produce code like
1700   // this, but if it did this would still be the correct thing to do for it too, hence no ifdef.
1701   //
1702   if (call_info.resolved_method()->can_be_statically_bound()) {
1703     methodHandle callee_method = SharedRuntime::reresolve_call_site(thread, is_optimized, CHECK_(methodHandle()));
1704     if (TraceCallFixup) {
1705       RegisterMap reg_map(thread, false);
1706       frame caller_frame = thread->last_frame().sender(&reg_map);
1707       ResourceMark rm(thread);
1708       tty->print("converting IC miss to reresolve (%s) call to", Bytecodes::name(bc));
1709       callee_method->print_short_name(tty);
1710       tty->print_cr(" from pc: " INTPTR_FORMAT, p2i(caller_frame.pc()));
1711       tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1712     }
1713     return callee_method;
1714   }
1715 
1716   methodHandle callee_method = call_info.selected_method();
1717 
1718 #ifndef PRODUCT
1719   Atomic::inc(&_ic_miss_ctr);
1720 
1721   // Statistics & Tracing
1722   if (TraceCallFixup) {
1723     ResourceMark rm(thread);


1733     // produce statistics under the lock
1734     trace_ic_miss(f.pc());
1735   }
1736 #endif
1737 
1738   // install an event collector so that when a vtable stub is created the
1739   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1740   // event can't be posted when the stub is created as locks are held
1741   // - instead the event will be deferred until the event collector goes
1742   // out of scope.
1743   JvmtiDynamicCodeEventCollector event_collector;
1744 
1745   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1746   // Transitioning IC caches may require transition stubs. If we run out
1747   // of transition stubs, we have to drop locks and perform a safepoint
1748   // that refills them.
1749   RegisterMap reg_map(thread, false);
1750   frame caller_frame = thread->last_frame().sender(&reg_map);
1751   CodeBlob* cb = caller_frame.cb();
1752   CompiledMethod* caller_nm = cb->as_compiled_method();

1753 
1754   for (;;) {
1755     ICRefillVerifier ic_refill_verifier;
1756     bool needs_ic_stub_refill = false;
1757     bool successful = handle_ic_miss_helper_internal(receiver, caller_nm, caller_frame, callee_method,
1758                                                      bc, call_info, needs_ic_stub_refill, is_optimized, CHECK_(methodHandle()));
1759     if (successful || !needs_ic_stub_refill) {
1760       return callee_method;
1761     } else {
1762       InlineCacheBuffer::refill_ic_stubs();
1763     }
1764   }
1765 }
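
A standalone sketch of the refill-and-retry loop used by handle_ic_miss_helper (and by resolve_sub_helper): keep attempting the update, and when the only failure reason is IC-stub exhaustion, refill the stubs and try again. The helper functions below are hypothetical stand-ins.

static bool attempt_update(int& stub_budget, bool& needs_refill) {
  if (stub_budget > 0) {
    --stub_budget;               // stand-in for a successful IC transition
    return true;
  }
  needs_refill = true;
  return false;
}

static void refill_ic_stubs(int& stub_budget) { stub_budget = 4; }

static bool update_call_site(int stub_budget) {
  for (;;) {
    bool needs_refill = false;
    bool ok = attempt_update(stub_budget, needs_refill);
    if (ok || !needs_refill) {
      return ok;                 // done, or failed for a reason a refill won't fix
    }
    refill_ic_stubs(stub_budget);
  }
}

int main() {
  return update_call_site(/*stub_budget=*/0) ? 0 : 1;
}
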
1766 
1767 static bool clear_ic_at_addr(CompiledMethod* caller_nm, address call_addr, bool is_static_call) {
1768   CompiledICLocker ml(caller_nm);
1769   if (is_static_call) {
1770     CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);
1771     if (!ssc->is_clean()) {
1772       return ssc->set_to_clean();
1773     }
1774   } else {
1775     // compiled, dispatched call (which used to call an interpreted method)
1776     CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1777     if (!inline_cache->is_clean()) {
1778       return inline_cache->set_to_clean();
1779     }
1780   }
1781   return true;
1782 }
1783 
1784 //
1785 // Resets a call-site in compiled code so it will get resolved again.
1786 // This routine handles virtual call sites, optimized virtual call
1787 // sites, and static call sites. Typically used to change a call site's
1788 // destination from compiled to interpreted.
1789 //
1790 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, bool& is_optimized, TRAPS) {
1791   ResourceMark rm(thread);
1792   RegisterMap reg_map(thread, false);
1793   frame stub_frame = thread->last_frame();
1794   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1795   frame caller = stub_frame.sender(&reg_map);
1796 
1797   // Do nothing if the frame isn't a live compiled frame.
1798   // nmethod could be deoptimized by the time we get here
1799   // so no update to the caller is needed.
1800 
1801   if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1802 
1803     address pc = caller.pc();
1804 
1805     // Check for static or virtual call
1806     bool is_static_call = false;
1807     CompiledMethod* caller_nm = CodeCache::find_compiled(pc);

1808 
1809     // Default call_addr is the location of the "basic" call.
1810     // Determine the address of the call we are reresolving. With
1811     // Inline Caches we will always find a recognizable call.
1812     // With Inline Caches disabled we may or may not find a
1813     // recognizable call. We will always find a call for static
1814     // calls and for optimized virtual calls. For vanilla virtual
1815     // calls it depends on the state of the UseInlineCaches switch.
1816     //
1817     // With Inline Caches disabled we can get here for a virtual call
1818     // for two reasons:
1819     //   1 - calling an abstract method. The vtable for abstract methods
1820     //       will run us through handle_wrong_method and we will eventually
1821     //       end up in the interpreter to throw the AbstractMethodError.
1822     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1823     //       call and between the time we fetch the entry address and
1824     //       jump to it the target gets deoptimized. Similar to case 1,
1825     //       we will wind up in the interpreter (through a c2i adapter with C2).
1826     //
1827     address call_addr = NULL;




1230     RegisterMap reg_map(thread, false);
1231     frame fr = thread->last_frame();
1232     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1233     fr = fr.sender(&reg_map);
1234     assert(fr.is_entry_frame(), "must be");
1235     // fr is now pointing to the entry frame.
1236     callee_method = methodHandle(THREAD, fr.entry_frame_call_wrapper()->callee_method());
1237   } else {
1238     Bytecodes::Code bc;
1239     CallInfo callinfo;
1240     find_callee_info_helper(thread, vfst, bc, callinfo, CHECK_(methodHandle()));
1241     callee_method = callinfo.selected_method();
1242   }
1243   assert(callee_method()->is_method(), "must be");
1244   return callee_method;
1245 }
1246 
1247 // Resolves a call.
1248 methodHandle SharedRuntime::resolve_helper(JavaThread *thread,
1249                                            bool is_virtual,
1250                                            bool is_optimized,
1251                                            bool* caller_is_c1, TRAPS) {
1252   methodHandle callee_method;
1253   callee_method = resolve_sub_helper(thread, is_virtual, is_optimized, caller_is_c1, THREAD);
1254   if (JvmtiExport::can_hotswap_or_post_breakpoint()) {
1255     int retry_count = 0;
1256     while (!HAS_PENDING_EXCEPTION && callee_method->is_old() &&
1257            callee_method->method_holder() != SystemDictionary::Object_klass()) {
1258       // If there is a pending exception then there is no need to re-try
1259       // resolving this method.
1260       // If the method has been redefined, we need to try again.
1261       // Hack: we have no way to update the vtables of arrays, so don't
1262       // require that java.lang.Object has been updated.
1263 
1264       // It is very unlikely that a method is redefined more than 100 times
1265       // in the middle of resolve. If we loop here more than 100 times,
1266       // there is probably a bug.
1267       guarantee((retry_count++ < 100),
1268                 "Could not resolve to latest version of redefined method");
1269       // method is redefined in the middle of resolve so re-try.
1270       callee_method = resolve_sub_helper(thread, is_virtual, is_optimized, caller_is_c1, THREAD);
1271     }
1272   }
1273   return callee_method;
1274 }
1275 
1276 // This fails if resolution required refilling of IC stubs
1277 bool SharedRuntime::resolve_sub_helper_internal(methodHandle callee_method, const frame& caller_frame,
1278                                                 CompiledMethod* caller_nm, bool is_virtual, bool is_optimized,
1279                                                 Handle receiver, CallInfo& call_info, Bytecodes::Code invoke_code, TRAPS) {
1280   StaticCallInfo static_call_info;
1281   CompiledICInfo virtual_call_info;
1282 
1283   // Make sure the callee nmethod does not get deoptimized and removed before
1284   // we are done patching the code.
1285   CompiledMethod* callee = callee_method->code();
1286 
1287   if (callee != NULL) {
1288     assert(callee->is_compiled(), "must be nmethod for patching");
1289   }
1290 
1291   if (callee != NULL && !callee->is_in_use()) {
1292     // Patch call site to C2I adapter if callee nmethod is deoptimized or unloaded.
1293     callee = NULL;
1294   }
1295   nmethodLocker nl_callee(callee);
1296 #ifdef ASSERT
1297   address dest_entry_point = callee == NULL ? 0 : callee->entry_point(); // used below
1298 #endif
1299 
1300   bool is_nmethod = caller_nm->is_nmethod();
1301   bool caller_is_c1 = caller_nm->is_c1();
1302 
1303   if (is_virtual) {
1304     Klass* receiver_klass = NULL;
1305     if (ValueTypePassFieldsAsArgs && !caller_is_c1 && callee_method->method_holder()->is_value()) {
1306       // If the receiver is a value type that is passed as fields, no oop is available
1307       receiver_klass = callee_method->method_holder();
1308     } else {
1309       assert(receiver.not_null() || invoke_code == Bytecodes::_invokehandle, "sanity check");
1310       receiver_klass = invoke_code == Bytecodes::_invokehandle ? NULL : receiver->klass();
1311     }
1312     bool static_bound = call_info.resolved_method()->can_be_statically_bound();
1313     CompiledIC::compute_monomorphic_entry(callee_method, receiver_klass,
1314                      is_optimized, static_bound, is_nmethod, caller_is_c1, virtual_call_info,
1315                      CHECK_false);
1316   } else {
1317     // static call
1318     CompiledStaticCall::compute_entry(callee_method, caller_nm, static_call_info);
1319   }
1320 
1321   // grab lock, check for deoptimization and potentially patch caller
1322   {
1323     CompiledICLocker ml(caller_nm);
1324 
1325     // Lock blocks for safepoint during which both nmethods can change state.
1326 
1327     // Now that we are ready to patch: if the Method* was redefined, don't
1328     // update the call site and let the caller retry.
1329     // Don't update the call site if the callee nmethod was unloaded or deoptimized.
1330     // Don't update the call site if the callee nmethod was replaced by another
1331     // nmethod, which may happen once multiple alive nmethods per method
1332     // (tiered compilation) are supported.
1333     if (!callee_method->is_old() &&
1334         (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1335 #ifdef ASSERT
1336       // We must not try to patch to jump to an already unloaded method.
1337       if (dest_entry_point != 0) {
1338         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);


1343       if (is_virtual) {
1344         CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1345         if (inline_cache->is_clean()) {
1346           if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
1347             return false;
1348           }
1349         }
1350       } else {
1351         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1352         if (ssc->is_clean()) ssc->set(static_call_info);
1353       }
1354     }
1355   } // unlock CompiledICLocker
1356   return true;
1357 }
1358 
1359 // Resolves a call.  The compilers generate code for calls that go here
1360 // and are patched with the real destination of the call.
1361 methodHandle SharedRuntime::resolve_sub_helper(JavaThread *thread,
1362                                                bool is_virtual,
1363                                                bool is_optimized,
1364                                                bool* caller_is_c1, TRAPS) {
1365 
1366   ResourceMark rm(thread);
1367   RegisterMap cbl_map(thread, false);
1368   frame caller_frame = thread->last_frame().sender(&cbl_map);
1369 
1370   CodeBlob* caller_cb = caller_frame.cb();
1371   guarantee(caller_cb != NULL && caller_cb->is_compiled(), "must be called from compiled method");
1372   CompiledMethod* caller_nm = caller_cb->as_compiled_method_or_null();
1373   *caller_is_c1 = caller_nm->is_c1();
1374 
1375   // make sure caller is not getting deoptimized
1376   // and removed before we are done with it.
1377   // CLEANUP - with lazy deopt shouldn't need this lock
1378   nmethodLocker caller_lock(caller_nm);
1379 
1380   if (!is_virtual && !is_optimized) {
1381     SimpleScopeDesc ssd(caller_nm, caller_frame.pc());
1382     Bytecode bc(ssd.method(), ssd.method()->bcp_from(ssd.bci()));
1383     // Substitutability test implementation piggybacks on static call resolution
1384     if (bc.code() == Bytecodes::_if_acmpeq || bc.code() == Bytecodes::_if_acmpne) {
1385       SystemDictionary::ValueBootstrapMethods_klass()->initialize(CHECK_NULL);
1386       return SystemDictionary::ValueBootstrapMethods_klass()->find_method(vmSymbols::isSubstitutable_name(), vmSymbols::object_object_boolean_signature());
1387     }
1388   }
1389 
1390   // determine call info & receiver
1391   // note: a) receiver is NULL for static calls
1392   //       b) an exception is thrown if receiver is NULL for non-static calls
1393   CallInfo call_info;


1464     } else {
1465       InlineCacheBuffer::refill_ic_stubs();
1466     }
1467   }
1468 
1469 }
1470 
1471 
1472 // Inline caches exist only in compiled code
1473 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* thread))
1474 #ifdef ASSERT
1475   RegisterMap reg_map(thread, false);
1476   frame stub_frame = thread->last_frame();
1477   assert(stub_frame.is_runtime_frame(), "sanity check");
1478   frame caller_frame = stub_frame.sender(&reg_map);
1479   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame(), "unexpected frame");
1480 #endif /* ASSERT */
1481 
1482   methodHandle callee_method;
1483   bool is_optimized = false;
1484   bool caller_is_c1 = false;
1485   JRT_BLOCK
1486     callee_method = SharedRuntime::handle_ic_miss_helper(thread, is_optimized, caller_is_c1, CHECK_NULL);
1487     // Return Method* through TLS
1488     thread->set_vm_result_2(callee_method());
1489   JRT_BLOCK_END
1490   // return compiled code entry point after potential safepoints
1491   return entry_for_handle_wrong_method(callee_method, is_optimized, caller_is_c1);


1492 JRT_END
1493 
1494 
1495 // Handle call site that has been made non-entrant
1496 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* thread))
1497   // 6243940 We might end up in here if the callee is deoptimized
1498   // as we race to call it.  We don't want to take a safepoint if
1499   // the caller was interpreted because the caller frame will look
1500   // interpreted to the stack walkers and arguments are now
1501   // "compiled" so it is much better to make this transition
1502   // invisible to the stack walking code. The i2c path will
1503   // place the callee method in the callee_target. It is stashed
1504   // there because if we tried to find the callee by normal means a
1505   // safepoint would be possible and we could have trouble GC'ing the compiled args.
1506   RegisterMap reg_map(thread, false);
1507   frame stub_frame = thread->last_frame();
1508   assert(stub_frame.is_runtime_frame(), "sanity check");
1509   frame caller_frame = stub_frame.sender(&reg_map);
1510 
1511   if (caller_frame.is_interpreted_frame() ||
1512       caller_frame.is_entry_frame()) {
1513     Method* callee = thread->callee_target();
1514     guarantee(callee != NULL && callee->is_method(), "bad handshake");
1515     thread->set_vm_result_2(callee);
1516     thread->set_callee_target(NULL);
1517     return callee->get_c2i_entry();
1518   }
1519 
1520   // Must be compiled to compiled path which is safe to stackwalk
1521   methodHandle callee_method;
1522   bool is_optimized = false;
1523   bool caller_is_c1 = false;
1524   JRT_BLOCK
1525     // Force resolving of caller (if we called from compiled frame)
1526     callee_method = SharedRuntime::reresolve_call_site(thread, is_optimized, caller_is_c1, CHECK_NULL);
1527     thread->set_vm_result_2(callee_method());
1528   JRT_BLOCK_END
1529   // return compiled code entry point after potential safepoints
1530   return entry_for_handle_wrong_method(callee_method, is_optimized, caller_is_c1);


1531 JRT_END
1532 
1533 // Handle abstract method call
1534 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* thread))
1535   // Verbose error message for AbstractMethodError.
1536   // Get the called method from the invoke bytecode.
1537   vframeStream vfst(thread, true);
1538   assert(!vfst.at_end(), "Java frame must exist");
1539   methodHandle caller(vfst.method());
1540   Bytecode_invoke invoke(caller, vfst.bci());
1541   DEBUG_ONLY( invoke.verify(); )
1542 
1543   // Find the compiled caller frame.
1544   RegisterMap reg_map(thread);
1545   frame stubFrame = thread->last_frame();
1546   assert(stubFrame.is_runtime_frame(), "must be");
1547   frame callerFrame = stubFrame.sender(&reg_map);
1548   assert(callerFrame.is_compiled_frame(), "must be");
1549 
1550   // Install exception and return forward entry.
1551   address res = StubRoutines::throw_AbstractMethodError_entry();
1552   JRT_BLOCK
1553     methodHandle callee = invoke.static_target(thread);
1554     if (!callee.is_null()) {
1555       oop recv = callerFrame.retrieve_receiver(&reg_map);
1556       Klass *recv_klass = (recv != NULL) ? recv->klass() : NULL;
1557       LinkResolver::throw_abstract_method_error(callee, recv_klass, thread);
1558       res = StubRoutines::forward_exception_entry();
1559     }
1560   JRT_BLOCK_END
1561   return res;
1562 JRT_END
1563 
1564 
1565 // resolve a static call and patch code
1566 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread *thread ))
1567   methodHandle callee_method;
1568   bool caller_is_c1;
1569   JRT_BLOCK
1570     callee_method = SharedRuntime::resolve_helper(thread, false, false, &caller_is_c1, CHECK_NULL);
1571     thread->set_vm_result_2(callee_method());
1572   JRT_BLOCK_END
1573   // return compiled code entry point after potential safepoints
1574   address entry = caller_is_c1 ? 
1575     callee_method->verified_value_code_entry() : callee_method->verified_code_entry();
1576   assert(entry != NULL, "Jump to zero!");
1577   return entry;
1578 JRT_END
1579 
1580 
1581 // resolve virtual call and update inline cache to monomorphic
1582 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread *thread ))
1583   methodHandle callee_method;
1584   bool caller_is_c1;
1585   JRT_BLOCK
1586     callee_method = SharedRuntime::resolve_helper(thread, true, false, &caller_is_c1, CHECK_NULL);
1587     thread->set_vm_result_2(callee_method());
1588   JRT_BLOCK_END
1589   // return compiled code entry point after potential safepoints
1590   address entry = caller_is_c1 ? 
1591     callee_method->verified_value_code_entry() : callee_method->verified_value_ro_code_entry();
1592   assert(entry != NULL, "Jump to zero!");
1593   return entry;
1594 JRT_END
1595 
1596 
1597 // Resolve a virtual call that can be statically bound (e.g., always
1598 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1599 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread *thread))
1600   methodHandle callee_method;
1601   bool caller_is_c1;
1602   JRT_BLOCK
1603     callee_method = SharedRuntime::resolve_helper(thread, true, true, &caller_is_c1, CHECK_NULL);
1604     thread->set_vm_result_2(callee_method());
1605   JRT_BLOCK_END
1606   // return compiled code entry point after potential safepoints
1607   address entry = caller_is_c1 ? 
1608     callee_method->verified_value_code_entry() : callee_method->verified_code_entry();
1609   assert(entry != NULL, "Jump to zero!");
1610   return entry;
1611 JRT_END
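
A standalone sketch of the entry-point selection that the new resolve_*_call_C stubs above perform: the returned entry depends on whether the caller was compiled by C1 or by C2, which the surrounding code ties to different handling of value-type arguments (C2 may pass them as scalarized fields, C1 does not). The struct and field names below are hypothetical stand-ins, not the real Method accessors.

#include <cassert>
#include <cstdint>

struct CompiledEntries {
  uintptr_t verified_entry;        // convention used by C2 callers
  uintptr_t verified_value_entry;  // convention used by C1 callers
};

static uintptr_t select_entry(const CompiledEntries& e, bool caller_is_c1) {
  uintptr_t entry = caller_is_c1 ? e.verified_value_entry : e.verified_entry;
  assert(entry != 0 && "Jump to zero!");
  return entry;
}

int main() {
  CompiledEntries e{0x1000, 0x2000};
  return select_entry(e, /*caller_is_c1=*/true) == 0x2000 ? 0 : 1;
}
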
1612 
1613 // The handle_ic_miss_helper_internal function returns false if it failed, either
1614 // because it ran out of vtable stubs or because it ran out of IC stubs while
1615 // transitioning an IC to a transitional state. The needs_ic_stub_refill value is
1616 // set if the failure was due to running out of IC stubs, in which case
1617 // handle_ic_miss_helper refills the IC stubs and tries again.
1618 bool SharedRuntime::handle_ic_miss_helper_internal(Handle receiver, CompiledMethod* caller_nm,
1619                                                    const frame& caller_frame, methodHandle callee_method,
1620                                                    Bytecodes::Code bc, CallInfo& call_info,
1621                                                    bool& needs_ic_stub_refill, bool& is_optimized, bool caller_is_c1, TRAPS) {
1622   CompiledICLocker ml(caller_nm);
1623   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1624   bool should_be_mono = false;
1625   if (inline_cache->is_optimized()) {
1626     if (TraceCallFixup) {
1627       ResourceMark rm(THREAD);
1628       tty->print("OPTIMIZED IC miss (%s) call to", Bytecodes::name(bc));
1629       callee_method->print_short_name(tty);
1630       tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1631     }
1632     is_optimized = true;
1633     should_be_mono = true;
1634   } else if (inline_cache->is_icholder_call()) {
1635     CompiledICHolder* ic_oop = inline_cache->cached_icholder();
1636     if (ic_oop != NULL) {
1637       if (!ic_oop->is_loader_alive()) {
1638         // Deferred IC cleaning due to concurrent class unloading
1639         if (!inline_cache->set_to_clean()) {
1640           needs_ic_stub_refill = true;
1641           return false;


1650           ResourceMark rm(THREAD);
1651           tty->print("FALSE IC miss (%s) converting to compiled call to", Bytecodes::name(bc));
1652           callee_method->print_short_name(tty);
1653           tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1654         }
1655         should_be_mono = true;
1656       }
1657     }
1658   }
1659 
1660   if (should_be_mono) {
1661     // We have a path that was monomorphic but was going interpreted
1662     // and now we have (or had) a compiled entry. We correct the IC
1663     // by using a new icBuffer.
1664     CompiledICInfo info;
1665     Klass* receiver_klass = receiver()->klass();
1666     inline_cache->compute_monomorphic_entry(callee_method,
1667                                             receiver_klass,
1668                                             inline_cache->is_optimized(),
1669                                             false, caller_nm->is_nmethod(),
1670                                             caller_nm->is_c1(),
1671                                             info, CHECK_false);
1672     if (!inline_cache->set_to_monomorphic(info)) {
1673       needs_ic_stub_refill = true;
1674       return false;
1675     }
1676   } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1677     // Potential change to megamorphic
1678 
1679     bool successful = inline_cache->set_to_megamorphic(&call_info, bc, needs_ic_stub_refill, caller_is_c1, CHECK_false);
1680     if (needs_ic_stub_refill) {
1681       return false;
1682     }
1683     if (!successful) {
1684       if (!inline_cache->set_to_clean()) {
1685         needs_ic_stub_refill = true;
1686         return false;
1687       }
1688     }
1689   } else {
1690     // Either clean or megamorphic
1691   }
1692   return true;
1693 }
1694 
1695 methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, bool& is_optimized, bool& caller_is_c1, TRAPS) {
1696   ResourceMark rm(thread);
1697   CallInfo call_info;
1698   Bytecodes::Code bc;
1699 
1700   // receiver is NULL for static calls. An exception is thrown for NULL
1701   // receivers for non-static calls
1702   Handle receiver = find_callee_info(thread, bc, call_info,
1703                                      CHECK_(methodHandle()));
1704   // Compiler1 can produce virtual call sites that can actually be statically bound.
1705   // If we fell through to the code below we would think that the site was going
1706   // megamorphic when in fact the site can never miss. Worse, because we'd think it
1707   // was megamorphic we'd try to do a vtable dispatch; however, methods that can be
1708   // statically bound don't have vtable entries (vtable_index < 0) and we'd blow up.
1709   // So we force a reresolution of the call site (as if we did a handle_wrong_method
1710   // and not a plain ic_miss) and the site will be converted to an optimized virtual
1711   // call site that never misses again. I don't believe C2 will produce code like
1712   // this, but if it did this would still be the correct thing to do for it too, hence no ifdef.
1713   //
1714   if (call_info.resolved_method()->can_be_statically_bound()) {
1715     methodHandle callee_method = SharedRuntime::reresolve_call_site(thread, is_optimized, caller_is_c1, CHECK_(methodHandle()));
1716     if (TraceCallFixup) {
1717       RegisterMap reg_map(thread, false);
1718       frame caller_frame = thread->last_frame().sender(&reg_map);
1719       ResourceMark rm(thread);
1720       tty->print("converting IC miss to reresolve (%s) call to", Bytecodes::name(bc));
1721       callee_method->print_short_name(tty);
1722       tty->print_cr(" from pc: " INTPTR_FORMAT, p2i(caller_frame.pc()));
1723       tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1724     }
1725     return callee_method;
1726   }
1727 
1728   methodHandle callee_method = call_info.selected_method();
1729 
1730 #ifndef PRODUCT
1731   Atomic::inc(&_ic_miss_ctr);
1732 
1733   // Statistics & Tracing
1734   if (TraceCallFixup) {
1735     ResourceMark rm(thread);


1745     // produce statistics under the lock
1746     trace_ic_miss(f.pc());
1747   }
1748 #endif
1749 
1750   // install an event collector so that when a vtable stub is created the
1751   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1752   // event can't be posted when the stub is created as locks are held
1753   // - instead the event will be deferred until the event collector goes
1754   // out of scope.
1755   JvmtiDynamicCodeEventCollector event_collector;
1756 
1757   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1758   // Transitioning IC caches may require transition stubs. If we run out
1759   // of transition stubs, we have to drop locks and perform a safepoint
1760   // that refills them.
1761   RegisterMap reg_map(thread, false);
1762   frame caller_frame = thread->last_frame().sender(&reg_map);
1763   CodeBlob* cb = caller_frame.cb();
1764   CompiledMethod* caller_nm = cb->as_compiled_method();
1765   caller_is_c1 = caller_nm->is_c1();
1766 
1767   for (;;) {
1768     ICRefillVerifier ic_refill_verifier;
1769     bool needs_ic_stub_refill = false;
1770     bool successful = handle_ic_miss_helper_internal(receiver, caller_nm, caller_frame, callee_method,
1771                                                      bc, call_info, needs_ic_stub_refill, is_optimized, caller_is_c1, CHECK_(methodHandle()));
1772     if (successful || !needs_ic_stub_refill) {
1773       return callee_method;
1774     } else {
1775       InlineCacheBuffer::refill_ic_stubs();
1776     }
1777   }
1778 }
1779 
1780 static bool clear_ic_at_addr(CompiledMethod* caller_nm, address call_addr, bool is_static_call) {
1781   CompiledICLocker ml(caller_nm);
1782   if (is_static_call) {
1783     CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);
1784     if (!ssc->is_clean()) {
1785       return ssc->set_to_clean();
1786     }
1787   } else {
1788     // compiled, dispatched call (which used to call an interpreted method)
1789     CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1790     if (!inline_cache->is_clean()) {
1791       return inline_cache->set_to_clean();
1792     }
1793   }
1794   return true;
1795 }
1796 
1797 //
1798 // Resets a call-site in compiled code so it will get resolved again.
1799 // This routine handles virtual call sites, optimized virtual call
1800 // sites, and static call sites. Typically used to change a call site's
1801 // destination from compiled to interpreted.
1802 //
1803 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, bool& is_optimized, bool& caller_is_c1, TRAPS) {
1804   ResourceMark rm(thread);
1805   RegisterMap reg_map(thread, false);
1806   frame stub_frame = thread->last_frame();
1807   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1808   frame caller = stub_frame.sender(&reg_map);
1809 
1810   // Do nothing if the frame isn't a live compiled frame.
1811   // nmethod could be deoptimized by the time we get here
1812   // so no update to the caller is needed.
1813 
1814   if (caller.is_compiled_frame() && !caller.is_deoptimized_frame()) {
1815 
1816     address pc = caller.pc();
1817 
1818     // Check for static or virtual call
1819     bool is_static_call = false;
1820     CompiledMethod* caller_nm = CodeCache::find_compiled(pc);
1821     caller_is_c1 = caller_nm->is_c1();
1822 
1823     // Default call_addr is the location of the "basic" call.
1824     // Determine the address of the call we are reresolving. With
1825     // Inline Caches we will always find a recognizable call.
1826     // With Inline Caches disabled we may or may not find a
1827     // recognizable call. We will always find a call for static
1828     // calls and for optimized virtual calls. For vanilla virtual
1829     // calls it depends on the state of the UseInlineCaches switch.
1830     //
1831     // With Inline Caches disabled we can get here for a virtual call
1832     // for two reasons:
1833     //   1 - calling an abstract method. The vtable for abstract methods
1834     //       will run us through handle_wrong_method and we will eventually
1835     //       end up in the interpreter to throw the AbstractMethodError.
1836     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1837     //       call and between the time we fetch the entry address and
1838     //       jump to it the target gets deoptimized. Similar to case 1,
1839     //       we will wind up in the interpreter (through a c2i adapter with C2).
1840     //
1841     address call_addr = NULL;

