
src/hotspot/share/runtime/sharedRuntime.cpp


7  *                                                                                                                                   
8  * This code is distributed in the hope that it will be useful, but WITHOUT                                                          
9  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or                                                             
10  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License                                                             
11  * version 2 for more details (a copy is included in the LICENSE file that                                                           
12  * accompanied this code).                                                                                                           
13  *                                                                                                                                   
14  * You should have received a copy of the GNU General Public License version                                                         
15  * 2 along with this work; if not, write to the Free Software Foundation,                                                            
16  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.                                                                     
17  *                                                                                                                                   
18  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA                                                           
19  * or visit www.oracle.com if you need additional information or have any                                                            
20  * questions.                                                                                                                        
21  *                                                                                                                                   
22  */                                                                                                                                  
23 
24 #include "precompiled.hpp"                                                                                                           
25 #include "jvm.h"                                                                                                                     
26 #include "aot/aotLoader.hpp"                                                                                                         
27 #include "code/compiledMethod.inline.hpp"                                                                                            
28 #include "classfile/stringTable.hpp"                                                                                                 
29 #include "classfile/systemDictionary.hpp"                                                                                            
30 #include "classfile/vmSymbols.hpp"                                                                                                   
31 #include "code/codeCache.hpp"                                                                                                        
32 #include "code/compiledIC.hpp"                                                                                                       
33 #include "code/scopeDesc.hpp"                                                                                                        
34 #include "code/vtableStubs.hpp"                                                                                                      
35 #include "compiler/abstractCompiler.hpp"                                                                                             
36 #include "compiler/compileBroker.hpp"                                                                                                
37 #include "compiler/disassembler.hpp"                                                                                                 
38 #include "gc/shared/barrierSet.hpp"                                                                                                  
39 #include "gc/shared/gcLocker.inline.hpp"                                                                                             
40 #include "interpreter/interpreter.hpp"                                                                                               
41 #include "interpreter/interpreterRuntime.hpp"                                                                                        
42 #include "jfr/jfrEvents.hpp"                                                                                                         
43 #include "logging/log.hpp"                                                                                                           
44 #include "memory/metaspaceShared.hpp"                                                                                                
45 #include "memory/resourceArea.hpp"                                                                                                   
46 #include "memory/universe.hpp"                                                                                                       
47 #include "oops/klass.hpp"                                                                                                            
48 #include "oops/method.inline.hpp"                                                                                                    
49 #include "oops/objArrayKlass.hpp"                                                                                                    
50 #include "oops/oop.inline.hpp"                                                                                                       
51 #include "prims/forte.hpp"                                                                                                           

7  *
8  * This code is distributed in the hope that it will be useful, but WITHOUT
9  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
11  * version 2 for more details (a copy is included in the LICENSE file that
12  * accompanied this code).
13  *
14  * You should have received a copy of the GNU General Public License version
15  * 2 along with this work; if not, write to the Free Software Foundation,
16  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17  *
18  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19  * or visit www.oracle.com if you need additional information or have any
20  * questions.
21  *
22  */
23 
24 #include "precompiled.hpp"
25 #include "jvm.h"
26 #include "aot/aotLoader.hpp"
27 #include "classfile/stringTable.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "code/codeCache.hpp"
31 #include "code/compiledIC.hpp"
32 #include "code/icBuffer.hpp"
33 #include "code/compiledMethod.inline.hpp"
34 #include "code/scopeDesc.hpp"
35 #include "code/vtableStubs.hpp"
36 #include "compiler/abstractCompiler.hpp"
37 #include "compiler/compileBroker.hpp"
38 #include "compiler/disassembler.hpp"
39 #include "gc/shared/barrierSet.hpp"
40 #include "gc/shared/gcLocker.inline.hpp"
41 #include "interpreter/interpreter.hpp"
42 #include "interpreter/interpreterRuntime.hpp"
43 #include "jfr/jfrEvents.hpp"
44 #include "logging/log.hpp"
45 #include "memory/metaspaceShared.hpp"
46 #include "memory/resourceArea.hpp"
47 #include "memory/universe.hpp"
48 #include "oops/klass.hpp"
49 #include "oops/method.inline.hpp"
50 #include "oops/objArrayKlass.hpp"
51 #include "oops/oop.inline.hpp"
52 #include "prims/forte.hpp"

1297                   p2i(caller_frame.pc()), p2i(callee_method->code()));                                                               
1298   }                                                                                                                                  
1299 #endif                                                                                                                               
1300 
1301   // JSR 292 key invariant:                                                                                                          
1302   // If the resolved method is a MethodHandle invoke target, the call                                                                
1303   // site must be a MethodHandle call site, because the lambda form might tail-call                                                  
1304   // leaving the stack in a state unknown to either caller or callee                                                                 
1305   // TODO detune for now but we might need it again                                                                                  
1306 //  assert(!callee_method->is_compiled_lambda_form() ||                                                                              
1307 //         caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");                                           
1308 
1309   // Compute entry points. This might require generation of C2I converter                                                            
1310   // frames, so we cannot be holding any locks here. Furthermore, the                                                                
1311   // computation of the entry points is independent of patching the call.  We                                                        
1312   // always return the entry-point, but we only patch the stub if the call has                                                       
1313   // not been deoptimized.  Return values: For a virtual call this is a
1314   // (cached_oop, destination address) pair. For a static call/optimized                                                             
1315   // virtual this is just a destination address.                                                                                     
1316 
1317   StaticCallInfo static_call_info;                                                                                                   
1318   CompiledICInfo virtual_call_info;                                                                                                  
1319 
1320   // Make sure the callee nmethod does not get deoptimized and removed before                                                        
1321   // we are done patching the code.                                                                                                  
1322   CompiledMethod* callee = callee_method->code();                                                                                    
1323 
1324   if (callee != NULL) {                                                                                                              
1325     assert(callee->is_compiled(), "must be nmethod for patching");                                                                   
1326   }                                                                                                                                  
1327 
1328   if (callee != NULL && !callee->is_in_use()) {                                                                                      
1329     // Patch call site to C2I adapter if callee nmethod is deoptimized or unloaded.                                                  
1330     callee = NULL;                                                                                                                   
1331   }                                                                                                                                  
1332   nmethodLocker nl_callee(callee);                                                                                                   
1333 #ifdef ASSERT                                                                                                                        
1334   address dest_entry_point = callee == NULL ? 0 : callee->entry_point(); // used below                                               
1335 #endif                                                                                                                               
1336 
1337   bool is_nmethod = caller_nm->is_nmethod();                                                                                         
1338 
1339   if (is_virtual) {                                                                                                                  
1340     assert(receiver.not_null() || invoke_code == Bytecodes::_invokehandle, "sanity check");                                          
1341     bool static_bound = call_info.resolved_method()->can_be_statically_bound();                                                      
1342     Klass* klass = invoke_code == Bytecodes::_invokehandle ? NULL : receiver->klass();                                               
1343     CompiledIC::compute_monomorphic_entry(callee_method, klass,                                                                      
1344                      is_optimized, static_bound, is_nmethod, virtual_call_info,                                                      
1345                      CHECK_(methodHandle()));                                                                                        
1346   } else {                                                                                                                           
1347     // static call                                                                                                                   
1348     CompiledStaticCall::compute_entry(callee_method, is_nmethod, static_call_info);                                                  
1349   }                                                                                                                                  
1350 
1351   // grab lock, check for deoptimization and potentially patch caller                                                                
1352   {                                                                                                                                  
1353     CompiledICLocker ml(caller_nm);                                                                                                  
1354 
1355     // Lock blocks for safepoint during which both nmethods can change state.                                                        
1356 
1357     // Now that we are ready to patch: if the Method* was redefined then
1358     // don't update the call site and let the caller retry.
1359     // Don't update call site if callee nmethod was unloaded or deoptimized.
1360     // Don't update call site if callee nmethod was replaced by another nmethod,
1361     // which may happen once multiple simultaneously alive nmethods
1362     // (tiered compilation) are supported.
1363     if (!callee_method->is_old() &&                                                                                                  
1364         (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {                                              
1365 #ifdef ASSERT                                                                                                                        
1366       // We must not try to patch to jump to an already unloaded method.                                                             
1367       if (dest_entry_point != 0) {                                                                                                   
1368         CodeBlob* cb = CodeCache::find_blob(dest_entry_point);                                                                       
1369         assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),                                               
1370                "should not call unloaded nmethod");                                                                                  
1371       }                                                                                                                              
1372 #endif                                                                                                                               
1373       if (is_virtual) {                                                                                                              
1374         CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());                                                  
1375         if (inline_cache->is_clean()) {                                                                                              
1376           inline_cache->set_to_monomorphic(virtual_call_info);                                                                       
1377         }                                                                                                                            
1378       } else {                                                                                                                       
1379         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());                                           
1380         if (ssc->is_clean()) ssc->set(static_call_info);                                                                             
1381       }                                                                                                                              
1382     }                                                                                                                                
1383                                                                                                                                      
1384   } // unlock CompiledICLocker                                                                                                       
1385 
1386   return callee_method;                                                                                                              
1387 }                                                                                                                                    
1388 
1389 
1390 // Inline caches exist only in compiled code                                                                                         
1391 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* thread))                                             
1392 #ifdef ASSERT                                                                                                                        
1393   RegisterMap reg_map(thread, false);                                                                                                
1394   frame stub_frame = thread->last_frame();                                                                                           
1395   assert(stub_frame.is_runtime_frame(), "sanity check");                                                                             
1396   frame caller_frame = stub_frame.sender(&reg_map);                                                                                  
1397   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame(), "unexpected frame");                                
1398 #endif /* ASSERT */                                                                                                                  
1399 
1400   methodHandle callee_method;                                                                                                        
1401   JRT_BLOCK                                                                                                                          
1402     callee_method = SharedRuntime::handle_ic_miss_helper(thread, CHECK_NULL);                                                        
1403     // Return Method* through TLS                                                                                                    

1298                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1299   }
1300 #endif
1301 
1302   // JSR 292 key invariant:
1303   // If the resolved method is a MethodHandle invoke target, the call
1304   // site must be a MethodHandle call site, because the lambda form might tail-call
1305   // leaving the stack in a state unknown to either caller or callee
1306   // TODO detune for now but we might need it again
1307 //  assert(!callee_method->is_compiled_lambda_form() ||
1308 //         caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1309 
1310   // Compute entry points. This might require generation of C2I converter
1311   // frames, so we cannot be holding any locks here. Furthermore, the
1312   // computation of the entry points is independent of patching the call.  We
1313   // always return the entry-point, but we only patch the stub if the call has
1314   // not been deoptimized.  Return values: For a virtual call this is a
1315   // (cached_oop, destination address) pair. For a static call/optimized
1316   // virtual this is just a destination address.
1317 
1318   bool first_try = true;
1319   for (;;) {
1320     if (!first_try) {
1321       // Patching IC caches may fail if we run out of transition stubs.
1322       // We refill the IC stubs and retry in that case.
1323       InlineCacheBuffer::refill_ic_stubs();
1324     }
1325     first_try = false;
1326 
1327     StaticCallInfo static_call_info;
1328     CompiledICInfo virtual_call_info;
1329 
1330     // Make sure the callee nmethod does not get deoptimized and removed before
1331     // we are done patching the code.
1332     CompiledMethod* callee = callee_method->code();
1333 
1334     if (callee != NULL) {
1335       assert(callee->is_compiled(), "must be nmethod for patching");
1336     }
1337 
1338     if (callee != NULL && !callee->is_in_use()) {
1339       // Patch call site to C2I adapter if callee nmethod is deoptimized or unloaded.
1340       callee = NULL;
1341     }
1342     nmethodLocker nl_callee(callee);
1343 #ifdef ASSERT
1344     address dest_entry_point = callee == NULL ? 0 : callee->entry_point(); // used below
1345 #endif
1346 
1347     bool is_nmethod = caller_nm->is_nmethod();
1348 
1349     if (is_virtual) {
1350       assert(receiver.not_null() || invoke_code == Bytecodes::_invokehandle, "sanity check");
1351       bool static_bound = call_info.resolved_method()->can_be_statically_bound();
1352       Klass* klass = invoke_code == Bytecodes::_invokehandle ? NULL : receiver->klass();
1353       CompiledIC::compute_monomorphic_entry(callee_method, klass,
1354                        is_optimized, static_bound, is_nmethod, virtual_call_info,
1355                        CHECK_(methodHandle()));
1356     } else {
1357       // static call
1358       CompiledStaticCall::compute_entry(callee_method, is_nmethod, static_call_info);
1359     }
1360 
1361     // grab lock, check for deoptimization and potentially patch caller
1362     {
1363       CompiledICLocker ml(caller_nm);
1364 
1365       // Lock blocks for safepoint during which both nmethods can change state.
1366 
1367       // Now that we are ready to patch: if the Method* was redefined then
1368       // don't update the call site and let the caller retry.
1369       // Don't update call site if callee nmethod was unloaded or deoptimized.
1370       // Don't update call site if callee nmethod was replaced by another nmethod,
1371       // which may happen once multiple simultaneously alive nmethods
1372       // (tiered compilation) are supported.
1373       if (!callee_method->is_old() &&
1374           (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1375 #ifdef ASSERT
1376         // We must not try to patch to jump to an already unloaded method.
1377         if (dest_entry_point != 0) {
1378           CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
1379           assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
1380                  "should not call unloaded nmethod");
1381         }
1382 #endif
1383         if (is_virtual) {
1384           CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1385           if (inline_cache->is_clean()) {
1386             if (!inline_cache->set_to_monomorphic(virtual_call_info)) {
1387               continue;
1388             }
1389           }
1390         } else {
1391           CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1392           if (ssc->is_clean()) ssc->set(static_call_info);
1393         }
1394       }
1395     } // unlock CompiledICLocker
1396     break;
1397   }
1398 
1399   return callee_method;
1400 }
1401 
1402 
1403 // Inline caches exist only in compiled code
1404 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* thread))
1405 #ifdef ASSERT
1406   RegisterMap reg_map(thread, false);
1407   frame stub_frame = thread->last_frame();
1408   assert(stub_frame.is_runtime_frame(), "sanity check");
1409   frame caller_frame = stub_frame.sender(&reg_map);
1410   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame(), "unexpected frame");
1411 #endif /* ASSERT */
1412 
1413   methodHandle callee_method;
1414   JRT_BLOCK
1415     callee_method = SharedRuntime::handle_ic_miss_helper(thread, CHECK_NULL);
1416     // Return Method* through TLS
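
The essential change in the hunk above is that an inline-cache transition can now fail when the InlineCacheBuffer has no transition stubs left; the caller then refills the stubs, which may safepoint and therefore must happen with no CompiledICLocker held, and retries the whole transition. The self-contained toy below models that refill-and-retry shape; Pool, try_transition() and the mutex are hypothetical stand-ins for the transition-stub buffer and the IC lock, not HotSpot APIs.

// Toy model of the refill-and-retry pattern (hypothetical names, not HotSpot API).
#include <cstdio>
#include <mutex>

struct Pool {
  int stubs = 0;                            // start empty to force one retry
  bool take()   { if (stubs == 0) return false; --stubs; return true; }
  void refill() { stubs = 8; }              // models the safepoint that refills IC stubs
};

static std::mutex ic_lock;                  // models CompiledICLocker

static bool try_transition(Pool& p) {
  std::lock_guard<std::mutex> g(ic_lock);   // patching happens under the IC lock
  return p.take();                          // fails when no transition stub is left
}

int main() {
  Pool pool;
  bool first_try = true;
  for (;;) {
    if (!first_try) {
      pool.refill();                        // runs with the lock released, because the
    }                                       // real refill may need to safepoint
    first_try = false;
    if (try_transition(pool)) break;        // success: the call site is patched
  }
  std::puts("call site patched after refilling transition stubs");
  return 0;
}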

1537   // plain ic_miss) and the site will be converted to an optimized virtual call site                                                 
1538   // never to miss again. I don't believe C2 will produce code like this but if it                                                   
1539   // did this would still be the correct thing to do for it too, hence no ifdef.                                                     
1540   //                                                                                                                                 
1541   if (call_info.resolved_method()->can_be_statically_bound()) {                                                                      
1542     methodHandle callee_method = SharedRuntime::reresolve_call_site(thread, CHECK_(methodHandle()));                                 
1543     if (TraceCallFixup) {                                                                                                            
1544       RegisterMap reg_map(thread, false);                                                                                            
1545       frame caller_frame = thread->last_frame().sender(&reg_map);                                                                    
1546       ResourceMark rm(thread);                                                                                                       
1547       tty->print("converting IC miss to reresolve (%s) call to", Bytecodes::name(bc));                                               
1548       callee_method->print_short_name(tty);                                                                                          
1549       tty->print_cr(" from pc: " INTPTR_FORMAT, p2i(caller_frame.pc()));                                                             
1550       tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));                                                            
1551     }                                                                                                                                
1552     return callee_method;                                                                                                            
1553   }                                                                                                                                  
1554 
1555   methodHandle callee_method = call_info.selected_method();                                                                          
1556 
1557   bool should_be_mono = false;                                                                                                       
1558                                                                                                                                      
1559 #ifndef PRODUCT                                                                                                                      
1560   Atomic::inc(&_ic_miss_ctr);                                                                                                        
1561 
1562   // Statistics & Tracing                                                                                                            
1563   if (TraceCallFixup) {                                                                                                              
1564     ResourceMark rm(thread);                                                                                                         
1565     tty->print("IC miss (%s) call to", Bytecodes::name(bc));                                                                         
1566     callee_method->print_short_name(tty);                                                                                            
1567     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));                                                              
1568   }                                                                                                                                  
1569 
1570   if (ICMissHistogram) {                                                                                                             
1571     MutexLocker m(VMStatistic_lock);                                                                                                 
1572     RegisterMap reg_map(thread, false);                                                                                              
1573     frame f = thread->last_frame().real_sender(&reg_map);// skip runtime stub                                                        
1574     // produce statistics under the lock                                                                                             
1575     trace_ic_miss(f.pc());                                                                                                           
1576   }                                                                                                                                  
1577 #endif                                                                                                                               
1578 
1579   // install an event collector so that when a vtable stub is created the                                                            
1580   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The                                                                
1581   // event can't be posted when the stub is created as locks are held                                                                
1582   // - instead the event will be deferred until the event collector goes                                                             
1583   // out of scope.                                                                                                                   
1584   JvmtiDynamicCodeEventCollector event_collector;                                                                                    
1585 
1586   // Update inline cache to megamorphic. Skip update if we are called from interpreted.                                              
1587   {                                                                                                                                  
1588     RegisterMap reg_map(thread, false);                                                                                              
1589     frame caller_frame = thread->last_frame().sender(&reg_map);                                                                      
1590     CodeBlob* cb = caller_frame.cb();                                                                                                
1591     CompiledMethod* caller_nm = cb->as_compiled_method_or_null();                                                                    
1592     CompiledICLocker ml(caller_nm);                                                                                                  
1593 
1594     if (cb->is_compiled()) {                                                                                                         
1595       CompiledIC* inline_cache = CompiledIC_before(((CompiledMethod*)cb), caller_frame.pc());                                        
1596       bool should_be_mono = false;                                                                                                   
1597       if (inline_cache->is_optimized()) {                                                                                            
1598         if (TraceCallFixup) {                                                                                                        
1599           ResourceMark rm(thread);                                                                                                   
1600           tty->print("OPTIMIZED IC miss (%s) call to", Bytecodes::name(bc));                                                         
1601           callee_method->print_short_name(tty);                                                                                      
1602           tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));                                                        
1603         }                                                                                                                            
1604         should_be_mono = true;                                                                                                       
1605       } else if (inline_cache->is_icholder_call()) {                                                                                 
1606         CompiledICHolder* ic_oop = inline_cache->cached_icholder();                                                                  
1607         if (ic_oop != NULL) {                                                                                                        
1608           if (!ic_oop->is_loader_alive()) {                                                                                          
1609             // Deferred IC cleaning due to concurrent class unloading                                                                
1610             inline_cache->set_to_clean();                                                                                            
1611           } else if (receiver()->klass() == ic_oop->holder_klass()) {                                                                
1612             // This isn't a real miss. We must have seen that compiled code                                                          
1613             // is now available and we want the call site converted to a                                                             
1614             // monomorphic compiled call site.                                                                                       
1615             // We can't assert for callee_method->code() != NULL because it                                                          
1616             // could have been deoptimized in the meantime                                                                           
1617             if (TraceCallFixup) {                                                                                                    
1618               ResourceMark rm(thread);                                                                                               
1619               tty->print("FALSE IC miss (%s) converting to compiled call to", Bytecodes::name(bc));                                  
1620               callee_method->print_short_name(tty);                                                                                  
1621               tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));                                                    
1622             }                                                                                                                        
1623             should_be_mono = true;                                                                                                   
1624           }                                                                                                                          
1625         }                                                                                                                            
1626       }                                                                                                                              
1627 
1628       if (should_be_mono) {                                                                                                          
1629                                                                                                                                      
1630         // We have a path that was monomorphic but was going interpreted                                                             
1631         // and now we have (or had) a compiled entry. We correct the IC                                                              
1632         // by using a new icBuffer.                                                                                                  
1633         CompiledICInfo info;                                                                                                         
1634         Klass* receiver_klass = receiver()->klass();                                                                                 
1635         inline_cache->compute_monomorphic_entry(callee_method,                                                                       
1636                                                 receiver_klass,                                                                      
1637                                                 inline_cache->is_optimized(),                                                        
1638                                                 false, caller_nm->is_nmethod(),                                                      
1639                                                 info, CHECK_(methodHandle()));                                                       
1640         inline_cache->set_to_monomorphic(info);                                                                                      
1641       } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {                                                     
1642         // Potential change to megamorphic                                                                                           
1643         bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));                                  
1644         if (!successful) {                                                                                                           
1645           inline_cache->set_to_clean();                                                                                              
1646         }                                                                                                                            
1647       } else {                                                                                                                       
1648         // Either clean or megamorphic                                                                                               
1649       }                                                                                                                              
1650     } else {                                                                                                                         
1651       fatal("Unimplemented");                                                                                                        
1652     }                                                                                                                                
1653   } // Release CompiledICLocker                                                                                                      
1654 
1655   return callee_method;                                                                                                              
1656 }                                                                                                                                    
1657 
1658 //                                                                                                                                   
1659 // Resets a call-site in compiled code so it will get resolved again.                                                                
1660 // This routine handles virtual call sites, optimized virtual call
1661 // sites, and static call sites. Typically used to change a call site's
1662 // destination from compiled to interpreted.
1663 //                                                                                                                                   
1664 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, TRAPS) {                                                         
1665   ResourceMark rm(thread);                                                                                                           
1666   RegisterMap reg_map(thread, false);                                                                                                
1667   frame stub_frame = thread->last_frame();                                                                                           
1668   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");                                                                    
1669   frame caller = stub_frame.sender(&reg_map);                                                                                        
1670 
1671   // Do nothing if the frame isn't a live compiled frame.                                                                            

1550   // plain ic_miss) and the site will be converted to an optimized virtual call site
1551   // never to miss again. I don't believe C2 will produce code like this but if it
1552   // did this would still be the correct thing to do for it too, hence no ifdef.
1553   //
1554   if (call_info.resolved_method()->can_be_statically_bound()) {
1555     methodHandle callee_method = SharedRuntime::reresolve_call_site(thread, CHECK_(methodHandle()));
1556     if (TraceCallFixup) {
1557       RegisterMap reg_map(thread, false);
1558       frame caller_frame = thread->last_frame().sender(&reg_map);
1559       ResourceMark rm(thread);
1560       tty->print("converting IC miss to reresolve (%s) call to", Bytecodes::name(bc));
1561       callee_method->print_short_name(tty);
1562       tty->print_cr(" from pc: " INTPTR_FORMAT, p2i(caller_frame.pc()));
1563       tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1564     }
1565     return callee_method;
1566   }
1567 
1568   methodHandle callee_method = call_info.selected_method();
1569 
1570 #ifndef PRODUCT
1571   Atomic::inc(&_ic_miss_ctr);
1572 
1573   // Statistics & Tracing
1574   if (TraceCallFixup) {
1575     ResourceMark rm(thread);
1576     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1577     callee_method->print_short_name(tty);
1578     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1579   }
1580 
1581   if (ICMissHistogram) {
1582     MutexLocker m(VMStatistic_lock);
1583     RegisterMap reg_map(thread, false);
1584     frame f = thread->last_frame().real_sender(&reg_map);// skip runtime stub
1585     // produce statistics under the lock
1586     trace_ic_miss(f.pc());
1587   }
1588 #endif
1589 
1590   // install an event collector so that when a vtable stub is created the
1591   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1592   // event can't be posted when the stub is created as locks are held
1593   // - instead the event will be deferred until the event collector goes
1594   // out of scope.
1595   JvmtiDynamicCodeEventCollector event_collector;
1596 
1597   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1598   bool first_try = true;
1599   for (;;) {
1600     if (!first_try) {
1601       // Transitioning IC caches may require transition stubs. If we run out
1602       // of transition stubs, we have to drop locks and perform a safepoint
1603       // that refills them.
1604       InlineCacheBuffer::refill_ic_stubs();
1605     }
1606     first_try = false;
1607     RegisterMap reg_map(thread, false);
1608     frame caller_frame = thread->last_frame().sender(&reg_map);
1609     CodeBlob* cb = caller_frame.cb();
1610     CompiledMethod* caller_nm = cb->as_compiled_method_or_null();
1611     CompiledICLocker ml(caller_nm);
1612 
1613     if (!cb->is_compiled()) {
1614       Unimplemented();
1615     }
1616     CompiledIC* inline_cache = CompiledIC_before(((CompiledMethod*)cb), caller_frame.pc());
1617     bool should_be_mono = false;
1618     if (inline_cache->is_optimized()) {
1619       if (TraceCallFixup) {
1620         ResourceMark rm(thread);
1621         tty->print("OPTIMIZED IC miss (%s) call to", Bytecodes::name(bc));
1622         callee_method->print_short_name(tty);
1623         tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1624       }
1625       should_be_mono = true;
1626     } else if (inline_cache->is_icholder_call()) {
1627       CompiledICHolder* ic_oop = inline_cache->cached_icholder();
1628       if (ic_oop != NULL) {
1629         if (!ic_oop->is_loader_alive()) {
1630           // Deferred IC cleaning due to concurrent class unloading
1631           inline_cache->set_to_clean();
1632         } else if (receiver()->klass() == ic_oop->holder_klass()) {
1633           // This isn't a real miss. We must have seen that compiled code
1634           // is now available and we want the call site converted to a
1635           // monomorphic compiled call site.
1636           // We can't assert for callee_method->code() != NULL because it
1637           // could have been deoptimized in the meantime
1638           if (TraceCallFixup) {
1639             ResourceMark rm(thread);
1640             tty->print("FALSE IC miss (%s) converting to compiled call to", Bytecodes::name(bc));
1641             callee_method->print_short_name(tty);
1642             tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1643           }
1644           should_be_mono = true;
1645         }
1646       }
1647     }
1648 
1649     if (should_be_mono) {
1650       // We have a path that was monomorphic but was going interpreted
1651       // and now we have (or had) a compiled entry. We correct the IC
1652       // by using a new icBuffer.
1653       CompiledICInfo info;
1654       Klass* receiver_klass = receiver()->klass();
1655       inline_cache->compute_monomorphic_entry(callee_method,
1656                                               receiver_klass,
1657                                               inline_cache->is_optimized(),
1658                                               false, caller_nm->is_nmethod(),
1659                                               info, CHECK_(methodHandle()));
1660       if (!inline_cache->set_to_monomorphic(info)) {
1661         continue;
1662       }
1663     } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
1664       // Potential change to megamorphic
1665       bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
1666       if (!successful) {
1667         if (!inline_cache->set_to_clean()) {
1668           continue;
1669         }
1670       }
1671     } else {
1672       // Either clean or megamorphic
1673     }
1674     break;
1675   } // Release CompiledICLocker
1676 
1677   return callee_method;
1678 }
1679 
1680 //
1681 // Resets a call-site in compiled code so it will get resolved again.
1682 // This routine handles virtual call sites, optimized virtual call
1683 // sites, and static call sites. Typically used to change a call site's
1684 // destination from compiled to interpreted.
1685 //
1686 methodHandle SharedRuntime::reresolve_call_site(JavaThread *thread, TRAPS) {
1687   ResourceMark rm(thread);
1688   RegisterMap reg_map(thread, false);
1689   frame stub_frame = thread->last_frame();
1690   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1691   frame caller = stub_frame.sender(&reg_map);
1692 
1693   // Do nothing if the frame isn't a live compiled frame.
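
Condensed, the retry loop that the hunk above adds to handle_ic_miss_helper has the following skeleton (identifiers as shown in the hunk, elisions marked with "...", so this is an illustrative outline rather than standalone code). The monomorphic transition and the fall-back set_to_clean() report failure when no transition stub is available and funnel into the refill-and-retry, while a failed megamorphic transition first falls back to cleaning the inline cache.

  bool first_try = true;
  for (;;) {
    if (!first_try) {
      InlineCacheBuffer::refill_ic_stubs();          // may safepoint; no IC lock is held here
    }
    first_try = false;
    // ... locate caller_nm and inline_cache, take the CompiledICLocker ...
    if (should_be_mono) {
      // ... compute_monomorphic_entry(...) into info ...
      if (!inline_cache->set_to_monomorphic(info)) {
        continue;                                    // out of transition stubs: refill and retry
      }
    } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
      if (!inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()))) {
        if (!inline_cache->set_to_clean()) {
          continue;                                  // cleaning failed too: refill and retry
        }
      }
    }
    break;                                           // the IC is in the desired state
  }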

1720           is_static_call = true;                                                                                                     
1721         } else {                                                                                                                     
1722           assert(iter.type() == relocInfo::virtual_call_type ||                                                                      
1723                  iter.type() == relocInfo::opt_virtual_call_type                                                                     
1724                 , "unexpected relocInfo. type");                                                                                     
1725         }                                                                                                                            
1726       } else {                                                                                                                       
1727         assert(!UseInlineCaches, "relocation info. must exist for this address");                                                    
1728       }                                                                                                                              
1729 
1730       // Cleaning the inline cache will force a new resolve. This is more robust                                                     
1731       // than directly setting it to the new destination, since resolving of calls                                                   
1732       // is always done through the same code path. (Experience shows that it
1733       // leads to very hard-to-track-down bugs if an inline cache gets updated
1734       // to a wrong method.) It should not be performance critical, since the
1735       // resolve is only done once.                                                                                                  
1736 
1737       CompiledICLocker ml(caller_nm);                                                                                                
1738       if (is_static_call) {                                                                                                          
1739         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);                                                       
1740         ssc->set_to_clean();                                                                                                         
1741       } else {                                                                                                                       
1742         // compiled, dispatched call (which used to call an interpreted method)                                                      
1743         CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);                                                              
1744         inline_cache->set_to_clean();                                                                                                
1745       }                                                                                                                              
1746     }                                                                                                                                
1747   }                                                                                                                                  
1748 
1749   methodHandle callee_method = find_callee_method(thread, CHECK_(methodHandle()));                                                   
1750 
1751 
1752 #ifndef PRODUCT                                                                                                                      
1753   Atomic::inc(&_wrong_method_ctr);                                                                                                   
1754 
1755   if (TraceCallFixup) {                                                                                                              
1756     ResourceMark rm(thread);                                                                                                         
1757     tty->print("handle_wrong_method reresolving call to");                                                                           
1758     callee_method->print_short_name(tty);                                                                                            
1759     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));                                                              
1760   }                                                                                                                                  
1761 #endif                                                                                                                               
1762 
1763   return callee_method;                                                                                                              

1742           is_static_call = true;
1743         } else {
1744           assert(iter.type() == relocInfo::virtual_call_type ||
1745                  iter.type() == relocInfo::opt_virtual_call_type
1746                 , "unexpected relocInfo. type");
1747         }
1748       } else {
1749         assert(!UseInlineCaches, "relocation info. must exist for this address");
1750       }
1751 
1752       // Cleaning the inline cache will force a new resolve. This is more robust
1753       // than directly setting it to the new destination, since resolving of calls
1754       // is always done through the same code path. (Experience shows that it
1755       // leads to very hard-to-track-down bugs if an inline cache gets updated
1756       // to a wrong method.) It should not be performance critical, since the
1757       // resolve is only done once.
1758 
1759       CompiledICLocker ml(caller_nm);
1760       if (is_static_call) {
1761         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);
1762         if (!ssc->is_clean()) {
1763           ssc->set_to_clean();
1764         }
1765       } else {
1766         // compiled, dispatched call (which used to call an interpreted method)
1767         CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1768         if (!inline_cache->is_clean()) {
1769           inline_cache->set_to_clean();
1770         }
1771       }
1772     }
1773   }
1774 
1775   methodHandle callee_method = find_callee_method(thread, CHECK_(methodHandle()));
1776 
1777 
1778 #ifndef PRODUCT
1779   Atomic::inc(&_wrong_method_ctr);
1780 
1781   if (TraceCallFixup) {
1782     ResourceMark rm(thread);
1783     tty->print("handle_wrong_method reresolving call to");
1784     callee_method->print_short_name(tty);
1785     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1786   }
1787 #endif
1788 
1789   return callee_method;