< prev index next >

src/hotspot/share/runtime/sharedRuntime.cpp


1377 
1378   // JSR 292 key invariant:                                                                                                          
1379   // If the resolved method is a MethodHandle invoke target, the call                                                                
1380   // site must be a MethodHandle call site, because the lambda form might tail-call                                                  
1381   // leaving the stack in a state unknown to either caller or callee                                                                 
1382   // TODO detune for now but we might need it again                                                                                  
1383 //  assert(!callee_method->is_compiled_lambda_form() ||                                                                              
1384 //         caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");                                           
1385 
1386   // Compute entry points. This might require generation of C2I converter                                                            
1387   // frames, so we cannot be holding any locks here. Furthermore, the                                                                
1388   // computation of the entry points is independent of patching the call.  We                                                        
1389   // always return the entry-point, but we only patch the stub if the call has                                                       
1390   // not been deoptimized.  Return values: For a virtual call this is an                                                             
1391   // (cached_oop, destination address) pair. For a static call/optimized                                                             
1392   // virtual this is just a destination address.                                                                                     
1393 
1394   // Patching IC caches may fail if we run out of transition stubs.
1395   // We refill the ic stubs then and try again.                                                                                      
1396   for (;;) {                                                                                                                         
1397     DEBUG_ONLY(ICRefillVerifier ic_refill_verifier;)                                                                                 
1398     bool successful = resolve_sub_helper_internal(callee_method, caller_frame, caller_nm,                                            
1399                                                   is_virtual, is_optimized, receiver,                                                
1400                                                   call_info, invoke_code, CHECK_(methodHandle()));                                   
1401     if (successful) {                                                                                                                
1402       return callee_method;                                                                                                          
1403     } else {                                                                                                                         
1404       InlineCacheBuffer::refill_ic_stubs();                                                                                          
1405     }                                                                                                                                
1406   }                                                                                                                                  
1407 
1408 }                                                                                                                                    
1409 
1410 
1411 // Inline caches exist only in compiled code                                                                                         
1412 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* thread))                                             
1413 #ifdef ASSERT                                                                                                                        
1414   RegisterMap reg_map(thread, false);                                                                                                
1415   frame stub_frame = thread->last_frame();                                                                                           
1416   assert(stub_frame.is_runtime_frame(), "sanity check");                                                                             

1377 
1378   // JSR 292 key invariant:
1379   // If the resolved method is a MethodHandle invoke target, the call
1380   // site must be a MethodHandle call site, because the lambda form might tail-call
1381   // leaving the stack in a state unknown to either caller or callee
1382   // TODO detune for now but we might need it again
1383 //  assert(!callee_method->is_compiled_lambda_form() ||
1384 //         caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1385 
1386   // Compute entry points. This might require generation of C2I converter
1387   // frames, so we cannot be holding any locks here. Furthermore, the
1388   // computation of the entry points is independent of patching the call.  We
1389   // always return the entry-point, but we only patch the stub if the call has
1390   // not been deoptimized.  Return values: For a virtual call this is an
1391   // (cached_oop, destination address) pair. For a static call/optimized
1392   // virtual this is just a destination address.
1393 
1394   // Patching IC caches may fail if we run out of transition stubs.
1395   // We refill the ic stubs then and try again.
1396   for (;;) {
1397     ICRefillVerifier ic_refill_verifier;
1398     bool successful = resolve_sub_helper_internal(callee_method, caller_frame, caller_nm,
1399                                                   is_virtual, is_optimized, receiver,
1400                                                   call_info, invoke_code, CHECK_(methodHandle()));
1401     if (successful) {
1402       return callee_method;
1403     } else {
1404       InlineCacheBuffer::refill_ic_stubs();
1405     }
1406   }
1407 
1408 }
1409 
1410 
1411 // Inline caches exist only in compiled code
1412 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* thread))
1413 #ifdef ASSERT
1414   RegisterMap reg_map(thread, false);
1415   frame stub_frame = thread->last_frame();
1416   assert(stub_frame.is_runtime_frame(), "sanity check");

1673   }                                                                                                                                  
1674 #endif                                                                                                                               
1675 
1676   // install an event collector so that when a vtable stub is created the                                                            
1677   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The                                                                
1678   // event can't be posted when the stub is created as locks are held                                                                
1679   // - instead the event will be deferred until the event collector goes                                                             
1680   // out of scope.                                                                                                                   
1681   JvmtiDynamicCodeEventCollector event_collector;                                                                                    
1682 
1683   // Update inline cache to megamorphic. Skip update if we are called from interpreted.                                              
1684   // Transitioning IC caches may require transition stubs. If we run out                                                             
1685   // of transition stubs, we have to drop locks and perform a safepoint                                                              
1686   // that refills them.                                                                                                              
1687   RegisterMap reg_map(thread, false);                                                                                                
1688   frame caller_frame = thread->last_frame().sender(&reg_map);                                                                        
1689   CodeBlob* cb = caller_frame.cb();                                                                                                  
1690   CompiledMethod* caller_nm = cb->as_compiled_method();                                                                              
1691 
1692   for (;;) {                                                                                                                         
1693     DEBUG_ONLY(ICRefillVerifier ic_refill_verifier;)                                                                                 
1694     bool needs_ic_stub_refill = false;                                                                                               
1695     bool successful = handle_ic_miss_helper_internal(receiver, caller_nm, caller_frame, callee_method,                               
1696                                                      bc, call_info, needs_ic_stub_refill, CHECK_(methodHandle()));                   
1697     if (successful || !needs_ic_stub_refill) {                                                                                       
1698       return callee_method;                                                                                                          
1699     } else {                                                                                                                         
1700       InlineCacheBuffer::refill_ic_stubs();                                                                                          
1701     }                                                                                                                                
1702   }                                                                                                                                  
1703 }                                                                                                                                    
1704 
1705 static bool clear_ic_at_addr(CompiledMethod* caller_nm, address call_addr, bool is_static_call) {                                    
1706   CompiledICLocker ml(caller_nm);                                                                                                    
1707   if (is_static_call) {                                                                                                              
1708     CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);                                                           
1709     if (!ssc->is_clean()) {                                                                                                          
1710       return ssc->set_to_clean();                                                                                                    
1711     }                                                                                                                                
1712   } else {                                                                                                                           

1673   }
1674 #endif
1675 
1676   // install an event collector so that when a vtable stub is created the
1677   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1678   // event can't be posted when the stub is created as locks are held
1679   // - instead the event will be deferred until the event collector goes
1680   // out of scope.
1681   JvmtiDynamicCodeEventCollector event_collector;
1682 
1683   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1684   // Transitioning IC caches may require transition stubs. If we run out
1685   // of transition stubs, we have to drop locks and perform a safepoint
1686   // that refills them.
1687   RegisterMap reg_map(thread, false);
1688   frame caller_frame = thread->last_frame().sender(&reg_map);
1689   CodeBlob* cb = caller_frame.cb();
1690   CompiledMethod* caller_nm = cb->as_compiled_method();
1691 
1692   for (;;) {
1693     ICRefillVerifier ic_refill_verifier;
1694     bool needs_ic_stub_refill = false;
1695     bool successful = handle_ic_miss_helper_internal(receiver, caller_nm, caller_frame, callee_method,
1696                                                      bc, call_info, needs_ic_stub_refill, CHECK_(methodHandle()));
1697     if (successful || !needs_ic_stub_refill) {
1698       return callee_method;
1699     } else {
1700       InlineCacheBuffer::refill_ic_stubs();
1701     }
1702   }
1703 }
1704 
1705 static bool clear_ic_at_addr(CompiledMethod* caller_nm, address call_addr, bool is_static_call) {
1706   CompiledICLocker ml(caller_nm);
1707   if (is_static_call) {
1708     CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);
1709     if (!ssc->is_clean()) {
1710       return ssc->set_to_clean();
1711     }
1712   } else {

1782         assert(iter.addr() == call_addr, "must find call");                                                                          
1783         if (iter.type() == relocInfo::static_call_type) {                                                                            
1784           is_static_call = true;                                                                                                     
1785         } else {                                                                                                                     
1786           assert(iter.type() == relocInfo::virtual_call_type ||                                                                      
1787                  iter.type() == relocInfo::opt_virtual_call_type                                                                     
1788                 , "unexpected relocInfo. type");                                                                                     
1789         }                                                                                                                            
1790       } else {                                                                                                                       
1791         assert(!UseInlineCaches, "relocation info. must exist for this address");                                                    
1792       }                                                                                                                              
1793 
1794       // Cleaning the inline cache will force a new resolve. This is more robust                                                     
1795       // than directly setting it to the new destination, since resolving of calls                                                   
1796       // is always done through the same code path. (experience shows that it                                                        
1797       // leads to very hard to track down bugs, if an inline cache gets updated                                                      
1798       // to a wrong method). It should not be performance critical, since the                                                        
1799       // resolve is only done once.                                                                                                  
1800 
1801       for (;;) {                                                                                                                     
1802         DEBUG_ONLY(ICRefillVerifier ic_refill_verifier;)                                                                             
1803         if (!clear_ic_at_addr(caller_nm, call_addr, is_static_call)) {                                                               
1804           InlineCacheBuffer::refill_ic_stubs();                                                                                      
1805         } else {                                                                                                                     
1806           break;                                                                                                                     
1807         }                                                                                                                            
1808       }                                                                                                                              
1809     }                                                                                                                                
1810   }                                                                                                                                  
1811 
1812   methodHandle callee_method = find_callee_method(thread, CHECK_(methodHandle()));                                                   
1813 
1814 
1815 #ifndef PRODUCT                                                                                                                      
1816   Atomic::inc(&_wrong_method_ctr);                                                                                                   
1817 
1818   if (TraceCallFixup) {                                                                                                              
1819     ResourceMark rm(thread);                                                                                                         
1820     tty->print("handle_wrong_method reresolving call to");                                                                           
1821     callee_method->print_short_name(tty);                                                                                            

1782         assert(iter.addr() == call_addr, "must find call");
1783         if (iter.type() == relocInfo::static_call_type) {
1784           is_static_call = true;
1785         } else {
1786           assert(iter.type() == relocInfo::virtual_call_type ||
1787                  iter.type() == relocInfo::opt_virtual_call_type
1788                 , "unexpected relocInfo. type");
1789         }
1790       } else {
1791         assert(!UseInlineCaches, "relocation info. must exist for this address");
1792       }
1793 
1794       // Cleaning the inline cache will force a new resolve. This is more robust
1795       // than directly setting it to the new destination, since resolving of calls
1796       // is always done through the same code path. (experience shows that it
1797       // leads to very hard to track down bugs, if an inline cache gets updated
1798       // to a wrong method). It should not be performance critical, since the
1799       // resolve is only done once.
1800 
1801       for (;;) {
1802         ICRefillVerifier ic_refill_verifier;
1803         if (!clear_ic_at_addr(caller_nm, call_addr, is_static_call)) {
1804           InlineCacheBuffer::refill_ic_stubs();
1805         } else {
1806           break;
1807         }
1808       }
1809     }
1810   }
1811 
1812   methodHandle callee_method = find_callee_method(thread, CHECK_(methodHandle()));
1813 
1814 
1815 #ifndef PRODUCT
1816   Atomic::inc(&_wrong_method_ctr);
1817 
1818   if (TraceCallFixup) {
1819     ResourceMark rm(thread);
1820     tty->print("handle_wrong_method reresolving call to");
1821     callee_method->print_short_name(tty);
< prev index next >