// Excerpt from HotSpot: src/share/vm/code/compiledIC.cpp
// (two revisions of the same region follow below)




 215 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
 216   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 217   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
 218   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 219 
 220   address entry;
 221   if (call_info->call_kind() == CallInfo::itable_call) {
 222     assert(bytecode == Bytecodes::_invokeinterface, "");
 223     int itable_index = call_info->itable_index();
 224     entry = VtableStubs::find_itable_stub(itable_index);
 225     if (entry == false) {
 226       return false;
 227     }
 228 #ifdef ASSERT
 229     int index = call_info->resolved_method()->itable_index();
 230     assert(index == itable_index, "CallInfo pre-computes this");
 231     InstanceKlass* k = call_info->resolved_method()->method_holder();
 232     assert(k->verify_itable_index(itable_index), "sanity check");
 233 #endif //ASSERT
 234     CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
 235                                                     call_info->resolved_klass()());
 236     holder->claim();
 237     InlineCacheBuffer::create_transition_stub(this, holder, entry);
 238   } else {
 239     assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
 240     // Can be different than selected_method->vtable_index(), due to package-private etc.
 241     int vtable_index = call_info->vtable_index();
 242     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
 243     entry = VtableStubs::find_vtable_stub(vtable_index);
 244     if (entry == NULL) {
 245       return false;
 246     }
 247     InlineCacheBuffer::create_transition_stub(this, NULL, entry);
 248   }
 249 
 250   if (TraceICs) {
 251     ResourceMark rm;
 252     tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
 253                    p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
 254   }
 255 
 256   // We can't check this anymore. With lazy deopt we could have already
 257   // cleaned this IC entry before we even return. This is possible if
 258   // we ran out of space in the inline cache buffer trying to do the
 259   // set_next and we safepointed to free up space. This is a benign
 260   // race because the IC entry was complete when we safepointed so
 261   // cleaning it immediately is harmless.
 262   // assert(is_megamorphic(), "sanity check");
 263   return true;
 264 }
 265 
 266 
 267 // true if destination is megamorphic stub
 268 bool CompiledIC::is_megamorphic() const {
 269   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 270   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 271 
 272   // Cannot rely on cached_value. It is either an interface or a method.
 273   return VtableStubs::is_entry_point(ic_destination());
 274 }
 275 
 276 bool CompiledIC::is_call_to_compiled() const {
 277   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 278 
 279   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
 280   // method is guaranteed to still exist, since we only remove methods after all inline caches
 281   // has been cleaned up
 282   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 283   bool is_monomorphic = (cb != NULL && cb->is_nmethod());
 284   // Check that the cached_value is a klass for non-optimized monomorphic calls
 285   // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
 286   // for calling directly to vep without using the inline cache (i.e., cached_value == NULL)
 287 #ifdef ASSERT
 288   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
 289   bool is_c1_method = caller->is_compiled_by_c1();
 290   assert( is_c1_method ||
 291          !is_monomorphic ||
 292          is_optimized() ||
 293          !caller->is_alive() ||


 517 #endif // COMPILER2
 518     if (is_optimized) {
 519       // Use stub entry
 520       info.set_interpreter_entry(method()->get_c2i_entry(), method());
 521     } else {
 522       // Use icholder entry
 523       CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass());
 524       info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
 525     }
 526   }
 527   assert(info.is_optimized() == is_optimized, "must agree");
 528 }
 529 
 530 
 531 bool CompiledIC::is_icholder_entry(address entry) {
 532   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
 533   if (cb != NULL && cb->is_adapter_blob()) {
 534     return true;
 535   }
 536   // itable stubs also use CompiledICHolder
 537   if (VtableStubs::is_entry_point(entry) && VtableStubs::stub_containing(entry)->is_itable_stub()) {
 538     return true;

 539   }

 540   return false;
 541 }
 542 
 543 // ----------------------------------------------------------------------------
 544 
 545 void CompiledStaticCall::set_to_clean() {
 546   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
 547   // Reset call site
 548   MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
 549 #ifdef ASSERT
 550   CodeBlob* cb = CodeCache::find_blob_unsafe(this);
 551   assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
 552 #endif
 553   set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
 554 
 555   // Do not reset stub here:  It is too expensive to call find_stub.
 556   // Instead, rely on caller (nmethod::clear_inline_caches) to clear
 557   // both the call and its stub.
 558 }
 559 




 215 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
 216   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 217   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
 218   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 219 
 220   address entry;
 221   if (call_info->call_kind() == CallInfo::itable_call) {
 222     assert(bytecode == Bytecodes::_invokeinterface, "");
 223     int itable_index = call_info->itable_index();
 224     entry = VtableStubs::find_itable_stub(itable_index);
 225     if (entry == false) {
 226       return false;
 227     }
 228 #ifdef ASSERT
 229     int index = call_info->resolved_method()->itable_index();
 230     assert(index == itable_index, "CallInfo pre-computes this");
 231     InstanceKlass* k = call_info->resolved_method()->method_holder();
 232     assert(k->verify_itable_index(itable_index), "sanity check");
 233 #endif //ASSERT
 234     CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
 235                                                     call_info->resolved_klass()(), false);
 236     holder->claim();
 237     InlineCacheBuffer::create_transition_stub(this, holder, entry);
 238   } else {
 239     assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
 240     // Can be different than selected_method->vtable_index(), due to package-private etc.
 241     int vtable_index = call_info->vtable_index();
 242     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
 243     entry = VtableStubs::find_vtable_stub(vtable_index);
 244     if (entry == NULL) {
 245       return false;
 246     }
 247     InlineCacheBuffer::create_transition_stub(this, NULL, entry);
 248   }
 249 
 250   if (TraceICs) {
 251     ResourceMark rm;
 252     tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
 253                    p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
 254   }
 255 
 256   // We can't check this anymore. With lazy deopt we could have already
 257   // cleaned this IC entry before we even return. This is possible if
 258   // we ran out of space in the inline cache buffer trying to do the
 259   // set_next and we safepointed to free up space. This is a benign
 260   // race because the IC entry was complete when we safepointed so
 261   // cleaning it immediately is harmless.
 262   // assert(is_megamorphic(), "sanity check");
 263   return true;
 264 }
 265 
 266 
 267 // true if destination is megamorphic stub
 268 bool CompiledIC::is_megamorphic() const {
 269   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 270   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 271 
 272   // Cannot rely on cached_value. It is either an interface or a method.
 273   return VtableStubs::entry_point(ic_destination()) != NULL;
 274 }
 275 
 276 bool CompiledIC::is_call_to_compiled() const {
 277   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 278 
 279   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
 280   // method is guaranteed to still exist, since we only remove methods after all inline caches
 281   // has been cleaned up
 282   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 283   bool is_monomorphic = (cb != NULL && cb->is_nmethod());
 284   // Check that the cached_value is a klass for non-optimized monomorphic calls
 285   // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
 286   // for calling directly to vep without using the inline cache (i.e., cached_value == NULL)
 287 #ifdef ASSERT
 288   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
 289   bool is_c1_method = caller->is_compiled_by_c1();
 290   assert( is_c1_method ||
 291          !is_monomorphic ||
 292          is_optimized() ||
 293          !caller->is_alive() ||


 517 #endif // COMPILER2
 518     if (is_optimized) {
 519       // Use stub entry
 520       info.set_interpreter_entry(method()->get_c2i_entry(), method());
 521     } else {
 522       // Use icholder entry
 523       CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass());
 524       info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
 525     }
 526   }
 527   assert(info.is_optimized() == is_optimized, "must agree");
 528 }
 529 
 530 
 531 bool CompiledIC::is_icholder_entry(address entry) {
 532   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
 533   if (cb != NULL && cb->is_adapter_blob()) {
 534     return true;
 535   }
 536   // itable stubs also use CompiledICHolder
 537   if (cb != NULL && cb->is_vtable_blob()) {
 538     VtableStub* s = VtableStubs::entry_point(entry); 
 539     return (s != NULL) && s->is_itable_stub(); 
 540   }
 541 
 542   return false;
 543 }
 544 
 545 // ----------------------------------------------------------------------------
 546 
 547 void CompiledStaticCall::set_to_clean() {
 548   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
 549   // Reset call site
 550   MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
 551 #ifdef ASSERT
 552   CodeBlob* cb = CodeCache::find_blob_unsafe(this);
 553   assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
 554 #endif
 555   set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
 556 
 557   // Do not reset stub here:  It is too expensive to call find_stub.
 558   // Instead, rely on caller (nmethod::clear_inline_caches) to clear
 559   // both the call and its stub.
 560 }
 561