src/hotspot/share/code/compiledIC.cpp

Old version:

 218 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
 219   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 220   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
 221   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 222 
 223   address entry;
 224   if (call_info->call_kind() == CallInfo::itable_call) {
 225     assert(bytecode == Bytecodes::_invokeinterface, "");
 226     int itable_index = call_info->itable_index();
 227     entry = VtableStubs::find_itable_stub(itable_index);
 228     if (entry == NULL) {
 229       return false;
 230     }
 231 #ifdef ASSERT
 232     int index = call_info->resolved_method()->itable_index();
 233     assert(index == itable_index, "CallInfo pre-computes this");
 234     InstanceKlass* k = call_info->resolved_method()->method_holder();
 235     assert(k->verify_itable_index(itable_index), "sanity check");
 236 #endif //ASSERT
 237     CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
 238                                                     call_info->resolved_klass());
 239     holder->claim();
 240     InlineCacheBuffer::create_transition_stub(this, holder, entry);
 241   } else {
 242     assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
 243     // Can be different than selected_method->vtable_index(), due to package-private etc.
 244     int vtable_index = call_info->vtable_index();
 245     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
 246     entry = VtableStubs::find_vtable_stub(vtable_index);
 247     if (entry == NULL) {
 248       return false;
 249     }
 250     InlineCacheBuffer::create_transition_stub(this, NULL, entry);
 251   }
 252 
 253   if (TraceICs) {
 254     ResourceMark rm;
 255     tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
 256                    p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
 257   }
 258 
 259   // We can't check this anymore. With lazy deopt we could have already
 260   // cleaned this IC entry before we even return. This is possible if
 261   // we ran out of space in the inline cache buffer trying to do the
 262   // set_next and we safepointed to free up space. This is a benign
 263   // race because the IC entry was complete when we safepointed so
 264   // cleaning it immediately is harmless.
 265   // assert(is_megamorphic(), "sanity check");
 266   return true;
 267 }
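
For orientation, here is a minimal toy model of the branch above (all names hypothetical, not HotSpot code): the megamorphic transition picks a shared dispatch stub keyed by the table index, and only the itable path carries extra metadata (interface holder plus resolved klass) in a CompiledICHolder, while the vtable path passes NULL as the cached value.

#include <cstdio>
#include <map>

enum CallKind { vtable_call, itable_call };

static std::map<int, int> g_vtable_stubs;   // stand-in for VtableStubs' vtable stubs
static std::map<int, int> g_itable_stubs;   // stand-in for VtableStubs' itable stubs

bool transition_to_megamorphic(CallKind kind, int table_index) {
  if (kind == itable_call) {
    if (g_itable_stubs.count(table_index) == 0) {
      return false;   // stub unavailable (e.g. code cache full); caller keeps the old IC state
    }
    // Real code: allocate CompiledICHolder(interface, resolved_klass), claim it,
    // then InlineCacheBuffer::create_transition_stub(this, holder, entry).
    std::printf("itable dispatch via stub for index %d\n", table_index);
  } else {
    if (g_vtable_stubs.count(table_index) == 0) {
      return false;
    }
    // Real code: transition stub with a NULL cached value; the vtable index
    // alone is enough to dispatch, so no holder object is needed.
    std::printf("vtable dispatch via stub for index %d\n", table_index);
  }
  return true;
}
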
 268 
 269 
 270 // true if destination is megamorphic stub
 271 bool CompiledIC::is_megamorphic() const {
 272   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 273   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 274 
 275   // Cannot rely on cached_value. It is either an interface or a method.
 276   return VtableStubs::is_entry_point(ic_destination());
 277 }
 278 
 279 bool CompiledIC::is_call_to_compiled() const {
 280   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 281 
 282   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
 283   // method is guaranteed to still exist, since we only remove methods after all inline caches
 284   // have been cleaned up.
 285   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 286   bool is_monomorphic = (cb != NULL && cb->is_compiled());
 287   // Check that the cached_value is a klass for non-optimized monomorphic calls
 288   // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
 289   // for calling directly to vep without using the inline cache (i.e., cached_value == NULL).
 290   // For JVMCI this occurs because CHA is only used to improve inlining, so call sites that could be
 291   // optimized virtuals (because there are no currently loaded subclasses of a type) are left as virtual call sites.
 292 #ifdef ASSERT
 293   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
 294   bool is_c1_or_jvmci_method = caller->is_compiled_by_c1() || caller->is_compiled_by_jvmci();
 295   assert( is_c1_or_jvmci_method ||
 296          !is_monomorphic ||


 508         // Use stub entry
 509         info.set_interpreter_entry(method()->get_c2i_entry(), method());
 510       }
 511     } else {
 512       // Use icholder entry
 513       assert(method_code == NULL || method_code->is_compiled(), "must be compiled");
 514       CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
 515       info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
 516     }
 517   }
 518   assert(info.is_optimized() == is_optimized, "must agree");
 519 }
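
The fragment above chooses between two interpreter-bound entries. Roughly (toy sketch with hypothetical names): the verified c2i entry is used when the receiver does not need re-checking, while the icholder path pairs the unverified c2i entry with a CompiledICHolder so the stub can re-validate the receiver klass at call time.

#include <cstdio>

struct Klass { const char* name; };

struct Holder {              // stands in for CompiledICHolder(method, receiver_klass)
  Klass* expected_klass;
};

// Mimics the check performed behind get_c2i_unverified_entry(): compare the
// receiver's klass with the one recorded in the holder before interpreting.
void c2i_unverified_entry(Holder* h, Klass* receiver_klass) {
  if (receiver_klass != h->expected_klass) {
    std::puts("klass mismatch -> take the IC miss path");
    return;
  }
  std::puts("receiver verified -> continue into the interpreter");
}
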
 520 
 521 
 522 bool CompiledIC::is_icholder_entry(address entry) {
 523   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
 524   if (cb != NULL && cb->is_adapter_blob()) {
 525     return true;
 526   }
 527   // itable stubs also use CompiledICHolder
 528   if (VtableStubs::is_entry_point(entry) && VtableStubs::stub_containing(entry)->is_itable_stub()) {
 529     return true;
 530   }
 531   return false;
 532 }
 533 
 534 bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
 535   // This call site might have become stale so inspect it carefully.
 536   address dest = cm->call_wrapper_at(call_site->addr())->destination();
 537   return is_icholder_entry(dest);
 538 }
 539 
 540 // Release the CompiledICHolder* associated with this call site if there is one.
 541 void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
 542   assert(cm->is_nmethod(), "must be nmethod");
 543   // This call site might have become stale so inspect it carefully.
 544   NativeCall* call = nativeCall_at(call_site->addr());
 545   if (is_icholder_entry(call->destination())) {
 546     NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
 547     InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
 548   }
 549 }
 550 

New version:

 218 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
 219   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 220   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
 221   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 222 
 223   address entry;
 224   if (call_info->call_kind() == CallInfo::itable_call) {
 225     assert(bytecode == Bytecodes::_invokeinterface, "");
 226     int itable_index = call_info->itable_index();
 227     entry = VtableStubs::find_itable_stub(itable_index);
 228     if (entry == NULL) {
 229       return false;
 230     }
 231 #ifdef ASSERT
 232     int index = call_info->resolved_method()->itable_index();
 233     assert(index == itable_index, "CallInfo pre-computes this");
 234     InstanceKlass* k = call_info->resolved_method()->method_holder();
 235     assert(k->verify_itable_index(itable_index), "sanity check");
 236 #endif //ASSERT
 237     CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
 238                                                     call_info->resolved_klass(), false);
 239     holder->claim();
 240     InlineCacheBuffer::create_transition_stub(this, holder, entry);
 241   } else {
 242     assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
 243     // Can be different than selected_method->vtable_index(), due to package-private etc.
 244     int vtable_index = call_info->vtable_index();
 245     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
 246     entry = VtableStubs::find_vtable_stub(vtable_index);
 247     if (entry == NULL) {
 248       return false;
 249     }
 250     InlineCacheBuffer::create_transition_stub(this, NULL, entry);
 251   }
 252 
 253   if (TraceICs) {
 254     ResourceMark rm;
 255     tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
 256                    p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
 257   }
 258 
 259   // We can't check this anymore. With lazy deopt we could have already
 260   // cleaned this IC entry before we even return. This is possible if
 261   // we ran out of space in the inline cache buffer trying to do the
 262   // set_next and we safepointed to free up space. This is a benign
 263   // race because the IC entry was complete when we safepointed so
 264   // cleaning it immediately is harmless.
 265   // assert(is_megamorphic(), "sanity check");
 266   return true;
 267 }
 268 
 269 
 270 // true if destination is megamorphic stub
 271 bool CompiledIC::is_megamorphic() const {
 272   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 273   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 274 
 275   // Cannot rely on cached_value. It is either an interface or a method.
 276   return VtableStubs::entry_point(ic_destination()) != NULL;
 277 }
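
The updated check suggests the VtableStubs query changed from a boolean is_entry_point() to an entry_point() accessor that returns the stub itself. A compilable toy model of that assumed shape (the real declaration lives in vtableStubs.hpp and may differ):

#include <cstddef>
#include <map>

typedef unsigned char* address;

struct VtableStub { bool itable; };

static std::map<address, VtableStub*> g_stub_by_entry;   // hypothetical storage

// New form: return the stub registered at 'pc', or NULL when 'pc' is not a
// vtable/itable stub entry point.
VtableStub* entry_point(address pc) {
  std::map<address, VtableStub*>::iterator it = g_stub_by_entry.find(pc);
  return it == g_stub_by_entry.end() ? NULL : it->second;
}

// The old boolean query reduces to a comparison at the call site, exactly as
// is_megamorphic() now does: entry_point(pc) != NULL.
bool is_entry_point(address pc) { return entry_point(pc) != NULL; }
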
 278 
 279 bool CompiledIC::is_call_to_compiled() const {
 280   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 281 
 282   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
 283   // method is guaranteed to still exist, since we only remove methods after all inline caches
 284   // have been cleaned up.
 285   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 286   bool is_monomorphic = (cb != NULL && cb->is_compiled());
 287   // Check that the cached_value is a klass for non-optimized monomorphic calls
 288   // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
 289   // for calling directly to vep without using the inline cache (i.e., cached_value == NULL).
 290   // For JVMCI this occurs because CHA is only used to improve inlining, so call sites that could be
 291   // optimized virtuals (because there are no currently loaded subclasses of a type) are left as virtual call sites.
 292 #ifdef ASSERT
 293   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
 294   bool is_c1_or_jvmci_method = caller->is_compiled_by_c1() || caller->is_compiled_by_jvmci();
 295   assert( is_c1_or_jvmci_method ||
 296          !is_monomorphic ||


 508         // Use stub entry
 509         info.set_interpreter_entry(method()->get_c2i_entry(), method());
 510       }
 511     } else {
 512       // Use icholder entry
 513       assert(method_code == NULL || method_code->is_compiled(), "must be compiled");
 514       CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
 515       info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
 516     }
 517   }
 518   assert(info.is_optimized() == is_optimized, "must agree");
 519 }
 520 
 521 
 522 bool CompiledIC::is_icholder_entry(address entry) {
 523   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
 524   if (cb != NULL && cb->is_adapter_blob()) {
 525     return true;
 526   }
 527   // itable stubs also use CompiledICHolder
 528   if (cb != NULL && cb->is_vtable_blob()) {
 529     VtableStub* s = VtableStubs::entry_point(entry);
 530     return (s != NULL) && s->is_itable_stub();
 531   }
 532 
 533   return false;
 534 }
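
One reading of the rewrite above (a sketch under assumed types, not the real ones): the blob-type test now gates the stub-table probe, and because entry_point() hands back the stub directly, the old two-step is_entry_point() plus stub_containing() lookup collapses into a single probe.

#include <cstddef>

struct Blob {
  bool adapter;       // c2i adapter blobs always pass a CompiledICHolder
  bool vtable_blob;   // blob area holding vtable/itable stubs
};

struct Stub { bool itable; };

Stub* lookup_stub(void* entry);   // stand-in for VtableStubs::entry_point()

bool is_icholder_entry_sketch(void* entry, Blob* cb) {
  if (cb != NULL && cb->adapter) {
    return true;                        // interpreted target reached via adapter
  }
  if (cb != NULL && cb->vtable_blob) {  // only probe the stub table for stub blobs
    Stub* s = lookup_stub(entry);
    return s != NULL && s->itable;      // itable stubs also carry a CompiledICHolder
  }
  return false;                         // e.g. compiled target: cached value is a Klass*
}
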
 535 
 536 bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
 537   // This call site might have become stale so inspect it carefully.
 538   address dest = cm->call_wrapper_at(call_site->addr())->destination();
 539   return is_icholder_entry(dest);
 540 }
 541 
 542 // Release the CompiledICHolder* associated with this call site if there is one.
 543 void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
 544   assert(cm->is_nmethod(), "must be nmethod");
 545   // This call site might have become stale so inspect it carefully.
 546   NativeCall* call = nativeCall_at(call_site->addr());
 547   if (is_icholder_entry(call->destination())) {
 548     NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
 549     InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
 550   }
 551 }
 552
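
Why cleanup_call_site() uses queue_for_release() instead of deleting the holder outright: a thread may still be executing through the stale IC stub, so the holder must stay alive until a point where no such execution is possible. A toy model of that deferred-release idea (illustrative only, not the InlineCacheBuffer implementation):

#include <cstddef>
#include <vector>

struct CompiledICHolder {
  // In the VM this pairs a Method* or interface Klass* with the receiver klass.
};

static std::vector<CompiledICHolder*> g_release_queue;

void queue_for_release(CompiledICHolder* holder) {
  g_release_queue.push_back(holder);   // defer: an in-flight dispatch may still read it
}

void purge_queue_at_safepoint() {
  // At a safepoint no mutator is executing through an IC stub, so the queued
  // holders can be freed safely.
  for (std::size_t i = 0; i < g_release_queue.size(); i++) {
    delete g_release_queue[i];
  }
  g_release_queue.clear();
}
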