src/share/vm/code/compiledIC.cpp

 263 
 264 // true if destination is megamorphic stub
 265 bool CompiledIC::is_megamorphic() const {
 266   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 267   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 268 
 269   // Cannot rely on cached_value. It is either an interface or a method.
 270   return VtableStubs::is_entry_point(ic_destination());
 271 }
 272 
 273 bool CompiledIC::is_call_to_compiled() const {
 274   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 275 
 276   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
 277   // method is guaranteed to still exist, since we only remove methods after all inline caches
 278   // have been cleaned up
 279   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 280   bool is_monomorphic = (cb != NULL && cb->is_nmethod());
 281   // Check that the cached_value is a klass for non-optimized monomorphic calls
 282   // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
 283   // for calling directly to the verified entry point (vep) without using the inline cache (i.e., cached_value == NULL)
 284 #ifdef ASSERT
 285   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
 286   bool is_c1_method = caller->is_compiled_by_c1();
 287   assert( is_c1_method ||
 288          !is_monomorphic ||
 289          is_optimized() ||
 290          (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
 291 #endif // ASSERT
 292   return is_monomorphic;
 293 }
 294 
 295 
 296 bool CompiledIC::is_call_to_interpreted() const {
 297   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 298   // The call goes to the interpreter if the destination is either a stub (when the call
 299   // is optimized) or an I2C blob.
 300   bool is_call_to_interpreted = false;
 301   if (!is_optimized()) {
 302     // must use unsafe because the destination can be a zombie (and we're cleaning)
 303     // and the print_compiled_ic code wants to know whether the call site (in the non-zombie nmethod)
 304     // goes to the interpreter.
 305     CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 306     is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
 307     assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");




 263 
 264 // true if destination is megamorphic stub
 265 bool CompiledIC::is_megamorphic() const {
 266   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 267   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 268 
 269   // Cannot rely on cached_value. It is either an interface or a method.
 270   return VtableStubs::is_entry_point(ic_destination());
 271 }
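
The check above relies only on where the call currently lands: as the comment notes, the cached value is ambiguous (it may be an interface holder or a method), so the site is classified as megamorphic exactly when the destination is a vtable/itable stub entry point (VtableStubs::is_entry_point in the code above). The following standalone sketch models that idea; CodeRegion, InlineCacheSite, and the address bounds are hypothetical stand-ins, not the HotSpot VtableStubs API.

#include <cassert>
#include <cstdint>

// Hypothetical region holding all vtable/itable dispatch stubs.
struct CodeRegion { std::uintptr_t begin; std::uintptr_t end; };
static CodeRegion g_vtable_stubs = { 0x1000, 0x2000 };  // assumed bounds for the example

struct InlineCacheSite {
  std::uintptr_t destination;   // where the call currently jumps
  void*          cached_value;  // Method*, Klass*, or interface holder -- ambiguous on its own

  // Megamorphic iff the destination falls inside the stub region,
  // mirroring the destination-based test in is_megamorphic().
  bool is_megamorphic() const {
    return destination >= g_vtable_stubs.begin && destination < g_vtable_stubs.end;
  }
};

int main() {
  InlineCacheSite mono = { 0x4000, nullptr };  // calls a concrete compiled entry
  InlineCacheSite mega = { 0x1800, nullptr };  // calls into the stub region
  assert(!mono.is_megamorphic());
  assert(mega.is_megamorphic());
  return 0;
}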
 272 
 273 bool CompiledIC::is_call_to_compiled() const {
 274   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 275 
 276   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
 277   // method is guaranteed to still exist, since we only remove methods after all inline caches
 278   // have been cleaned up
 279   CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 280   bool is_monomorphic = (cb != NULL && cb->is_nmethod());
 281   // Check that the cached_value is a klass for non-optimized monomorphic calls
 282   // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used
 283   // for calling directly to the verified entry point (vep) without using the inline cache (i.e., cached_value == NULL).
 284   // For JVMCI this occurs because CHA is only used to improve inlining, so call sites that could have been
 285   // optimized (because a type currently has no loaded subclasses) are left as virtual call sites.
 286 #ifdef ASSERT
 287   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
 288   bool is_c1_or_jvmci_method = caller->is_compiled_by_c1() || caller->is_compiled_by_jvmci();
 289   assert( is_c1_or_jvmci_method ||
 290          !is_monomorphic ||
 291          is_optimized() ||
 292          (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
 293 #endif // ASSERT
 294   return is_monomorphic;
 295 }
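
The ASSERT block encodes the invariant described in the comment: for a non-optimized monomorphic call the inline cache should be caching a Klass, except when the caller was compiled by C1 or JVMCI, which may bind such a call directly to the verified entry point with nothing cached. Below is a minimal, self-contained sketch of that predicate; CompilerKind, MetadataKind, and CallSiteModel are hypothetical names used only for illustration, not HotSpot types.

#include <cassert>

enum class CompilerKind { C1, C2, JVMCI };
enum class MetadataKind { None, Method, Klass };

struct CallSiteModel {
  CompilerKind caller;           // which compiler produced the calling nmethod
  bool         monomorphic;      // destination is a compiled method
  bool         optimized;        // optimized call, no inline-cache data needed
  MetadataKind cached_metadata;  // what the inline cache currently caches

  // Mirrors the assertion: only a non-optimized monomorphic call from a
  // caller that is neither C1 nor JVMCI must be caching a Klass.
  bool passes_sanity_check() const {
    bool relaxed = (caller == CompilerKind::C1 || caller == CompilerKind::JVMCI);
    return relaxed || !monomorphic || optimized ||
           cached_metadata == MetadataKind::Klass;
  }
};

int main() {
  CallSiteModel c2_site    = { CompilerKind::C2,    true, false, MetadataKind::Klass };
  CallSiteModel jvmci_site = { CompilerKind::JVMCI, true, false, MetadataKind::None  };
  assert(c2_site.passes_sanity_check());    // C2 caller: Klass must be cached
  assert(jvmci_site.passes_sanity_check()); // JVMCI caller: nothing cached is allowed
  return 0;
}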
 296 
 297 
 298 bool CompiledIC::is_call_to_interpreted() const {
 299   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
 300   // The call goes to the interpreter if the destination is either a stub (when the call
 301   // is optimized) or an I2C blob.
 302   bool is_call_to_interpreted = false;
 303   if (!is_optimized()) {
 304     // must use unsafe because the destination can be a zombie (and we're cleaning)
 305     // and the print_compiled_ic code wants to know whether the call site (in the non-zombie nmethod)
 306     // goes to the interpreter.
 307     CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
 308     is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
 309     assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");

