src/share/vm/code/compiledIC.cpp

rev 4449 : 8003853: specify offset of IC load in java_to_interp stub

If a compiled static call calls the interpreter, it jumps to a
java_to_interp stub in the compiled code. Patching this call must
find the load of the IC. So far the shared code assumed this load is
the first instruction in the stub.
This might not be the case if, for example, the base of the constant
table must be loaded first, as happens on PPC.  The position of the IC
load is therefore platform dependent, but it is used in shared code.

To fix this, this change cleans up the code handling compiled static
Java calls:
 - Methods using native instructions are moved from compiledIC.cpp
   to compiledIC_<cpu>.cpp.
   With this, the offset can be implemented in a platform-dependent way.
 - Methods emitting the stub are moved from the ad files into this class
   (see the sketch below the list).
   As a side effect this reduces redundancies in x86_64.ad and x86_32.ad.
 - We get rid of extern declarations in output.cpp.
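
For illustration, a minimal sketch of the per-platform emitter that replaces
the ad-file stub code; the names (emit_to_interp_stub, to_interp_stub_size)
and the exact assembler calls are assumptions for an x86-like port, not part
of this diff:

  #define __ _masm.
  void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
    // The stub is fixed up when the call is converted from calling compiled
    // code to calling interpreted code:
    //   movq rbx, 0   # placeholder, later patched with the Method*
    //   jmp  -5       # jump to self, later patched with the c2i entry
    address mark = cbuf.insts_mark();       // address of the call being stubbed
    MacroAssembler _masm(&cbuf);
    address base = __ start_a_stub(to_interp_stub_size());
    if (base == NULL) return;               // CodeBuffer::expand failed
    // The static stub relocation stores the address of the call instruction.
    __ relocate(static_stub_Relocation::spec(mark));
    __ mov_metadata(rbx, (Metadata*)NULL);  // Method* is zapped until fixup time
    __ jump(RuntimeAddress(__ pc()));       // jump to self until fixup time
    __ end_a_stub();
  }
  #undef __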

Now all the code concerning CompiledStaticCalls is collected in one class,
except for emitting the call itself.  The PPC port no longer needs to change
shared code to implement them.
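
To make the point concrete, here is a sketch, based on the set_stub_to_clean
removed below, of how the platform file can account for a stub that does not
start with the IC load.  The helper ic_load_offset_in_stub() is hypothetical:
it would return 0 on x86, and on a PPC-like port the size of the leading load
of the constant-table base:

  void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
    assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
    // Reset stub.
    address stub = static_stub->addr();
    assert(stub != NULL, "stub not found");
    // The platform file, not shared code, knows where the IC load starts.
    NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + ic_load_offset_in_stub());
    NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
    method_holder->set_data(0);
    jump->set_jump_destination((address)-1);
  }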

@@ -43,29 +43,10 @@
 
 
 // Every time a compiled IC is changed or its type is being accessed,
 // either the CompiledIC_lock must be set or we must be at a safe point.
 
-
-// Release the CompiledICHolder* associated with this call site if there is one.
-void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
-  // This call site might have become stale so inspect it carefully.
-  NativeCall* call = nativeCall_at(call_site->addr());
-  if (is_icholder_entry(call->destination())) {
-    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
-    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
-  }
-}
-
-
-bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
-  // This call site might have become stale so inspect it carefully.
-  NativeCall* call = nativeCall_at(call_site->addr());
-  return is_icholder_entry(call->destination());
-}
-
-
 //-----------------------------------------------------------------------------
 // Low-level access to an inline cache. Private, since they might not be
 // MT-safe to use.
 
 void* CompiledIC::cached_value() const {

@@ -486,37 +467,10 @@
 bool CompiledIC::is_icholder_entry(address entry) {
   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
   return (cb != NULL && cb->is_adapter_blob());
 }
 
-
-CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
-  : _ic_call(call)
-{
-  address ic_call = call->instruction_address();
-
-  assert(ic_call != NULL, "ic_call address must be set");
-  assert(nm != NULL, "must pass nmethod");
-  assert(nm->contains(ic_call),   "must be in nmethod");
-
-  // search for the ic_call at the given address
-  RelocIterator iter(nm, ic_call, ic_call+1);
-  bool ret = iter.next();
-  assert(ret == true, "relocInfo must exist at this address");
-  assert(iter.addr() == ic_call, "must find ic_call");
-  if (iter.type() == relocInfo::virtual_call_type) {
-    virtual_call_Relocation* r = iter.virtual_call_reloc();
-    _is_optimized = false;
-    _value = nativeMovConstReg_at(r->cached_value());
-  } else {
-    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
-    _is_optimized = true;
-    _value = NULL;
-}
-}
-
-
 // ----------------------------------------------------------------------------
 
 void CompiledStaticCall::set_to_clean() {
   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset call site

@@ -547,37 +501,10 @@
   // must be in the stub part of the nmethod that contains the call
   nmethod* nm = CodeCache::find_nmethod(instruction_address());
   return nm->stub_contains(destination());
 }
 
-
-void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
-  address stub=find_stub();
-  guarantee(stub != NULL, "stub not found");
-
-  if (TraceICs) {
-    ResourceMark rm;
-    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
-                  instruction_address(),
-                  callee->name_and_sig_as_C_string());
-  }
-
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-
-  assert(method_holder->data()    == 0           || method_holder->data()    == (intptr_t)callee(), "a) MT-unsafe modification of inline cache");
-  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry, "b) MT-unsafe modification of inline cache");
-
-  // Update stub
-  method_holder->set_data((intptr_t)callee());
-  jump->set_jump_destination(entry);
-
-  // Update jump to call
-  set_destination_mt_safe(stub);
-}
-
-
 void CompiledStaticCall::set(const StaticCallInfo& info) {
   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
   // Updating a cache to the wrong entry can cause bugs that are very hard
   // to track down - if cache entry gets invalid - we just clean it. In

@@ -616,23 +543,10 @@
     info._to_interpreter = true;
     info._entry      = m()->get_c2i_entry();
   }
 }
 
-
-void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
-  // Reset stub
-  address stub = static_stub->addr();
-  assert(stub!=NULL, "stub not found");
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-  method_holder->set_data(0);
-  jump->set_jump_destination((address)-1);
-}
-
-
 address CompiledStaticCall::find_stub() {
   // Find reloc. information containing this call-site
   RelocIterator iter((nmethod*)NULL, instruction_address());
   while (iter.next()) {
     if (iter.addr() == instruction_address()) {

@@ -666,23 +580,20 @@
   }
   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
           || is_optimized() || is_megamorphic(), "sanity check");
 }
 
-
 void CompiledIC::print() {
   print_compiled_ic();
   tty->cr();
 }
 
-
 void CompiledIC::print_compiled_ic() {
   tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
              instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination(), is_optimized() ? NULL : cached_value());
 }
 
-
 void CompiledStaticCall::print() {
   tty->print("static call at " INTPTR_FORMAT " -> ", instruction_address());
   if (is_clean()) {
     tty->print("clean");
   } else if (is_call_to_compiled()) {

@@ -691,23 +602,6 @@
     tty->print("interpreted");
   }
   tty->cr();
 }
 
-void CompiledStaticCall::verify() {
-  // Verify call
-  NativeCall::verify();
-  if (os::is_MP()) {
-    verify_alignment();
-  }
-
-  // Verify stub
-  address stub = find_stub();
-  assert(stub != NULL, "no stub found for static call");
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-
-  // Verify state
-  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
-}
-
-#endif
+#endif // !PRODUCT