src/share/vm/code/nmethod.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/code

src/share/vm/code/nmethod.cpp

Print this page




 383     stub_size()          +
 384     scopes_data_size()   +
 385     scopes_pcs_size()    +
 386     handler_table_size() +
 387     nul_chk_table_size();
 388 }
 389 
 390 const char* nmethod::compile_kind() const {
 391   if (is_osr_method())     return "osr";
 392   if (method() != NULL && is_native_method())  return "c2n";
 393   return NULL;
 394 }
 395 
 396 // Fill in default values for various flag fields
 397 void nmethod::init_defaults() {
 398   _state                      = in_use;
 399   _has_flushed_dependencies   = 0;
 400   _lock_count                 = 0;
 401   _stack_traversal_mark       = 0;
 402   _unload_reported            = false; // jvmti state

 403 
 404 #ifdef ASSERT
 405   _oops_are_stale             = false;
 406 #endif
 407 
 408   _oops_do_mark_link       = NULL;
 409   _jmethod_id              = NULL;
 410   _osr_link                = NULL;
 411   if (UseG1GC) {
 412     _unloading_next        = NULL;
 413   } else {
 414     _scavenge_root_link    = NULL;
 415   }
 416   _scavenge_root_state     = 0;
 417 #if INCLUDE_RTM_OPT
 418   _rtm_state               = NoRTM;
 419 #endif
 420 #if INCLUDE_JVMCI
 421   _jvmci_installed_code   = NULL;
 422   _speculation_log        = NULL;


2037   // make sure you didn't forget to adjust the filler fields
2038   assert(sizeof(nmethod) % oopSize == 0, "nmethod size must be multiple of a word");
2039 }
2040 
2041 
2042 //-------------------------------------------------------------------------------------------
2043 
2044 
2045 // QQQ might we make this work from a frame??
// Locks the compiled method containing the given pc so it cannot be
// flushed while this locker is alive. The pc must map to a compiled
// CodeBlob; anything else is a fatal error (guarantee).
nmethodLocker::nmethodLocker(address pc) {
  CodeBlob* cb = CodeCache::find_blob(pc);
  guarantee(cb != NULL && cb->is_compiled(), "bad pc for a nmethod found");
  _nm = cb->as_compiled_method();
  lock_nmethod(_nm);
}
2052 
2053 // Only JvmtiDeferredEvent::compiled_method_unload_event()
2054 // should pass zombie_ok == true.
// Bumps the nmethod's lock count so it is kept alive while locked.
// NULL is tolerated so callers need not check. The assert runs AFTER the
// increment on purpose: lock first, then verify the method was not
// already a zombie (the JVMTI compiled-method-unload path legitimately
// locks zombies and passes zombie_ok == true).
void nmethodLocker::lock_nmethod(CompiledMethod* cm, bool zombie_ok) {
  if (cm == NULL)  return;
  nmethod* nm = cm->as_nmethod();
  Atomic::inc(&nm->_lock_count);
  assert(zombie_ok || !nm->is_zombie(), "cannot lock a zombie method");
}
2061 
// Releases a lock taken by lock_nmethod; NULL is tolerated to mirror it.
// The assert catches unbalanced lock/unlock pairs; note the count is
// re-read non-atomically after the decrement, which is adequate for a
// debug-only sanity check.
void nmethodLocker::unlock_nmethod(CompiledMethod* cm) {
  if (cm == NULL)  return;
  nmethod* nm = cm->as_nmethod();
  Atomic::dec(&nm->_lock_count);
  assert(nm->_lock_count >= 0, "unmatched nmethod lock/unlock");
}
2068 
2069 
2070 // -----------------------------------------------------------------------------
2071 // Verification
2072 
2073 class VerifyOopsClosure: public OopClosure {
2074   nmethod* _nm;
2075   bool     _ok;
2076 public:
2077   VerifyOopsClosure(nmethod* nm) : _nm(nm), _ok(true) { }
2078   bool ok() { return _ok; }
2079   virtual void do_oop(oop* p) {
2080     if ((*p) == NULL || (*p)->is_oop())  return;
2081     if (_ok) {
2082       _nm->print_nmethod(true);
2083       _ok = false;


2153                                      pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
2154                                      pd->return_oop());
2155        !sd->is_top(); sd = sd->sender()) {
2156     sd->verify();
2157   }
2158 }
2159 
2160 void nmethod::verify_scopes() {
2161   if( !method() ) return;       // Runtime stubs have no scope
2162   if (method()->is_native()) return; // Ignore stub methods.
2163   // iterate through all interrupt point
2164   // and verify the debug information is valid.
2165   RelocIterator iter((nmethod*)this);
2166   while (iter.next()) {
2167     address stub = NULL;
2168     switch (iter.type()) {
2169       case relocInfo::virtual_call_type:
2170         verify_interrupt_point(iter.addr());
2171         break;
2172       case relocInfo::opt_virtual_call_type:
2173         stub = iter.opt_virtual_call_reloc()->static_stub();
2174         verify_interrupt_point(iter.addr());
2175         break;
2176       case relocInfo::static_call_type:
2177         stub = iter.static_call_reloc()->static_stub();
2178         //verify_interrupt_point(iter.addr());
2179         break;
2180       case relocInfo::runtime_call_type:
2181       case relocInfo::runtime_call_w_cp_type:
2182         address destination = iter.reloc()->value();
2183         // Right now there is no way to find out which entries support
2184         // an interrupt point.  It would be nice if we had this
2185         // information in a table.
2186         break;
2187     }
2188     assert(stub == NULL || stub_contains(stub), "static call stub outside stub section");
2189   }
2190 }
2191 
2192 
2193 // -----------------------------------------------------------------------------
2194 // Non-product code
2195 #ifndef PRODUCT
2196 
2197 class DebugScavengeRoot: public OopClosure {


2707       }
2708       st->cr();
2709     }
2710   }
2711 
2712   // Print relocation information
2713   const char* str = reloc_string_for(begin, end);
2714   if (str != NULL) {
2715     if (sd != NULL) st->cr();
2716     st->move_to(column);
2717     st->print(";   {%s}", str);
2718   }
2719   int cont_offset = ImplicitExceptionTable(this).at(begin - code_begin());
2720   if (cont_offset != 0) {
2721     st->move_to(column);
2722     st->print("; implicit exception: dispatches to " INTPTR_FORMAT, p2i(code_begin() + cont_offset));
2723   }
2724 
2725 }
2726 












































































































2727 #ifndef PRODUCT
2728 
2729 void nmethod::print_value_on(outputStream* st) const {
2730   st->print("nmethod");
2731   print_on(st, NULL);
2732 }
2733 
2734 void nmethod::print_calls(outputStream* st) {
2735   RelocIterator iter(this);
2736   while (iter.next()) {
2737     switch (iter.type()) {
2738     case relocInfo::virtual_call_type:
2739     case relocInfo::opt_virtual_call_type: {
2740       VerifyMutexLocker mc(CompiledIC_lock);
2741       CompiledIC_at(&iter)->print();
2742       break;
2743     }
2744     case relocInfo::static_call_type:
2745       st->print_cr("Static call at " INTPTR_FORMAT, p2i(iter.reloc()->addr()));
2746       compiledStaticCall_at(iter.reloc())->print();
2747       break;
2748     }
2749   }
2750 }
2751 
// Debug helper: prints this nmethod's exception handler table.
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print();
}
2755 
// Debug helper: prints the implicit null-check (exception) table, with
// offsets resolved relative to the start of the code section.
void nmethod::print_nul_chk_table() {
  ImplicitExceptionTable(this).print(code_begin());
}
2759 
2760 void nmethod::print_statistics() {
2761   ttyLocker ttyl;
2762   if (xtty != NULL)  xtty->head("statistics type='nmethod'");
2763   native_nmethod_stats.print_native_nmethod_stats();
2764 #ifdef COMPILER1
2765   c1_java_nmethod_stats.print_nmethod_stats("C1");
2766 #endif




 383     stub_size()          +
 384     scopes_data_size()   +
 385     scopes_pcs_size()    +
 386     handler_table_size() +
 387     nul_chk_table_size();
 388 }
 389 
 390 const char* nmethod::compile_kind() const {
 391   if (is_osr_method())     return "osr";
 392   if (method() != NULL && is_native_method())  return "c2n";
 393   return NULL;
 394 }
 395 
 396 // Fill in default values for various flag fields
 397 void nmethod::init_defaults() {
 398   _state                      = in_use;
 399   _has_flushed_dependencies   = 0;
 400   _lock_count                 = 0;
 401   _stack_traversal_mark       = 0;
 402   _unload_reported            = false; // jvmti state
 403   _is_far_code                = false; // nmethods are located in CodeCache
 404 
 405 #ifdef ASSERT
 406   _oops_are_stale             = false;
 407 #endif
 408 
 409   _oops_do_mark_link       = NULL;
 410   _jmethod_id              = NULL;
 411   _osr_link                = NULL;
 412   if (UseG1GC) {
 413     _unloading_next        = NULL;
 414   } else {
 415     _scavenge_root_link    = NULL;
 416   }
 417   _scavenge_root_state     = 0;
 418 #if INCLUDE_RTM_OPT
 419   _rtm_state               = NoRTM;
 420 #endif
 421 #if INCLUDE_JVMCI
 422   _jvmci_installed_code   = NULL;
 423   _speculation_log        = NULL;


2038   // make sure you didn't forget to adjust the filler fields
2039   assert(sizeof(nmethod) % oopSize == 0, "nmethod size must be multiple of a word");
2040 }
2041 
2042 
2043 //-------------------------------------------------------------------------------------------
2044 
2045 
2046 // QQQ might we make this work from a frame??
// Locks the compiled method containing the given pc so it cannot be
// flushed while this locker is alive. The pc must map to a compiled
// CodeBlob; anything else is a fatal error (guarantee).
nmethodLocker::nmethodLocker(address pc) {
  CodeBlob* cb = CodeCache::find_blob(pc);
  guarantee(cb != NULL && cb->is_compiled(), "bad pc for a nmethod found");
  _nm = cb->as_compiled_method();
  lock_nmethod(_nm);
}
2053 
2054 // Only JvmtiDeferredEvent::compiled_method_unload_event()
2055 // should pass zombie_ok == true.
// Bumps the nmethod's lock count so it is kept alive while locked.
// NULL is tolerated so callers need not check. AOT methods currently
// have no _lock_count (see FIXME below), so they are skipped. The assert
// runs AFTER the increment on purpose: lock first, then verify the method
// was not already a zombie (the JVMTI compiled-method-unload path
// legitimately locks zombies and passes zombie_ok == true).
void nmethodLocker::lock_nmethod(CompiledMethod* cm, bool zombie_ok) {
  if (cm == NULL)  return;
  if (cm->is_aot()) return;  // FIXME: Revisit once _lock_count is added to aot_method
  nmethod* nm = cm->as_nmethod();
  Atomic::inc(&nm->_lock_count);
  assert(zombie_ok || !nm->is_zombie(), "cannot lock a zombie method");
}
2063 
// Releases a lock taken by lock_nmethod; NULL and AOT methods are
// tolerated/skipped to mirror lock_nmethod exactly. The assert catches
// unbalanced lock/unlock pairs; note the count is re-read non-atomically
// after the decrement, which is adequate for a debug-only sanity check.
void nmethodLocker::unlock_nmethod(CompiledMethod* cm) {
  if (cm == NULL)  return;
  if (cm->is_aot()) return;  // FIXME: Revisit once _lock_count is added to aot_method
  nmethod* nm = cm->as_nmethod();
  Atomic::dec(&nm->_lock_count);
  assert(nm->_lock_count >= 0, "unmatched nmethod lock/unlock");
}
2071 
2072 
2073 // -----------------------------------------------------------------------------
2074 // Verification
2075 
2076 class VerifyOopsClosure: public OopClosure {
2077   nmethod* _nm;
2078   bool     _ok;
2079 public:
2080   VerifyOopsClosure(nmethod* nm) : _nm(nm), _ok(true) { }
2081   bool ok() { return _ok; }
2082   virtual void do_oop(oop* p) {
2083     if ((*p) == NULL || (*p)->is_oop())  return;
2084     if (_ok) {
2085       _nm->print_nmethod(true);
2086       _ok = false;


2156                                      pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
2157                                      pd->return_oop());
2158        !sd->is_top(); sd = sd->sender()) {
2159     sd->verify();
2160   }
2161 }
2162 
2163 void nmethod::verify_scopes() {
2164   if( !method() ) return;       // Runtime stubs have no scope
2165   if (method()->is_native()) return; // Ignore stub methods.
2166   // iterate through all interrupt point
2167   // and verify the debug information is valid.
2168   RelocIterator iter((nmethod*)this);
2169   while (iter.next()) {
2170     address stub = NULL;
2171     switch (iter.type()) {
2172       case relocInfo::virtual_call_type:
2173         verify_interrupt_point(iter.addr());
2174         break;
2175       case relocInfo::opt_virtual_call_type:
2176         stub = iter.opt_virtual_call_reloc()->static_stub(false);
2177         verify_interrupt_point(iter.addr());
2178         break;
2179       case relocInfo::static_call_type:
2180         stub = iter.static_call_reloc()->static_stub(false);
2181         //verify_interrupt_point(iter.addr());
2182         break;
2183       case relocInfo::runtime_call_type:
2184       case relocInfo::runtime_call_w_cp_type:
2185         address destination = iter.reloc()->value();
2186         // Right now there is no way to find out which entries support
2187         // an interrupt point.  It would be nice if we had this
2188         // information in a table.
2189         break;
2190     }
2191     assert(stub == NULL || stub_contains(stub), "static call stub outside stub section");
2192   }
2193 }
2194 
2195 
2196 // -----------------------------------------------------------------------------
2197 // Non-product code
2198 #ifndef PRODUCT
2199 
2200 class DebugScavengeRoot: public OopClosure {


2710       }
2711       st->cr();
2712     }
2713   }
2714 
2715   // Print relocation information
2716   const char* str = reloc_string_for(begin, end);
2717   if (str != NULL) {
2718     if (sd != NULL) st->cr();
2719     st->move_to(column);
2720     st->print(";   {%s}", str);
2721   }
2722   int cont_offset = ImplicitExceptionTable(this).at(begin - code_begin());
2723   if (cont_offset != 0) {
2724     st->move_to(column);
2725     st->print("; implicit exception: dispatches to " INTPTR_FORMAT, p2i(code_begin() + cont_offset));
2726   }
2727 
2728 }
2729 
// Adapter that presents a plain NativeCall through the platform-neutral
// NativeCallWrapper interface, so the shared CompiledIC/CompiledStaticCall
// patching code can operate on nmethod call sites.
class DirectNativeCallWrapper: public NativeCallWrapper {
private:
  NativeCall* _call;   // the underlying native call instruction

public:
  DirectNativeCallWrapper(NativeCall* call) : _call(call) {}

  // Plain delegation to the wrapped call instruction.
  virtual address destination() const { return _call->destination(); }
  virtual address instruction_address() const { return _call->instruction_address(); }
  virtual address next_instruction_address() const { return _call->next_instruction_address(); }
  virtual address return_address() const { return _call->return_address(); }

  // Runtime stub this call should target while unresolved.
  virtual address get_resolve_call_stub(bool is_optimized) const {
    if (is_optimized) {
      return SharedRuntime::get_resolve_opt_virtual_call_stub();
    }
    return SharedRuntime::get_resolve_virtual_call_stub();
  }

  // Redirects the call to 'dest' in an MT-safe way. If the callee is
  // "far code" (AOT), the call is rerouted through set_to_far instead of
  // being patched directly (temporary fix, see JDK-8143106 below).
  virtual void set_destination_mt_safe(address dest) {
#if INCLUDE_AOT
    if (UseAOT) {
      CodeBlob* callee = CodeCache::find_blob(dest);
      CompiledMethod* cm = callee->as_compiled_method_or_null();
      if (cm != NULL && cm->is_far_code()) {
        // Temporary fix, see JDK-8143106
        CompiledDirectStaticCall* csc = CompiledDirectStaticCall::at(instruction_address());
        csc->set_to_far(methodHandle(cm->method()), dest);
        return;
      }
    }
#endif
    _call->set_destination_mt_safe(dest);
  }

  // Points the static call at the interpreter entry — or, when the
  // resolution info says the target is an AOT entry, at far code.
  virtual void set_to_interpreted(const methodHandle& method, CompiledICInfo& info) {
    CompiledDirectStaticCall* csc = CompiledDirectStaticCall::at(instruction_address());
#if INCLUDE_AOT
    if (info.to_aot()) {
      csc->set_to_far(method, info.entry());
    } else
#endif
    {
      csc->set_to_interpreted(method, info.entry());
    }
  }

  virtual void verify() const {
    // make sure code pattern is actually a call imm32 instruction
    _call->verify();
    if (os::is_MP()) {
      _call->verify_alignment();
    }
  }

  // Sanity-checks a resolved destination: adapters must be reached via a
  // resolve stub, never targeted directly by this call.
  virtual void verify_resolve_call(address dest) const {
    CodeBlob* db = CodeCache::find_blob_unsafe(dest);
    assert(!db->is_adapter_blob(), "must use stub!");
  }

  // True when 'dest' lies inside the same code blob as the call site
  // (i.e. the call dispatches through a local stub rather than leaving
  // this blob directly).
  virtual bool is_call_to_interpreted(address dest) const {
    CodeBlob* cb = CodeCache::find_blob(_call->instruction_address());
    return cb->contains(dest);
  }

  // Direct nmethod calls may not be patched without safepoint-style care.
  virtual bool is_safe_for_patching() const { return false; }

  // The instruction that loads an inline cache's cached value.
  virtual NativeInstruction* get_load_instruction(virtual_call_Relocation* r) const {
    return nativeMovConstReg_at(r->cached_value());
  }

  virtual void *get_data(NativeInstruction* instruction) const {
    return (void*)((NativeMovConstReg*) instruction)->data();
  }

  virtual void set_data(NativeInstruction* instruction, intptr_t data) {
    ((NativeMovConstReg*) instruction)->set_data(data);
  }
};
2809 
// Wraps the native call instruction at 'call' in a DirectNativeCallWrapper.
// NOTE(review): allocated with 'new' and never freed here — presumably
// resource/arena-allocated via the wrapper's allocation base class;
// confirm ownership with callers.
NativeCallWrapper* nmethod::call_wrapper_at(address call) const {
  return new DirectNativeCallWrapper((NativeCall*) call);
}
2813 
// Wraps the native call instruction that precedes 'return_pc' (i.e. the
// call whose return address is return_pc). Same allocation caveat as
// call_wrapper_at.
NativeCallWrapper* nmethod::call_wrapper_before(address return_pc) const {
  return new DirectNativeCallWrapper(nativeCall_before(return_pc));
}
2817 
2818 address nmethod::call_instruction_address(address pc) const {
2819   if (NativeCall::is_call_before(pc)) {
2820     NativeCall *ncall = nativeCall_before(pc);
2821     return ncall->instruction_address();
2822   }
2823   return NULL;
2824 }
2825 
// Resolves the static call at the given relocation; nmethods always use
// direct static calls (CompiledDirectStaticCall).
CompiledStaticCall* nmethod::compiledStaticCall_at(Relocation* call_site) const {
  return CompiledDirectStaticCall::at(call_site);
}
2829 
// Resolves the static call at the given instruction address; nmethods
// always use direct static calls (CompiledDirectStaticCall).
CompiledStaticCall* nmethod::compiledStaticCall_at(address call_site) const {
  return CompiledDirectStaticCall::at(call_site);
}
2833 
// Resolves the static call whose return address is 'return_addr';
// nmethods always use direct static calls (CompiledDirectStaticCall).
CompiledStaticCall* nmethod::compiledStaticCall_before(address return_addr) const {
  return CompiledDirectStaticCall::before(return_addr);
}
2837 
2838 #ifndef PRODUCT
2839 
2840 void nmethod::print_value_on(outputStream* st) const {
2841   st->print("nmethod");
2842   print_on(st, NULL);
2843 }
2844 
2845 void nmethod::print_calls(outputStream* st) {
2846   RelocIterator iter(this);
2847   while (iter.next()) {
2848     switch (iter.type()) {
2849     case relocInfo::virtual_call_type:
2850     case relocInfo::opt_virtual_call_type: {
2851       VerifyMutexLocker mc(CompiledIC_lock);
2852       CompiledIC_at(&iter)->print();
2853       break;
2854     }
2855     case relocInfo::static_call_type:
2856       st->print_cr("Static call at " INTPTR_FORMAT, p2i(iter.reloc()->addr()));
2857       CompiledDirectStaticCall::at(iter.reloc())->print();
2858       break;
2859     }
2860   }
2861 }
2862 
// Debug helper: prints this nmethod's exception handler table.
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print();
}
2866 
// Debug helper: prints the implicit null-check (exception) table, with
// offsets resolved relative to the start of the code section.
void nmethod::print_nul_chk_table() {
  ImplicitExceptionTable(this).print(code_begin());
}
2870 
2871 void nmethod::print_statistics() {
2872   ttyLocker ttyl;
2873   if (xtty != NULL)  xtty->head("statistics type='nmethod'");
2874   native_nmethod_stats.print_native_nmethod_stats();
2875 #ifdef COMPILER1
2876   c1_java_nmethod_stats.print_nmethod_stats("C1");
2877 #endif


src/share/vm/code/nmethod.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File