
src/hotspot/share/code/nmethod.cpp

rev 48535 : [mq]: 8194990


 938   // should not get GC'd.  Skip the first few bytes of oops on
 939   // not-entrant methods.
 940   address low_boundary = verified_entry_point();
 941   if (!is_in_use()) {
 942     low_boundary += NativeJump::instruction_size;
 943     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
 944     // This means that the low_boundary is going to be a little too high.
 945     // This shouldn't matter, since oops of non-entrant methods are never used.
 946     // In fact, why are we bothering to look at oops in a non-entrant method??
 947   }
 948 
 949   ResourceMark rm;
 950   RelocIterator iter(this, low_boundary);
 951   while(iter.next()) {
 952     switch(iter.type()) {
 953       case relocInfo::virtual_call_type:
 954       case relocInfo::opt_virtual_call_type: {
 955         CompiledIC *ic = CompiledIC_at(&iter);
 956         // Ok, to lookup references to zombies here
 957         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());

 958         nmethod* nm = cb->as_nmethod_or_null();
 959         if( nm != NULL ) {
 960           // Verify that inline caches pointing to both zombie and not_entrant methods are clean
 961           if (!nm->is_in_use() || (nm->method()->code() != nm)) {
 962             assert(ic->is_clean(), "IC should be clean");
 963           }
 964         }
 965         break;
 966       }
 967       case relocInfo::static_call_type: {
 968         CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
 969         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());

 970         nmethod* nm = cb->as_nmethod_or_null();
 971         if( nm != NULL ) {
 972           // Verify that inline caches pointing to both zombie and not_entrant methods are clean
 973           if (!nm->is_in_use() || (nm->method()->code() != nm)) {
 974             assert(csc->is_clean(), "IC should be clean");
 975           }
 976         }
 977         break;
 978       }
 979       default:
 980         break;
 981     }
 982   }
 983 }
 984 
 985 // This is a private interface with the sweeper.
 986 void nmethod::mark_as_seen_on_stack() {
 987   assert(is_alive(), "Must be an alive method");
 988   // Set the traversal mark to ensure that the sweeper does 2
 989   // cleaning passes before moving to zombie.


2715 #if INCLUDE_AOT
2716     if (info.to_aot()) {
2717       csc->set_to_far(method, info.entry());
2718     } else
2719 #endif
2720     {
2721       csc->set_to_interpreted(method, info.entry());
2722     }
2723   }
2724 
2725   virtual void verify() const {
2726     // make sure code pattern is actually a call imm32 instruction
2727     _call->verify();
2728     if (os::is_MP()) {
2729       _call->verify_alignment();
2730     }
2731   }
2732 
2733   virtual void verify_resolve_call(address dest) const {
2734     CodeBlob* db = CodeCache::find_blob_unsafe(dest);
2735     assert(!db->is_adapter_blob(), "must use stub!");
2736   }
2737 
2738   virtual bool is_call_to_interpreted(address dest) const {
2739     CodeBlob* cb = CodeCache::find_blob(_call->instruction_address());
2740     return cb->contains(dest);
2741   }
2742 
2743   virtual bool is_safe_for_patching() const { return false; }
2744 
2745   virtual NativeInstruction* get_load_instruction(virtual_call_Relocation* r) const {
2746     return nativeMovConstReg_at(r->cached_value());
2747   }
2748 
2749   virtual void *get_data(NativeInstruction* instruction) const {
2750     return (void*)((NativeMovConstReg*) instruction)->data();
2751   }
2752 
2753   virtual void set_data(NativeInstruction* instruction, intptr_t data) {
2754     ((NativeMovConstReg*) instruction)->set_data(data);
2755   }




 938   // should not get GC'd.  Skip the first few bytes of oops on
 939   // not-entrant methods.
 940   address low_boundary = verified_entry_point();
 941   if (!is_in_use()) {
 942     low_boundary += NativeJump::instruction_size;
 943     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
 944     // This means that the low_boundary is going to be a little too high.
 945     // This shouldn't matter, since oops of non-entrant methods are never used.
 946     // In fact, why are we bothering to look at oops in a non-entrant method??
 947   }
 948 
 949   ResourceMark rm;
 950   RelocIterator iter(this, low_boundary);
 951   while(iter.next()) {
 952     switch(iter.type()) {
 953       case relocInfo::virtual_call_type:
 954       case relocInfo::opt_virtual_call_type: {
 955         CompiledIC *ic = CompiledIC_at(&iter);
 956         // Ok, to lookup references to zombies here
 957         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
 958         assert(cb != NULL, "destination not in CodeBlob?");
 959         nmethod* nm = cb->as_nmethod_or_null();
 960         if( nm != NULL ) {
 961           // Verify that inline caches pointing to both zombie and not_entrant methods are clean
 962           if (!nm->is_in_use() || (nm->method()->code() != nm)) {
 963             assert(ic->is_clean(), "IC should be clean");
 964           }
 965         }
 966         break;
 967       }
 968       case relocInfo::static_call_type: {
 969         CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
 970         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
 971         assert(cb != NULL, "destination not in CodeBlob?");
 972         nmethod* nm = cb->as_nmethod_or_null();
 973         if( nm != NULL ) {
 974           // Verify that inline caches pointing to both zombie and not_entrant methods are clean
 975           if (!nm->is_in_use() || (nm->method()->code() != nm)) {
 976             assert(csc->is_clean(), "IC should be clean");
 977           }
 978         }
 979         break;
 980       }
 981       default:
 982         break;
 983     }
 984   }
 985 }
 986 
 987 // This is a private interface with the sweeper.
 988 void nmethod::mark_as_seen_on_stack() {
 989   assert(is_alive(), "Must be an alive method");
 990   // Set the traversal mark to ensure that the sweeper does 2
 991   // cleaning passes before moving to zombie.
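In the updated hunk above, both inline-cache verification paths now assert that CodeCache::find_blob_unsafe() returned a non-NULL CodeBlob before calling cb->as_nmethod_or_null() on it. The standalone sketch below illustrates that lookup-then-assert pattern only; Blob, lookup_blob and verify_destination are hypothetical stand-ins for CodeBlob, CodeCache::find_blob_unsafe() and the verification code in nmethod.cpp, not HotSpot APIs.

#include <cassert>
#include <cstddef>
#include <cstdio>

// Hypothetical stand-in for CodeBlob; only the fields the sketch needs.
struct Blob {
  bool is_nmethod;
  bool in_use;
  bool ic_clean;
};

// Hypothetical stand-in for CodeCache::find_blob_unsafe(): may return NULL
// when the destination does not fall inside any known blob.
static Blob* lookup_blob(Blob* table[], int count, int index) {
  return (index >= 0 && index < count) ? table[index] : NULL;
}

static void verify_destination(Blob* table[], int count, int index) {
  Blob* cb = lookup_blob(table, count, index);
  // The added check: fail with a message instead of dereferencing NULL below.
  assert(cb != NULL && "destination not in CodeBlob?");
  if (cb->is_nmethod && !cb->in_use) {
    // Mirrors the existing "IC should be clean" check for not-entrant targets.
    assert(cb->ic_clean && "IC should be clean");
  }
}

int main() {
  Blob not_entrant = { true, false, true };
  Blob* table[] = { &not_entrant };
  verify_destination(table, 1, 0);   // passes: blob found and its IC is clean
  std::printf("verification passed\n");
  return 0;
}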


2717 #if INCLUDE_AOT
2718     if (info.to_aot()) {
2719       csc->set_to_far(method, info.entry());
2720     } else
2721 #endif
2722     {
2723       csc->set_to_interpreted(method, info.entry());
2724     }
2725   }
2726 
2727   virtual void verify() const {
2728     // make sure code pattern is actually a call imm32 instruction
2729     _call->verify();
2730     if (os::is_MP()) {
2731       _call->verify_alignment();
2732     }
2733   }
2734 
2735   virtual void verify_resolve_call(address dest) const {
2736     CodeBlob* db = CodeCache::find_blob_unsafe(dest);
2737     assert(db != NULL && !db->is_adapter_blob(), "must use stub!");
2738   }
2739 
2740   virtual bool is_call_to_interpreted(address dest) const {
2741     CodeBlob* cb = CodeCache::find_blob(_call->instruction_address());
2742     return cb->contains(dest);
2743   }
2744 
2745   virtual bool is_safe_for_patching() const { return false; }
2746 
2747   virtual NativeInstruction* get_load_instruction(virtual_call_Relocation* r) const {
2748     return nativeMovConstReg_at(r->cached_value());
2749   }
2750 
2751   virtual void *get_data(NativeInstruction* instruction) const {
2752     return (void*)((NativeMovConstReg*) instruction)->data();
2753   }
2754 
2755   virtual void set_data(NativeInstruction* instruction, intptr_t data) {
2756     ((NativeMovConstReg*) instruction)->set_data(data);
2757   }
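In the second hunk, verify_resolve_call() now asserts db != NULL && !db->is_adapter_blob() instead of dereferencing the unchecked result of CodeCache::find_blob_unsafe(). The short-circuit behaviour of && is what makes the combined assert safe. A minimal sketch of that shape follows; Blob and find_unsafe are hypothetical stand-ins, not HotSpot APIs.

#include <cassert>
#include <cstddef>

// Hypothetical stand-in for CodeBlob; only what the sketch needs.
struct Blob {
  bool is_adapter;
};

// Hypothetical stand-in for CodeCache::find_blob_unsafe(): may return NULL.
static Blob* find_unsafe(Blob* blob, bool found) {
  return found ? blob : NULL;
}

static void verify_resolve_call(Blob* blob, bool found) {
  Blob* db = find_unsafe(blob, found);
  // && short-circuits: is_adapter is read only when db is non-NULL, so a
  // missing blob now trips the assert instead of a NULL dereference.
  assert(db != NULL && !db->is_adapter && "must use stub!");
}

int main() {
  Blob stub = { false };
  verify_resolve_call(&stub, true);   // passes: blob found and not an adapter blob
  return 0;
}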

