< prev index next >

src/share/vm/code/nmethod.cpp

Print this page




1004   RelocIterator iter(this);
1005   while (iter.next()) {
1006     iter.reloc()->clear_inline_cache();
1007   }
1008 }
1009 
1010 // Clear ICStubs of all compiled ICs
1011 void nmethod::clear_ic_stubs() {
1012   assert_locked_or_safepoint(CompiledIC_lock);  // caller must hold CompiledIC_lock or be at a safepoint
1013   RelocIterator iter(this);  // walk every relocation record in this nmethod
1014   while(iter.next()) {
1015     if (iter.type() == relocInfo::virtual_call_type) {
1016       CompiledIC* ic = CompiledIC_at(&iter);  // view the virtual-call site as a compiled inline cache
1017       ic->clear_ic_stub();  // drop the IC's ICStub, if one is attached
1018     }
1019   }
1020 }
1021 
1022 
1023 void nmethod::cleanup_inline_caches() {
1024 
1025   assert_locked_or_safepoint(CompiledIC_lock);  // caller must hold CompiledIC_lock or be at a safepoint
1026 
1027   // If the method is not entrant or zombie then a JMP is plastered over the
1028   // first few bytes.  If an oop in the old code was there, that oop
1029   // should not get GC'd.  Skip the first few bytes of oops on
1030   // not-entrant methods.
1031   address low_boundary = verified_entry_point();
1032   if (!is_in_use()) {
1033     low_boundary += NativeJump::instruction_size;  // skip past the jump patched over the entry
1034     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1035     // This means that the low_boundary is going to be a little too high.
1036     // This shouldn't matter, since oops of non-entrant methods are never used.
1037     // In fact, why are we bothering to look at oops in a non-entrant method??
1038   }
1039 
1040   // Find all calls in an nmethod, and clear the ones that point to zombie methods

1041   ResourceMark rm;
1042   RelocIterator iter(this, low_boundary);  // only visit relocations at or above low_boundary
1043   while(iter.next()) {
1044     switch(iter.type()) {
1045       case relocInfo::virtual_call_type:
1046       case relocInfo::opt_virtual_call_type: {
1047         CompiledIC *ic = CompiledIC_at(&iter);
1048         // Ok, to lookup references to zombies here
1049         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1050         if( cb != NULL && cb->is_nmethod() ) {
1051           nmethod* nm = (nmethod*)cb;
1052           // Clean inline caches pointing to both zombie and not_entrant methods
1053           if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();  // target not in use, or superseded by a newer compile
1054         }
1055         break;
1056       }
1057       case relocInfo::static_call_type: {
1058         CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1059         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1060         if( cb != NULL && cb->is_nmethod() ) {
1061           nmethod* nm = (nmethod*)cb;
1062           // Clean inline caches pointing to both zombie and not_entrant methods
1063           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();  // same staleness test as the virtual-call case above
1064         }
1065         break;
1066       }
1067     }  // other relocation types need no cleaning
1068   }
1069 }
1070 
1071 void nmethod::verify_clean_inline_caches() {
1072   assert_locked_or_safepoint(CompiledIC_lock);
1073 
1074   // If the method is not entrant or zombie then a JMP is plastered over the
1075   // first few bytes.  If an oop in the old code was there, that oop
1076   // should not get GC'd.  Skip the first few bytes of oops on
1077   // not-entrant methods.
1078   address low_boundary = verified_entry_point();
1079   if (!is_in_use()) {
1080     low_boundary += NativeJump::instruction_size;
1081     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1082     // This means that the low_boundary is going to be a little too high.


2512 public:
2513   VerifyOopsClosure(nmethod* nm) : _nm(nm), _ok(true) { }  // remember the nmethod under verification; no failures seen yet
2514   bool ok() { return _ok; }  // true while every visited slot held NULL or a valid oop
2515   virtual void do_oop(oop* p) {
2516     if ((*p) == NULL || (*p)->is_oop())  return;  // empty or valid slot: nothing to report
2517     if (_ok) {
2518       _nm->print_nmethod(true);  // dump the nmethod once, on the first bad slot only
2519       _ok = false;
2520     }
2521     tty->print_cr("*** non-oop " PTR_FORMAT " found at " PTR_FORMAT " (offset %d)",
2522                   (void *)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
2523   }
2524   virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }  // compressed-oop slots are never handed to this closure
2525 };
2526 
2527 void nmethod::verify() {
2528 
2529   // Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
2530   // seems odd.
2531 
2532   if( is_zombie() || is_not_entrant() )
2533     return;
2534 
2535   // Make sure all the entry points are correctly aligned for patching.
2536   NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
2537 
2538   // assert(method()->is_oop(), "must be valid");
2539 
2540   ResourceMark rm;
2541 
2542   if (!CodeCache::contains(this)) {
2543     fatal(err_msg("nmethod at " INTPTR_FORMAT " not in zone", this));
2544   }
2545 
2546   if(is_native_method() )
2547     return;
2548 
2549   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
2550   if (nm != this) {
2551     fatal(err_msg("findNMethod did not find this nmethod (" INTPTR_FORMAT ")",
2552                   this));




1004   RelocIterator iter(this);
1005   while (iter.next()) {
1006     iter.reloc()->clear_inline_cache();
1007   }
1008 }
1009 
1010 // Clear ICStubs of all compiled ICs
1011 void nmethod::clear_ic_stubs() {
1012   assert_locked_or_safepoint(CompiledIC_lock);  // caller must hold CompiledIC_lock or be at a safepoint
1013   RelocIterator iter(this);  // walk every relocation record in this nmethod
1014   while(iter.next()) {
1015     if (iter.type() == relocInfo::virtual_call_type) {
1016       CompiledIC* ic = CompiledIC_at(&iter);  // view the virtual-call site as a compiled inline cache
1017       ic->clear_ic_stub();  // drop the IC's ICStub, if one is attached
1018     }
1019   }
1020 }
1021 
1022 
1023 void nmethod::cleanup_inline_caches() {

1024   assert_locked_or_safepoint(CompiledIC_lock);  // caller must hold CompiledIC_lock or be at a safepoint
1025 
1026   // If the method is not entrant or zombie then a JMP is plastered over the
1027   // first few bytes.  If an oop in the old code was there, that oop
1028   // should not get GC'd.  Skip the first few bytes of oops on
1029   // not-entrant methods.
1030   address low_boundary = verified_entry_point();
1031   if (!is_in_use()) {
1032     low_boundary += NativeJump::instruction_size;  // skip past the jump patched over the entry
1033     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1034     // This means that the low_boundary is going to be a little too high.
1035     // This shouldn't matter, since oops of non-entrant methods are never used.
1036     // In fact, why are we bothering to look at oops in a non-entrant method??
1037   }
1038 
1039   // Find all calls in an nmethod and clear the ones that point to non-entrant,
1040   // zombie and unloaded nmethods.
1041   ResourceMark rm;
1042   RelocIterator iter(this, low_boundary);  // only visit relocations at or above low_boundary
1043   while(iter.next()) {
1044     switch(iter.type()) {
1045       case relocInfo::virtual_call_type:
1046       case relocInfo::opt_virtual_call_type: {
1047         CompiledIC *ic = CompiledIC_at(&iter);
1048         // Ok, to lookup references to zombies here
1049         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1050         if( cb != NULL && cb->is_nmethod() ) {
1051           nmethod* nm = (nmethod*)cb;
1052           // Clean inline caches pointing to zombie, non-entrant and unloaded methods
1053           if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();  // target not in use, or superseded by a newer compile
1054         }
1055         break;
1056       }
1057       case relocInfo::static_call_type: {
1058         CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1059         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1060         if( cb != NULL && cb->is_nmethod() ) {
1061           nmethod* nm = (nmethod*)cb;
1062           // Clean inline caches pointing to zombie, non-entrant and unloaded methods
1063           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();  // same staleness test as the virtual-call case above
1064         }
1065         break;
1066       }
1067     }  // other relocation types need no cleaning
1068   }
1069 }
1070 
1071 void nmethod::verify_clean_inline_caches() {
1072   assert_locked_or_safepoint(CompiledIC_lock);
1073 
1074   // If the method is not entrant or zombie then a JMP is plastered over the
1075   // first few bytes.  If an oop in the old code was there, that oop
1076   // should not get GC'd.  Skip the first few bytes of oops on
1077   // not-entrant methods.
1078   address low_boundary = verified_entry_point();
1079   if (!is_in_use()) {
1080     low_boundary += NativeJump::instruction_size;
1081     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1082     // This means that the low_boundary is going to be a little too high.


2512 public:
2513   VerifyOopsClosure(nmethod* nm) : _nm(nm), _ok(true) { }  // remember the nmethod under verification; no failures seen yet
2514   bool ok() { return _ok; }  // true while every visited slot held NULL or a valid oop
2515   virtual void do_oop(oop* p) {
2516     if ((*p) == NULL || (*p)->is_oop())  return;  // empty or valid slot: nothing to report
2517     if (_ok) {
2518       _nm->print_nmethod(true);  // dump the nmethod once, on the first bad slot only
2519       _ok = false;
2520     }
2521     tty->print_cr("*** non-oop " PTR_FORMAT " found at " PTR_FORMAT " (offset %d)",
2522                   (void *)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
2523   }
2524   virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }  // compressed-oop slots are never handed to this closure
2525 };
2526 
2527 void nmethod::verify() {
2528 
2529   // Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
2530   // seems odd.
2531 
2532   if (is_zombie() || is_not_entrant() || is_unloaded())
2533     return;
2534 
2535   // Make sure all the entry points are correctly aligned for patching.
2536   NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
2537 
2538   // assert(method()->is_oop(), "must be valid");
2539 
2540   ResourceMark rm;
2541 
2542   if (!CodeCache::contains(this)) {
2543     fatal(err_msg("nmethod at " INTPTR_FORMAT " not in zone", this));
2544   }
2545 
2546   if(is_native_method() )
2547     return;
2548 
2549   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
2550   if (nm != this) {
2551     fatal(err_msg("findNMethod did not find this nmethod (" INTPTR_FORMAT ")",
2552                   this));


< prev index next >