
src/share/vm/code/nmethod.cpp

--- old/src/share/vm/code/nmethod.cpp

1033     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1034     // This means that the low_boundary is going to be a little too high.
1035     // This shouldn't matter, since oops of non-entrant methods are never used.
1036     // In fact, why are we bothering to look at oops in a non-entrant method??
1037   }
1038 
1039   // Find all calls in this nmethod and clear the ones that point to non-entrant,
1040   // zombie, or unloaded nmethods.
1041   ResourceMark rm;
1042   RelocIterator iter(this, low_boundary);
1043   while (iter.next()) {
1044     switch (iter.type()) {
1045       case relocInfo::virtual_call_type:
1046       case relocInfo::opt_virtual_call_type: {
1047         CompiledIC *ic = CompiledIC_at(&iter);
1048         // OK to look up references to zombies here
1049         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1050         if (cb != NULL && cb->is_nmethod()) {
1051           nmethod* nm = (nmethod*)cb;
1052           // Clean inline caches pointing to zombie, non-entrant, and unloaded nmethods
1053           if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();
1054         }
1055         break;
1056       }
1057       case relocInfo::static_call_type: {
1058         CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1059         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1060         if (cb != NULL && cb->is_nmethod()) {
1061           nmethod* nm = (nmethod*)cb;
1062           // Clean inline caches pointing to zombie, non-entrant, and unloaded nmethods
1063           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
1064         }
1065         break;
1066       }
1067     }
1068   }
1069 }
1070 
1071 void nmethod::verify_clean_inline_caches() {
1072   assert_locked_or_safepoint(CompiledIC_lock);
1073 


1133         assert(ic->cached_icholder() != NULL, "must be non-NULL");
1134         count++;
1135       }
1136     }
1137   }
1138 
1139   return count;
1140 }
1141 
1142 // This is a private interface with the sweeper.
1143 void nmethod::mark_as_seen_on_stack() {
1144   assert(is_alive(), "Must be an alive method");
1145   // Set the traversal mark to ensure that the sweeper does 2
1146   // cleaning passes before moving to zombie.
1147   set_stack_traversal_mark(NMethodSweeper::traversal_count());
1148 }
1149 
1150 // Tell whether a non-entrant method can be converted to a zombie (i.e.,
1151 // there are no activations on the stack, and the method is not in use
1152 // by the VM or by the ServiceThread).
1153 bool nmethod::can_not_entrant_be_converted() {
1154   assert(is_not_entrant(), "must be a non-entrant method");
1155 
1156   // Since the nmethod sweeper only does partial sweeps, the sweeper's traversal
1157   // count can be greater than the stack traversal count before the sweeper hits
1158   // this nmethod for the second time.
1159   return stack_traversal_mark()+1 < NMethodSweeper::traversal_count() &&
1160          !is_locked_by_vm();
1161 }
1162 
1163 void nmethod::inc_decompile_count() {
1164   if (!is_compiled_by_c2()) return;
1165   // Could be gated by ProfileTraps, but do not bother...
1166   Method* m = method();
1167   if (m == NULL)  return;
1168   MethodData* mdo = m->method_data();
1169   if (mdo == NULL)  return;
1170   // There is a benign race here.  See comments in methodData.hpp.
1171   mdo->inc_decompile_count();
1172 }
1173 

+++ new/src/share/vm/code/nmethod.cpp

1033     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1034     // This means that the low_boundary is going to be a little too high.
1035     // This shouldn't matter, since oops of non-entrant methods are never used.
1036     // In fact, why are we bothering to look at oops in a non-entrant method??
1037   }
1038 
1039   // Find all calls in this nmethod and clear the ones that point to non-entrant,
1040   // zombie, or unloaded nmethods.
1041   ResourceMark rm;
1042   RelocIterator iter(this, low_boundary);
1043   while (iter.next()) {
1044     switch (iter.type()) {
1045       case relocInfo::virtual_call_type:
1046       case relocInfo::opt_virtual_call_type: {
1047         CompiledIC *ic = CompiledIC_at(&iter);
1048         // OK to look up references to zombies here
1049         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1050         if (cb != NULL && cb->is_nmethod()) {
1051           nmethod* nm = (nmethod*)cb;
1052           // Clean inline caches pointing to zombie, non-entrant, and unloaded nmethods
1053           if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
1054         }
1055         break;
1056       }
1057       case relocInfo::static_call_type: {
1058         CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1059         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1060         if (cb != NULL && cb->is_nmethod()) {
1061           nmethod* nm = (nmethod*)cb;
1062           // Clean inline caches pointing to zombie, non-entrant, and unloaded nmethods
1063           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
1064         }
1065         break;
1066       }
1067     }
1068   }
1069 }
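
The staleness test above is the same for virtual and static call sites: a target nmethod is stale when it is no longer in use, or when its Method has since been recompiled so that method()->code() points at newer code. Below is a minimal standalone sketch of that predicate; Method, NMethodLike, and call_target_is_stale are hypothetical stand-ins for illustration, not the VM's real classes.

    #include <cstdio>

    // Hypothetical stand-ins for Method/nmethod; illustrative only.
    struct Method;
    struct NMethodLike {
      bool          in_use;   // stand-in for nmethod::is_in_use()
      const Method* holder;   // the Method this code was compiled from
    };
    struct Method {
      const NMethodLike* code;  // stand-in for Method::code(): current nmethod
    };

    // Mirrors the guard in front of set_to_clean() above.
    bool call_target_is_stale(const NMethodLike* nm) {
      return !nm->in_use || nm->holder->code != nm;
    }

    int main() {
      Method m{};
      NMethodLike old_nm{true, &m};
      NMethodLike new_nm{true, &m};
      m.code = &new_nm;  // the method was recompiled; old_nm is superseded
      std::printf("%d %d\n",
                  call_target_is_stale(&old_nm),   // 1: superseded
                  call_target_is_stale(&new_nm));  // 0: current code
    }
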
1070 
1071 void nmethod::verify_clean_inline_caches() {
1072   assert_locked_or_safepoint(CompiledIC_lock);
1073 


1133         assert(ic->cached_icholder() != NULL, "must be non-NULL");
1134         count++;
1135       }
1136     }
1137   }
1138 
1139   return count;
1140 }
1141 
1142 // This is a private interface with the sweeper.
1143 void nmethod::mark_as_seen_on_stack() {
1144   assert(is_alive(), "Must be an alive method");
1145   // Set the traversal mark to ensure that the sweeper does 2
1146   // cleaning passes before moving to zombie.
1147   set_stack_traversal_mark(NMethodSweeper::traversal_count());
1148 }
1149 
1150 // Tell whether a non-entrant method can be converted to a zombie (i.e.,
1151 // there are no activations on the stack, and the method is not in use
1152 // by the VM or by the ServiceThread).
1153 bool nmethod::can_convert_to_zombie() {
1154   assert(is_not_entrant(), "must be a non-entrant method");
1155 
1156   // Since the nmethod sweeper only does partial sweeps, the sweeper's traversal
1157   // count can be greater than the stack traversal count before the sweeper hits
1158   // this nmethod for the second time.
1159   return stack_traversal_mark()+1 < NMethodSweeper::traversal_count() &&
1160          !is_locked_by_vm();
1161 }
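
mark_as_seen_on_stack() stamps the nmethod with the sweeper's current traversal count, and can_convert_to_zombie() requires stack_traversal_mark() + 1 < traversal_count(): the sweeper must have begun at least two traversals since the method was last seen on a stack. The following is a minimal sketch of that arithmetic; SweeperModel and NMethodModel are hypothetical model structs, not the real sweeper API.

    #include <cstdio>

    // Hypothetical models of the sweeper and an nmethod; illustrative only.
    struct SweeperModel {
      long traversal_count = 0;      // stand-in for NMethodSweeper::traversal_count()
    };

    struct NMethodModel {
      long stack_traversal_mark = 0; // last traversal in which we were seen on a stack
      bool locked_by_vm = false;     // stand-in for is_locked_by_vm()

      // Mirrors can_convert_to_zombie(): at least two traversals must have
      // started since this nmethod was last seen on a stack.
      bool can_convert_to_zombie(const SweeperModel& s) const {
        return stack_traversal_mark + 1 < s.traversal_count && !locked_by_vm;
      }
    };

    int main() {
      SweeperModel sweeper;
      NMethodModel nm;
      nm.stack_traversal_mark = 5;   // seen on stack during traversal 5
      for (long t = 5; t <= 7; t++) {
        sweeper.traversal_count = t;
        std::printf("count=%ld -> %d\n", t, nm.can_convert_to_zombie(sweeper));
      }
      // Prints 0, 0, 1: conversion is allowed only once traversal 7 begins.
    }

With the mark at 5, counts of 5 and 6 fail the test and 7 passes, which is the two-cleaning-pass guarantee the comments above describe.
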
1162 
1163 void nmethod::inc_decompile_count() {
1164   if (!is_compiled_by_c2()) return;
1165   // Could be gated by ProfileTraps, but do not bother...
1166   Method* m = method();
1167   if (m == NULL)  return;
1168   MethodData* mdo = m->method_data();
1169   if (mdo == NULL)  return;
1170   // There is a benign race here.  See comments in methodData.hpp.
1171   mdo->inc_decompile_count();
1172 }
1173 

