1033 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
1034 // This means that the low_boundary is going to be a little too high.
1035 // This shouldn't matter, since oops of non-entrant methods are never used.
1036 // In fact, why are we bothering to look at oops in a non-entrant method??
1037 }
1038
1039 // Find all calls in an nmethod and clear the ones that point to non-entrant,
1040 // zombie and unloaded nmethods.
// NOTE(review): this span is the interior of a larger method whose signature is
// outside this chunk; `low_boundary` is presumably computed just above — confirm.
1041 ResourceMark rm;
// Walk every relocation record of this nmethod at or after low_boundary.
1042 RelocIterator iter(this, low_boundary);
1043 while(iter.next()) {
1044 switch(iter.type()) {
1045 case relocInfo::virtual_call_type:
1046 case relocInfo::opt_virtual_call_type: {
1047 CompiledIC *ic = CompiledIC_at(&iter);
1048 // Ok, to lookup references to zombies here
// (the "unsafe" lookup tolerates destinations inside zombie/unloaded blobs)
1049 CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1050 if( cb != NULL && cb->is_nmethod() ) {
1051 nmethod* nm = (nmethod*)cb;
1052 // Clean inline caches pointing to zombie, non-entrant and unloaded methods
// Stale target = nmethod no longer in use, OR no longer the Method's current
// code (superseded by a newer compile); reset the IC to the clean state.
1053 if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();
1054 }
1055 break;
1056 }
1057 case relocInfo::static_call_type: {
// Static calls: same staleness test as above, applied to the fixed destination.
1058 CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1059 CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1060 if( cb != NULL && cb->is_nmethod() ) {
1061 nmethod* nm = (nmethod*)cb;
1062 // Clean inline caches pointing to zombie, non-entrant and unloaded methods
1063 if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
1064 }
1065 break;
1066 }
// All other relocation types are intentionally ignored (no default case).
1067 }
1068 }
1069 }
1070
1071 void nmethod::verify_clean_inline_caches() {
1072 assert_locked_or_safepoint(CompiledIC_lock);
1073
|
1033 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
1034 // This means that the low_boundary is going to be a little too high.
1035 // This shouldn't matter, since oops of non-entrant methods are never used.
1036 // In fact, why are we bothering to look at oops in a non-entrant method??
1037 }
1038
1039 // Find all calls in an nmethod and clear the ones that point to non-entrant,
1040 // zombie and unloaded nmethods.
// NOTE(review): interior of a larger method whose signature is outside this
// chunk; `low_boundary` is presumably computed just above — confirm.
1041 ResourceMark rm;
// Walk every relocation record of this nmethod at or after low_boundary.
1042 RelocIterator iter(this, low_boundary);
1043 while(iter.next()) {
1044 switch(iter.type()) {
1045 case relocInfo::virtual_call_type:
1046 case relocInfo::opt_virtual_call_type: {
1047 CompiledIC *ic = CompiledIC_at(&iter);
1048 // Ok, to lookup references to zombies here
// (the "unsafe" lookup tolerates destinations inside zombie/unloaded blobs)
1049 CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1050 if( cb != NULL && cb->is_nmethod() ) {
1051 nmethod* nm = (nmethod*)cb;
1052 // Clean inline caches pointing to zombie, non-entrant and unloaded methods
// Stale target = nmethod no longer in use, OR no longer the Method's current
// code. NOTE(review): set_to_clean is passed is_alive() here — presumably an
// overload taking a liveness closure; confirm against CompiledIC's declaration.
1053 if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
1054 }
1055 break;
1056 }
1057 case relocInfo::static_call_type: {
// Static calls: same staleness test as above, applied to the fixed destination.
1058 CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1059 CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1060 if( cb != NULL && cb->is_nmethod() ) {
1061 nmethod* nm = (nmethod*)cb;
1062 // Clean inline caches pointing to zombie, non-entrant and unloaded methods
1063 if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
1064 }
1065 break;
1066 }
// All other relocation types are intentionally ignored (no default case).
1067 }
1068 }
1069 }
1070
1071 void nmethod::verify_clean_inline_caches() {
1072 assert_locked_or_safepoint(CompiledIC_lock);
1073
|