src/hotspot/share/c1/c1_Runtime1.cpp

rev 59103 : imported patch hotspot


 225                                                  CodeOffsets::frame_never_safe,
 226                                                  frame_size,
 227                                                  oop_maps,
 228                                                  must_gc_arguments);
 229   assert(blob != NULL, "blob must exist");
 230   return blob;
 231 }
 232 
 233 void Runtime1::generate_blob_for(BufferBlob* buffer_blob, StubID id) {
 234   assert(0 <= id && id < number_of_ids, "illegal stub id");
 235   bool expect_oop_map = true;
 236 #ifdef ASSERT
 237   // Make sure that stubs that need oopmaps have them
 238   switch (id) {
 239     // These stubs don't need to have an oopmap
 240   case dtrace_object_alloc_id:
 241   case slow_subtype_check_id:
 242   case fpu2long_stub_id:
 243   case unwind_exception_id:
 244   case counter_overflow_id:
 245 #if defined(SPARC) || defined(PPC32)
 246   case handle_exception_nofpu_id:  // Unused on sparc
 247 #endif
 248     expect_oop_map = false;
 249     break;
 250   default:
 251     break;
 252   }
 253 #endif
 254   StubIDStubAssemblerCodeGenClosure cl(id);
 255   CodeBlob* blob = generate_blob(buffer_blob, id, name_for(id), expect_oop_map, &cl);
 256   // install blob
 257   _blobs[id] = blob;
 258 }
 259 
 260 void Runtime1::initialize(BufferBlob* blob) {
 261   // platform-dependent initialization
 262   initialize_pd();
 263   // generate stubs
 264   for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id);
 265   // printing


1116             }
1117 
1118             if (TracePatching) {
1119               Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1120             }
1121           }
1122         } else if (stub_id == Runtime1::load_appendix_patching_id) {
1123           NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
1124           assert(n_copy->data() == 0 ||
1125                  n_copy->data() == (intptr_t)Universe::non_oop_word(),
1126                  "illegal init value");
1127           n_copy->set_data(cast_from_oop<intx>(appendix()));
1128 
1129           if (TracePatching) {
1130             Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1131           }
1132         } else {
1133           ShouldNotReachHere();
1134         }
1135 
1136 #if defined(SPARC) || defined(PPC32)
1137         if (load_klass_or_mirror_patch_id ||
1138             stub_id == Runtime1::load_appendix_patching_id) {
1139           // Update the location in the nmethod with the proper
1140           // metadata.  When the code was generated, a NULL was stuffed
1141               // in the metadata table and that table needs to be updated to
1142           // have the right value.  On intel the value is kept
1143           // directly in the instruction instead of in the metadata
1144           // table, so set_data above effectively updated the value.
1145           nmethod* nm = CodeCache::find_nmethod(instr_pc);
1146           assert(nm != NULL, "invalid nmethod_pc");
1147           RelocIterator mds(nm, copy_buff, copy_buff + 1);
1148           bool found = false;
1149           while (mds.next() && !found) {
1150             if (mds.type() == relocInfo::oop_type) {
1151               assert(stub_id == Runtime1::load_mirror_patching_id ||
1152                      stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1153               oop_Relocation* r = mds.oop_reloc();
1154               oop* oop_adr = r->oop_addr();
1155               *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1156               r->fix_oop_relocation();


1207           }
1208           ICache::invalidate_range(instr_pc, *byte_count);
1209           NativeGeneralJump::replace_mt_safe(instr_pc, copy_buff);
1210 
1211           if (load_klass_or_mirror_patch_id ||
1212               stub_id == Runtime1::load_appendix_patching_id) {
1213             relocInfo::relocType rtype =
1214               (stub_id == Runtime1::load_klass_patching_id) ?
1215                                    relocInfo::metadata_type :
1216                                    relocInfo::oop_type;
1217             // update relocInfo to metadata
1218             nmethod* nm = CodeCache::find_nmethod(instr_pc);
1219             assert(nm != NULL, "invalid nmethod_pc");
1220 
1221             // The old patch site is now a move instruction so update
1222             // the reloc info so that it will get updated during
1223             // future GCs.
1224             RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1225             relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1226                                                      relocInfo::none, rtype);
1227 #ifdef SPARC
1228             // Sparc takes two relocations for a metadata so update the second one.
1229             address instr_pc2 = instr_pc + NativeMovConstReg::add_offset;
1230             RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1231             relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1232                                                      relocInfo::none, rtype);
1233 #endif
1234 #ifdef PPC32
1235           { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1236             RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1237             relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1238                                                      relocInfo::none, rtype);
1239           }
1240 #endif
1241           }
1242 
1243         } else {
1244           ICache::invalidate_range(copy_buff, *byte_count);
1245           NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1246         }
1247       }
1248     }
1249   }
1250 
1251   // If we are patching in a non-perm oop, make sure the nmethod
1252   // is on the right list.
1253   {




 225                                                  CodeOffsets::frame_never_safe,
 226                                                  frame_size,
 227                                                  oop_maps,
 228                                                  must_gc_arguments);
 229   assert(blob != NULL, "blob must exist");
 230   return blob;
 231 }
 232 
 233 void Runtime1::generate_blob_for(BufferBlob* buffer_blob, StubID id) {
 234   assert(0 <= id && id < number_of_ids, "illegal stub id");
 235   bool expect_oop_map = true;
 236 #ifdef ASSERT
 237   // Make sure that stubs that need oopmaps have them
 238   switch (id) {
 239     // These stubs don't need to have an oopmap
 240   case dtrace_object_alloc_id:
 241   case slow_subtype_check_id:
 242   case fpu2long_stub_id:
 243   case unwind_exception_id:
 244   case counter_overflow_id:
 245 #if defined(PPC32)
 246   case handle_exception_nofpu_id:  // Unused on sparc
 247 #endif
 248     expect_oop_map = false;
 249     break;
 250   default:
 251     break;
 252   }
 253 #endif
 254   StubIDStubAssemblerCodeGenClosure cl(id);
 255   CodeBlob* blob = generate_blob(buffer_blob, id, name_for(id), expect_oop_map, &cl);
 256   // install blob
 257   _blobs[id] = blob;
 258 }
 259 
 260 void Runtime1::initialize(BufferBlob* blob) {
 261   // platform-dependent initialization
 262   initialize_pd();
 263   // generate stubs
 264   for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id);
 265   // printing


1116             }
1117 
1118             if (TracePatching) {
1119               Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1120             }
1121           }
1122         } else if (stub_id == Runtime1::load_appendix_patching_id) {
1123           NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
1124           assert(n_copy->data() == 0 ||
1125                  n_copy->data() == (intptr_t)Universe::non_oop_word(),
1126                  "illegal init value");
1127           n_copy->set_data(cast_from_oop<intx>(appendix()));
1128 
1129           if (TracePatching) {
1130             Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1131           }
1132         } else {
1133           ShouldNotReachHere();
1134         }
1135 
1136 #if defined(PPC32)
1137         if (load_klass_or_mirror_patch_id ||
1138             stub_id == Runtime1::load_appendix_patching_id) {
1139           // Update the location in the nmethod with the proper
1140           // metadata.  When the code was generated, a NULL was stuffed
1141               // in the metadata table and that table needs to be updated to
1142           // have the right value.  On intel the value is kept
1143           // directly in the instruction instead of in the metadata
1144           // table, so set_data above effectively updated the value.
1145           nmethod* nm = CodeCache::find_nmethod(instr_pc);
1146           assert(nm != NULL, "invalid nmethod_pc");
1147           RelocIterator mds(nm, copy_buff, copy_buff + 1);
1148           bool found = false;
1149           while (mds.next() && !found) {
1150             if (mds.type() == relocInfo::oop_type) {
1151               assert(stub_id == Runtime1::load_mirror_patching_id ||
1152                      stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1153               oop_Relocation* r = mds.oop_reloc();
1154               oop* oop_adr = r->oop_addr();
1155               *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1156               r->fix_oop_relocation();


1207           }
1208           ICache::invalidate_range(instr_pc, *byte_count);
1209           NativeGeneralJump::replace_mt_safe(instr_pc, copy_buff);
1210 
1211           if (load_klass_or_mirror_patch_id ||
1212               stub_id == Runtime1::load_appendix_patching_id) {
1213             relocInfo::relocType rtype =
1214               (stub_id == Runtime1::load_klass_patching_id) ?
1215                                    relocInfo::metadata_type :
1216                                    relocInfo::oop_type;
1217             // update relocInfo to metadata
1218             nmethod* nm = CodeCache::find_nmethod(instr_pc);
1219             assert(nm != NULL, "invalid nmethod_pc");
1220 
1221             // The old patch site is now a move instruction so update
1222             // the reloc info so that it will get updated during
1223             // future GCs.
1224             RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1225             relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1226                                                      relocInfo::none, rtype);







1227 #ifdef PPC32
1228           { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1229             RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1230             relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1231                                                      relocInfo::none, rtype);
1232           }
1233 #endif
1234           }
1235 
1236         } else {
1237           ICache::invalidate_range(copy_buff, *byte_count);
1238           NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1239         }
1240       }
1241     }
1242   }
1243 
1244   // If we are patching in a non-perm oop, make sure the nmethod
1245   // is on the right list.
1246   {

