< prev index next >

src/hotspot/share/c1/c1_Runtime1.cpp

Print this page
rev 59383 : [mq]: final


 226                                                  CodeOffsets::frame_never_safe,
 227                                                  frame_size,
 228                                                  oop_maps,
 229                                                  must_gc_arguments);
 230   assert(blob != NULL, "blob must exist");
 231   return blob;
 232 }
 233 
 234 void Runtime1::generate_blob_for(BufferBlob* buffer_blob, StubID id) {  // generate the code blob for one stub and install it in _blobs
 235   assert(0 <= id && id < number_of_ids, "illegal stub id");
 236   bool expect_oop_map = true;  // assume an oop map is required unless the stub is listed below
 237 #ifdef ASSERT
 238   // Make sure that stubs that need oopmaps have them
 239   switch (id) {
 240     // These stubs don't need to have an oopmap
 241   case dtrace_object_alloc_id:
 242   case slow_subtype_check_id:
 243   case fpu2long_stub_id:
 244   case unwind_exception_id:
 245   case counter_overflow_id:
 246 #if defined(SPARC) || defined(PPC32)
 247   case handle_exception_nofpu_id:  // Unused on sparc
 248 #endif
 249     expect_oop_map = false;  // falls through from all cases above
 250     break;
 251   default:
 252     break;
 253   }
 254 #endif
 255   StubIDStubAssemblerCodeGenClosure cl(id);  // closure that emits the code for this stub id
 256   CodeBlob* blob = generate_blob(buffer_blob, id, name_for(id), expect_oop_map, &cl);
 257   // install blob so later lookups by StubID find it
 258   _blobs[id] = blob;
 259 }
 260 
 261 void Runtime1::initialize(BufferBlob* blob) {
 262   // platform-dependent initialization
 263   initialize_pd();
 264   // generate stubs
 265   for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id);
 266   // printing


1117             }
1118 
1119             if (TracePatching) {
1120               Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1121             }
1122           }
1123         } else if (stub_id == Runtime1::load_appendix_patching_id) {
1124           NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
1125           assert(n_copy->data() == 0 ||
1126                  n_copy->data() == (intptr_t)Universe::non_oop_word(),
1127                  "illegal init value");
1128           n_copy->set_data(cast_from_oop<intx>(appendix()));
1129 
1130           if (TracePatching) {
1131             Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1132           }
1133         } else {
1134           ShouldNotReachHere();
1135         }
1136 
1137 #if defined(SPARC) || defined(PPC32)
1138         if (load_klass_or_mirror_patch_id ||
1139             stub_id == Runtime1::load_appendix_patching_id) {
1140           // Update the location in the nmethod with the proper
1141           // metadata.  When the code was generated, a NULL was stuffed
1142           // in the metadata table and that table needs to be updated to
1143           // have the right value.  On intel the value is kept
1144           // directly in the instruction instead of in the metadata
1145           // table, so set_data above effectively updated the value.
1146           nmethod* nm = CodeCache::find_nmethod(instr_pc);
1147           assert(nm != NULL, "invalid nmethod_pc");
1148           RelocIterator mds(nm, copy_buff, copy_buff + 1);
1149           bool found = false;
1150           while (mds.next() && !found) {
1151             if (mds.type() == relocInfo::oop_type) {
1152               assert(stub_id == Runtime1::load_mirror_patching_id ||
1153                      stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1154               oop_Relocation* r = mds.oop_reloc();
1155               oop* oop_adr = r->oop_addr();
1156               *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1157               r->fix_oop_relocation();


1208           }
1209           ICache::invalidate_range(instr_pc, *byte_count);
1210           NativeGeneralJump::replace_mt_safe(instr_pc, copy_buff);
1211 
1212           if (load_klass_or_mirror_patch_id ||
1213               stub_id == Runtime1::load_appendix_patching_id) {
1214             relocInfo::relocType rtype =
1215               (stub_id == Runtime1::load_klass_patching_id) ?
1216                                    relocInfo::metadata_type :
1217                                    relocInfo::oop_type;
1218             // update relocInfo to metadata
1219             nmethod* nm = CodeCache::find_nmethod(instr_pc);
1220             assert(nm != NULL, "invalid nmethod_pc");
1221 
1222             // The old patch site is now a move instruction so update
1223             // the reloc info so that it will get updated during
1224             // future GCs.
1225             RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1226             relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1227                                                      relocInfo::none, rtype);
1228 #ifdef SPARC
1229             // Sparc takes two relocations for a metadata so update the second one.
1230             address instr_pc2 = instr_pc + NativeMovConstReg::add_offset;
1231             RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1232             relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1233                                                      relocInfo::none, rtype);
1234 #endif
1235 #ifdef PPC32
1236           { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1237             RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1238             relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1239                                                      relocInfo::none, rtype);
1240           }
1241 #endif
1242           }
1243 
1244         } else {
1245           ICache::invalidate_range(copy_buff, *byte_count);
1246           NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1247         }
1248       }
1249     }
1250   }
1251 
1252   // If we are patching in a non-perm oop, make sure the nmethod
1253   // is on the right list.
1254   {




 226                                                  CodeOffsets::frame_never_safe,
 227                                                  frame_size,
 228                                                  oop_maps,
 229                                                  must_gc_arguments);
 230   assert(blob != NULL, "blob must exist");
 231   return blob;
 232 }
 233 
 234 void Runtime1::generate_blob_for(BufferBlob* buffer_blob, StubID id) {  // generate the code blob for one stub and install it in _blobs
 235   assert(0 <= id && id < number_of_ids, "illegal stub id");
 236   bool expect_oop_map = true;  // assume an oop map is required unless the stub is listed below
 237 #ifdef ASSERT
 238   // Make sure that stubs that need oopmaps have them
 239   switch (id) {
 240     // These stubs don't need to have an oopmap
 241   case dtrace_object_alloc_id:
 242   case slow_subtype_check_id:
 243   case fpu2long_stub_id:
 244   case unwind_exception_id:
 245   case counter_overflow_id:
 246 #if defined(PPC32)
 247   case handle_exception_nofpu_id:  // no oop map expected on PPC32
 248 #endif
 249     expect_oop_map = false;  // falls through from all cases above
 250     break;
 251   default:
 252     break;
 253   }
 254 #endif
 255   StubIDStubAssemblerCodeGenClosure cl(id);  // closure that emits the code for this stub id
 256   CodeBlob* blob = generate_blob(buffer_blob, id, name_for(id), expect_oop_map, &cl);
 257   // install blob so later lookups by StubID find it
 258   _blobs[id] = blob;
 259 }
 260 
 261 void Runtime1::initialize(BufferBlob* blob) {
 262   // platform-dependent initialization
 263   initialize_pd();
 264   // generate stubs
 265   for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id);
 266   // printing


1117             }
1118 
1119             if (TracePatching) {
1120               Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1121             }
1122           }
1123         } else if (stub_id == Runtime1::load_appendix_patching_id) {
1124           NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
1125           assert(n_copy->data() == 0 ||
1126                  n_copy->data() == (intptr_t)Universe::non_oop_word(),
1127                  "illegal init value");
1128           n_copy->set_data(cast_from_oop<intx>(appendix()));
1129 
1130           if (TracePatching) {
1131             Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1132           }
1133         } else {
1134           ShouldNotReachHere();
1135         }
1136 
1137 #if defined(PPC32)
1138         if (load_klass_or_mirror_patch_id ||
1139             stub_id == Runtime1::load_appendix_patching_id) {
1140           // Update the location in the nmethod with the proper
1141           // metadata.  When the code was generated, a NULL was stuffed
1142           // in the metadata table and that table needs to be updated to
1143           // have the right value.  On intel the value is kept
1144           // directly in the instruction instead of in the metadata
1145           // table, so set_data above effectively updated the value.
1146           nmethod* nm = CodeCache::find_nmethod(instr_pc);
1147           assert(nm != NULL, "invalid nmethod_pc");
1148           RelocIterator mds(nm, copy_buff, copy_buff + 1);
1149           bool found = false;
1150           while (mds.next() && !found) {
1151             if (mds.type() == relocInfo::oop_type) {
1152               assert(stub_id == Runtime1::load_mirror_patching_id ||
1153                      stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1154               oop_Relocation* r = mds.oop_reloc();
1155               oop* oop_adr = r->oop_addr();
1156               *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1157               r->fix_oop_relocation();


1208           }
1209           ICache::invalidate_range(instr_pc, *byte_count);
1210           NativeGeneralJump::replace_mt_safe(instr_pc, copy_buff);
1211 
1212           if (load_klass_or_mirror_patch_id ||
1213               stub_id == Runtime1::load_appendix_patching_id) {
1214             relocInfo::relocType rtype =
1215               (stub_id == Runtime1::load_klass_patching_id) ?
1216                                    relocInfo::metadata_type :
1217                                    relocInfo::oop_type;
1218             // update relocInfo to metadata
1219             nmethod* nm = CodeCache::find_nmethod(instr_pc);
1220             assert(nm != NULL, "invalid nmethod_pc");
1221 
1222             // The old patch site is now a move instruction so update
1223             // the reloc info so that it will get updated during
1224             // future GCs.
1225             RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1226             relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1227                                                      relocInfo::none, rtype);







1228 #ifdef PPC32
1229           { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1230             RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1231             relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1232                                                      relocInfo::none, rtype);
1233           }
1234 #endif
1235           }
1236 
1237         } else {
1238           ICache::invalidate_range(copy_buff, *byte_count);
1239           NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1240         }
1241       }
1242     }
1243   }
1244 
1245   // If we are patching in a non-perm oop, make sure the nmethod
1246   // is on the right list.
1247   {


< prev index next >