193 Compilation::setup_code_buffer(&code, 0);
194
195 // create assembler for code generation
196 StubAssembler* sasm = new StubAssembler(&code, name_for(id), id);
197 // generate code for runtime stub
198 oop_maps = generate_code_for(id, sasm);
199 assert(oop_maps == NULL || sasm->frame_size() != no_frame_size,
200 "if stub has an oop map it must have a valid frame size");
201
202 #ifdef ASSERT
203 // Make sure that stubs that need oopmaps have them
204 switch (id) {
205 // These stubs don't need to have an oopmap
206 case dtrace_object_alloc_id:
207 case g1_pre_barrier_slow_id:
208 case g1_post_barrier_slow_id:
209 case slow_subtype_check_id:
210 case fpu2long_stub_id:
211 case unwind_exception_id:
212 case counter_overflow_id:
213 #if defined(SPARC) || defined(PPC)
214 case handle_exception_nofpu_id: // Unused on sparc
215 #endif
216 break;
217
218 // All other stubs should have oopmaps
219 default:
220 assert(oop_maps != NULL, "must have an oopmap");
221 }
222 #endif
223
224 // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
225 sasm->align(BytesPerWord);
226 // make sure all code is in code buffer
227 sasm->flush();
228
229 frame_size = sasm->frame_size();
230 must_gc_arguments = sasm->must_gc_arguments();
231 } else {
232 /* ignored values */
233 oop_maps = NULL;
1080 }
1081
1082 if (TracePatching) {
1083 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1084 }
1085 }
1086 } else if (stub_id == Runtime1::load_appendix_patching_id) {
1087 NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
1088 assert(n_copy->data() == 0 ||
1089 n_copy->data() == (intptr_t)Universe::non_oop_word(),
1090 "illegal init value");
1091 n_copy->set_data(cast_from_oop<intx>(appendix()));
1092
1093 if (TracePatching) {
1094 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1095 }
1096 } else {
1097 ShouldNotReachHere();
1098 }
1099
1100 #if defined(SPARC) || defined(PPC)
1101 if (load_klass_or_mirror_patch_id ||
1102 stub_id == Runtime1::load_appendix_patching_id) {
1103 // Update the location in the nmethod with the proper
1104 // metadata. When the code was generated, a NULL was stuffed
1105 // in the metadata table and that table needs to be updated to
1106 // have the right value. On intel the value is kept
1107 // directly in the instruction instead of in the metadata
1108 // table, so set_data above effectively updated the value.
1109 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1110 assert(nm != NULL, "invalid nmethod_pc");
1111 RelocIterator mds(nm, copy_buff, copy_buff + 1);
1112 bool found = false;
1113 while (mds.next() && !found) {
1114 if (mds.type() == relocInfo::oop_type) {
1115 assert(stub_id == Runtime1::load_mirror_patching_id ||
1116 stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1117 oop_Relocation* r = mds.oop_reloc();
1118 oop* oop_adr = r->oop_addr();
1119 *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1120 r->fix_oop_relocation();
1178 (stub_id == Runtime1::load_klass_patching_id) ?
1179 relocInfo::metadata_type :
1180 relocInfo::oop_type;
1181 // update relocInfo to metadata
1182 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1183 assert(nm != NULL, "invalid nmethod_pc");
1184
1185 // The old patch site is now a move instruction so update
1186 // the reloc info so that it will get updated during
1187 // future GCs.
1188 RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1189 relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1190 relocInfo::none, rtype);
1191 #ifdef SPARC
1192 // Sparc takes two relocations for a metadata so update the second one.
1193 address instr_pc2 = instr_pc + NativeMovConstReg::add_offset;
1194 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1195 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1196 relocInfo::none, rtype);
1197 #endif
1198 #ifdef PPC
1199 { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1200 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1201 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1202 relocInfo::none, rtype);
1203 }
1204 #endif
1205 }
1206
1207 } else {
1208 ICache::invalidate_range(copy_buff, *byte_count);
1209 NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1210 }
1211 }
1212 }
1213 }
1214
1215 // If we are patching in a non-perm oop, make sure the nmethod
1216 // is on the right list.
1217 if (ScavengeRootsInCode && ((mirror.not_null() && mirror()->is_scavengable()) ||
1218 (appendix.not_null() && appendix->is_scavengable()))) {
|
193 Compilation::setup_code_buffer(&code, 0);
194
195 // create assembler for code generation
196 StubAssembler* sasm = new StubAssembler(&code, name_for(id), id);
197 // generate code for runtime stub
198 oop_maps = generate_code_for(id, sasm);
199 assert(oop_maps == NULL || sasm->frame_size() != no_frame_size,
200 "if stub has an oop map it must have a valid frame size");
201
202 #ifdef ASSERT
203 // Make sure that stubs that need oopmaps have them
204 switch (id) {
205 // These stubs don't need to have an oopmap
206 case dtrace_object_alloc_id:
207 case g1_pre_barrier_slow_id:
208 case g1_post_barrier_slow_id:
209 case slow_subtype_check_id:
210 case fpu2long_stub_id:
211 case unwind_exception_id:
212 case counter_overflow_id:
213 #if defined(SPARC) || defined(PPC32)
214 case handle_exception_nofpu_id: // Unused on sparc
215 #endif
216 break;
217
218 // All other stubs should have oopmaps
219 default:
220 assert(oop_maps != NULL, "must have an oopmap");
221 }
222 #endif
223
224 // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
225 sasm->align(BytesPerWord);
226 // make sure all code is in code buffer
227 sasm->flush();
228
229 frame_size = sasm->frame_size();
230 must_gc_arguments = sasm->must_gc_arguments();
231 } else {
232 /* ignored values */
233 oop_maps = NULL;
1080 }
1081
1082 if (TracePatching) {
1083 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1084 }
1085 }
1086 } else if (stub_id == Runtime1::load_appendix_patching_id) {
1087 NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
1088 assert(n_copy->data() == 0 ||
1089 n_copy->data() == (intptr_t)Universe::non_oop_word(),
1090 "illegal init value");
1091 n_copy->set_data(cast_from_oop<intx>(appendix()));
1092
1093 if (TracePatching) {
1094 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1095 }
1096 } else {
1097 ShouldNotReachHere();
1098 }
1099
1100 #if defined(SPARC) || defined(PPC32)
1101 if (load_klass_or_mirror_patch_id ||
1102 stub_id == Runtime1::load_appendix_patching_id) {
1103 // Update the location in the nmethod with the proper
1104 // metadata. When the code was generated, a NULL was stuffed
1105 // in the metadata table and that table needs to be updated to
1106 // have the right value. On intel the value is kept
1107 // directly in the instruction instead of in the metadata
1108 // table, so set_data above effectively updated the value.
1109 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1110 assert(nm != NULL, "invalid nmethod_pc");
1111 RelocIterator mds(nm, copy_buff, copy_buff + 1);
1112 bool found = false;
1113 while (mds.next() && !found) {
1114 if (mds.type() == relocInfo::oop_type) {
1115 assert(stub_id == Runtime1::load_mirror_patching_id ||
1116 stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1117 oop_Relocation* r = mds.oop_reloc();
1118 oop* oop_adr = r->oop_addr();
1119 *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1120 r->fix_oop_relocation();
1178 (stub_id == Runtime1::load_klass_patching_id) ?
1179 relocInfo::metadata_type :
1180 relocInfo::oop_type;
1181 // update relocInfo to metadata
1182 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1183 assert(nm != NULL, "invalid nmethod_pc");
1184
1185 // The old patch site is now a move instruction so update
1186 // the reloc info so that it will get updated during
1187 // future GCs.
1188 RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1189 relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1190 relocInfo::none, rtype);
1191 #ifdef SPARC
1192 // Sparc takes two relocations for a metadata so update the second one.
1193 address instr_pc2 = instr_pc + NativeMovConstReg::add_offset;
1194 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1195 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1196 relocInfo::none, rtype);
1197 #endif
1198 #ifdef PPC32
1199 { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1200 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1201 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1202 relocInfo::none, rtype);
1203 }
1204 #endif
1205 }
1206
1207 } else {
1208 ICache::invalidate_range(copy_buff, *byte_count);
1209 NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1210 }
1211 }
1212 }
1213 }
1214
1215 // If we are patching in a non-perm oop, make sure the nmethod
1216 // is on the right list.
1217 if (ScavengeRootsInCode && ((mirror.not_null() && mirror()->is_scavengable()) ||
1218 (appendix.not_null() && appendix->is_scavengable()))) {
|