// --- Review note ------------------------------------------------------------
// Excerpt of a runtime-stub blob-generation routine.  The leading "NNN " token
// on each line is an embedded line number from the original dump, not code.
// The enclosing function begins before this excerpt, so 'code', 'oop_maps',
// 'frame_size' and 'must_gc_arguments' are declared out of view.
// ----------------------------------------------------------------------------
193 Compilation::setup_code_buffer(&code, 0);
194
195 // create assembler for code generation
196 StubAssembler* sasm = new StubAssembler(&code, name_for(id), id);
197 // generate code for runtime stub
198 oop_maps = generate_code_for(id, sasm);
// A stub that reported an oop map must also have reported a real frame size.
199 assert(oop_maps == NULL || sasm->frame_size() != no_frame_size,
200 "if stub has an oop map it must have a valid frame size");
201
202 #ifdef ASSERT
203 // Make sure that stubs that need oopmaps have them
204 switch (id) {
205 // These stubs don't need to have an oopmap
206 case dtrace_object_alloc_id:
207 case g1_pre_barrier_slow_id:
208 case g1_post_barrier_slow_id:
209 case slow_subtype_check_id:
210 case fpu2long_stub_id:
211 case unwind_exception_id:
212 case counter_overflow_id:
// NOTE(review): a later copy of this same excerpt in this file guards the
// case below with defined(PPC32) instead of defined(PPC) -- confirm which
// macro spelling the build actually defines for 32-bit PPC.
213 #if defined(SPARC) || defined(PPC)
214 case handle_exception_nofpu_id: // Unused on sparc
215 #endif
216 break;
217
218 // All other stubs should have oopmaps
219 default:
220 assert(oop_maps != NULL, "must have an oopmap");
221 }
222 #endif
223
224 // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
225 sasm->align(BytesPerWord);
226 // make sure all code is in code buffer
227 sasm->flush();
228
// Record the assembler-reported frame size and GC-argument flag for the blob.
229 frame_size = sasm->frame_size();
230 must_gc_arguments = sasm->must_gc_arguments();
231 } else {
232 /* ignored values */
233 oop_maps = NULL;
// --- Review note ------------------------------------------------------------
// Excerpt of a code-patching routine; it begins and ends mid-function, and the
// embedded numbering jumps (…1119 -> 1177…) show an elided region, so the
// exact brace nesting cannot be fully verified from this view.
// ----------------------------------------------------------------------------
1079 }
1080
1081 if (TracePatching) {
1082 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1083 }
1084 }
1085 } else if (stub_id == Runtime1::load_appendix_patching_id) {
1086 NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
// The copied instruction must still hold its sentinel value (0 or
// non_oop_word) before the real appendix oop is installed.
1087 assert(n_copy->data() == 0 ||
1088 n_copy->data() == (intptr_t)Universe::non_oop_word(),
1089 "illegal init value");
1090 n_copy->set_data(cast_from_oop<intx>(appendix()));
1091
1092 if (TracePatching) {
1093 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1094 }
1095 } else {
1096 ShouldNotReachHere();
1097 }
1098
// NOTE(review): a later copy of this excerpt in this file spells the PPC
// guards below (here and at "#ifdef PPC") as PPC32 -- confirm which macro
// the build defines.
1099 #if defined(SPARC) || defined(PPC)
1100 if (load_klass_or_mirror_patch_id ||
1101 stub_id == Runtime1::load_appendix_patching_id) {
1102 // Update the location in the nmethod with the proper
1103 // metadata. When the code was generated, a NULL was stuffed
1104 // in the metadata table and that table needs to be update to
1105 // have the right value. On intel the value is kept
1106 // directly in the instruction instead of in the metadata
1107 // table, so set_data above effectively updated the value.
1108 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1109 assert(nm != NULL, "invalid nmethod_pc");
// Walk relocations covering the copied instruction and install the mirror or
// appendix oop into the matching oop-table slot.
1110 RelocIterator mds(nm, copy_buff, copy_buff + 1);
1111 bool found = false;
1112 while (mds.next() && !found) {
1113 if (mds.type() == relocInfo::oop_type) {
1114 assert(stub_id == Runtime1::load_mirror_patching_id ||
1115 stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1116 oop_Relocation* r = mds.oop_reloc();
1117 oop* oop_adr = r->oop_addr();
1118 *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1119 r->fix_oop_relocation();
// NOTE(review): embedded numbering jumps from 1119 to 1177 here -- the
// intervening source is not part of this excerpt.
1177 (stub_id == Runtime1::load_klass_patching_id) ?
1178 relocInfo::metadata_type :
1179 relocInfo::oop_type;
1180 // update relocInfo to metadata
1181 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1182 assert(nm != NULL, "invalid nmethod_pc");
1183
1184 // The old patch site is now a move instruction so update
1185 // the reloc info so that it will get updated during
1186 // future GCs.
1187 RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1188 relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1189 relocInfo::none, rtype);
1190 #ifdef SPARC
1191 // Sparc takes two relocations for an metadata so update the second one.
1192 address instr_pc2 = instr_pc + NativeMovConstReg::add_offset;
1193 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1194 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1195 relocInfo::none, rtype);
1196 #endif
1197 #ifdef PPC
1198 { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1199 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1200 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1201 relocInfo::none, rtype);
1202 }
1203 #endif
1204 }
1205
1206 } else {
// Non-patching path: flush the icache over the copied bytes and plant an
// unconditional jump back to the being-initialized entry.
1207 ICache::invalidate_range(copy_buff, *byte_count);
1208 NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1209 }
1210 }
1211 }
1212 }
1213
1214 // If we are patching in a non-perm oop, make sure the nmethod
1215 // is on the right list.
1216 if (ScavengeRootsInCode && ((mirror.not_null() && mirror()->is_scavengable()) ||
1217 (appendix.not_null() && appendix->is_scavengable()))) {
|
// --- Review note ------------------------------------------------------------
// Second copy of the stub blob-generation excerpt (leading "NNN " tokens are
// embedded line numbers from the dump, not code).  Identical to the earlier
// copy in this file except that the platform guard below uses PPC32 rather
// than PPC.
// ----------------------------------------------------------------------------
193 Compilation::setup_code_buffer(&code, 0);
194
195 // create assembler for code generation
196 StubAssembler* sasm = new StubAssembler(&code, name_for(id), id);
197 // generate code for runtime stub
198 oop_maps = generate_code_for(id, sasm);
// A stub that reported an oop map must also have reported a real frame size.
199 assert(oop_maps == NULL || sasm->frame_size() != no_frame_size,
200 "if stub has an oop map it must have a valid frame size");
201
202 #ifdef ASSERT
203 // Make sure that stubs that need oopmaps have them
204 switch (id) {
205 // These stubs don't need to have an oopmap
206 case dtrace_object_alloc_id:
207 case g1_pre_barrier_slow_id:
208 case g1_post_barrier_slow_id:
209 case slow_subtype_check_id:
210 case fpu2long_stub_id:
211 case unwind_exception_id:
212 case counter_overflow_id:
// NOTE(review): the earlier copy of this excerpt spells this guard PPC, not
// PPC32 -- the two copies should agree on one macro spelling.
213 #if defined(SPARC) || defined(PPC32)
214 case handle_exception_nofpu_id: // Unused on sparc
215 #endif
216 break;
217
218 // All other stubs should have oopmaps
219 default:
220 assert(oop_maps != NULL, "must have an oopmap");
221 }
222 #endif
223
224 // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
225 sasm->align(BytesPerWord);
226 // make sure all code is in code buffer
227 sasm->flush();
228
// Record the assembler-reported frame size and GC-argument flag for the blob.
229 frame_size = sasm->frame_size();
230 must_gc_arguments = sasm->must_gc_arguments();
231 } else {
232 /* ignored values */
233 oop_maps = NULL;
// --- Review note ------------------------------------------------------------
// Second copy of the code-patching excerpt; it begins and ends mid-function,
// and the embedded numbering jumps (…1119 -> 1177…) show an elided region.
// Identical to the earlier copy except the PPC guards are spelled PPC32.
// ----------------------------------------------------------------------------
1079 }
1080
1081 if (TracePatching) {
1082 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1083 }
1084 }
1085 } else if (stub_id == Runtime1::load_appendix_patching_id) {
1086 NativeMovConstReg* n_copy = nativeMovConstReg_at(copy_buff);
// The copied instruction must still hold its sentinel value (0 or
// non_oop_word) before the real appendix oop is installed.
1087 assert(n_copy->data() == 0 ||
1088 n_copy->data() == (intptr_t)Universe::non_oop_word(),
1089 "illegal init value");
1090 n_copy->set_data(cast_from_oop<intx>(appendix()));
1091
1092 if (TracePatching) {
1093 Disassembler::decode(copy_buff, copy_buff + *byte_count, tty);
1094 }
1095 } else {
1096 ShouldNotReachHere();
1097 }
1098
// NOTE(review): the earlier copy of this excerpt spells the guards below
// PPC, not PPC32 -- the two copies should agree on one macro spelling.
1099 #if defined(SPARC) || defined(PPC32)
1100 if (load_klass_or_mirror_patch_id ||
1101 stub_id == Runtime1::load_appendix_patching_id) {
1102 // Update the location in the nmethod with the proper
1103 // metadata. When the code was generated, a NULL was stuffed
1104 // in the metadata table and that table needs to be update to
1105 // have the right value. On intel the value is kept
1106 // directly in the instruction instead of in the metadata
1107 // table, so set_data above effectively updated the value.
1108 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1109 assert(nm != NULL, "invalid nmethod_pc");
// Walk relocations covering the copied instruction and install the mirror or
// appendix oop into the matching oop-table slot.
1110 RelocIterator mds(nm, copy_buff, copy_buff + 1);
1111 bool found = false;
1112 while (mds.next() && !found) {
1113 if (mds.type() == relocInfo::oop_type) {
1114 assert(stub_id == Runtime1::load_mirror_patching_id ||
1115 stub_id == Runtime1::load_appendix_patching_id, "wrong stub id");
1116 oop_Relocation* r = mds.oop_reloc();
1117 oop* oop_adr = r->oop_addr();
1118 *oop_adr = stub_id == Runtime1::load_mirror_patching_id ? mirror() : appendix();
1119 r->fix_oop_relocation();
// NOTE(review): embedded numbering jumps from 1119 to 1177 here -- the
// intervening source is not part of this excerpt.
1177 (stub_id == Runtime1::load_klass_patching_id) ?
1178 relocInfo::metadata_type :
1179 relocInfo::oop_type;
1180 // update relocInfo to metadata
1181 nmethod* nm = CodeCache::find_nmethod(instr_pc);
1182 assert(nm != NULL, "invalid nmethod_pc");
1183
1184 // The old patch site is now a move instruction so update
1185 // the reloc info so that it will get updated during
1186 // future GCs.
1187 RelocIterator iter(nm, (address)instr_pc, (address)(instr_pc + 1));
1188 relocInfo::change_reloc_info_for_address(&iter, (address) instr_pc,
1189 relocInfo::none, rtype);
1190 #ifdef SPARC
1191 // Sparc takes two relocations for an metadata so update the second one.
1192 address instr_pc2 = instr_pc + NativeMovConstReg::add_offset;
1193 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1194 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1195 relocInfo::none, rtype);
1196 #endif
1197 #ifdef PPC32
1198 { address instr_pc2 = instr_pc + NativeMovConstReg::lo_offset;
1199 RelocIterator iter2(nm, instr_pc2, instr_pc2 + 1);
1200 relocInfo::change_reloc_info_for_address(&iter2, (address) instr_pc2,
1201 relocInfo::none, rtype);
1202 }
1203 #endif
1204 }
1205
1206 } else {
// Non-patching path: flush the icache over the copied bytes and plant an
// unconditional jump back to the being-initialized entry.
1207 ICache::invalidate_range(copy_buff, *byte_count);
1208 NativeGeneralJump::insert_unconditional(instr_pc, being_initialized_entry);
1209 }
1210 }
1211 }
1212 }
1213
1214 // If we are patching in a non-perm oop, make sure the nmethod
1215 // is on the right list.
1216 if (ScavengeRootsInCode && ((mirror.not_null() && mirror()->is_scavengable()) ||
1217 (appendix.not_null() && appendix->is_scavengable()))) {
|