277
278 void PatchingStub::align_patch_site(MacroAssembler* masm) {
279 #ifndef PRODUCT
280 const char* bc;
281 switch (_id) {
282 case access_field_id: bc = "patch site (access_field)"; break;
283 case load_klass_id: bc = "patch site (load_klass)"; break;
284 case load_mirror_id: bc = "patch site (load_mirror)"; break;
285 case load_appendix_id: bc = "patch site (load_appendix)"; break;
286 default: bc = "patch site (unknown patch id)"; break;
287 }
288 masm->block_comment(bc);
289 #endif
290
291 masm->align(align_up((int)NativeGeneralJump::instruction_size, wordSize));
292 }
293
294 void PatchingStub::emit_code(LIR_Assembler* ce) {
295 // Copy original code here.
296 assert(NativeGeneralJump::instruction_size <= _bytes_to_copy && _bytes_to_copy <= 0xFF,
297 "not enough room for call");
298
299 NearLabel call_patch;
300
301 int being_initialized_entry = __ offset();
302
303 if (_id == load_klass_id) {
304 // Produce a copy of the load klass instruction for use by the case being initialized.
305 #ifdef ASSERT
306 address start = __ pc();
307 #endif
308 AddressLiteral addrlit((intptr_t)0, metadata_Relocation::spec(_index));
309 __ load_const(_obj, addrlit);
310
311 #ifdef ASSERT
312 for (int i = 0; i < _bytes_to_copy; i++) {
313 address ptr = (address)(_pc_start + i);
314 int a_byte = (*ptr) & 0xFF;
315 assert(a_byte == *start++, "should be the same code");
316 }
317 #endif
318 } else if (_id == load_mirror_id || _id == load_appendix_id) {
319 // Produce a copy of the load mirror instruction for use by the case being initialized.
320 #ifdef ASSERT
321 address start = __ pc();
322 #endif
323 AddressLiteral addrlit((intptr_t)0, oop_Relocation::spec(_index));
324 __ load_const(_obj, addrlit);
325
326 #ifdef ASSERT
327 for (int i = 0; i < _bytes_to_copy; i++) {
328 address ptr = (address)(_pc_start + i);
329 int a_byte = (*ptr) & 0xFF;
330 assert(a_byte == *start++, "should be the same code");
331 }
332 #endif
333 } else {
334 // Make a copy of the code which is going to be patched.
335 for (int i = 0; i < _bytes_to_copy; i++) {
336 address ptr = (address)(_pc_start + i);
337 int a_byte = (*ptr) & 0xFF;
338 __ emit_int8 (a_byte);
339 }
340 }
341
342 address end_of_patch = __ pc();
343 int bytes_to_skip = 0;
344 if (_id == load_mirror_id) {
345 int offset = __ offset();
346 if (CommentedAssembly) {
347 __ block_comment(" being_initialized check");
348 }
349
350 // Static field accesses have special semantics while the class
351 // initializer is being run, so we emit a test which can be used to
352 // check that this code is being executed by the initializing
353 // thread.
354 assert(_obj != noreg, "must be a valid register");
|
277
// Pad the code buffer so that the jump later written over this patch
// site will not straddle an alignment boundary.
278 void PatchingStub::align_patch_site(MacroAssembler* masm) {
279 #ifndef PRODUCT
// Non-product builds only: label the site with a readable block comment
// naming the patch kind, for disassembly inspection.
280 const char* bc;
281 switch (_id) {
282 case access_field_id: bc = "patch site (access_field)"; break;
283 case load_klass_id: bc = "patch site (load_klass)"; break;
284 case load_mirror_id: bc = "patch site (load_mirror)"; break;
285 case load_appendix_id: bc = "patch site (load_appendix)"; break;
286 default: bc = "patch site (unknown patch id)"; break;
287 }
288 masm->block_comment(bc);
289 #endif
290
// Round the jump's instruction size up to a word multiple and align the
// current emission point to that boundary.
291 masm->align(align_up((int)NativeGeneralJump::instruction_size, wordSize));
292 }
293
294 void PatchingStub::emit_code(LIR_Assembler* ce) {
295 // Copy original code here.
296 assert(NativeGeneralJump::instruction_size <= _bytes_to_copy && _bytes_to_copy <= 0xFF,
297 "not enough room for call, need %d", _bytes_to_copy);
298
299 NearLabel call_patch;
300
301 int being_initialized_entry = __ offset();
302
303 if (_id == load_klass_id) {
304 // Produce a copy of the load klass instruction for use by the case being initialized.
305 #ifdef ASSERT
306 address start = __ pc();
307 #endif
308 AddressLiteral addrlit((intptr_t)0, metadata_Relocation::spec(_index));
309 __ load_const(_obj, addrlit);
310
311 #ifdef ASSERT
312 for (int i = 0; i < _bytes_to_copy; i++) {
313 address ptr = (address)(_pc_start + i);
314 int a_byte = (*ptr) & 0xFF;
315 assert(a_byte == *start++, "should be the same code");
316 }
317 #endif
318 } else if (_id == load_mirror_id || _id == load_appendix_id) {
319 // Produce a copy of the load mirror instruction for use by the case being initialized.
320 #ifdef ASSERT
321 address start = __ pc();
322 #endif
323 AddressLiteral addrlit((intptr_t)0, oop_Relocation::spec(_index));
324 __ load_const(_obj, addrlit);
325
326 #ifdef ASSERT
327 for (int i = 0; i < _bytes_to_copy; i++) {
328 address ptr = (address)(_pc_start + i);
329 int a_byte = (*ptr) & 0xFF;
330 assert(a_byte == *start++, "should be the same code");
331 }
332 #endif
333 } else {
334 // Make a copy of the code which is going to be patched.
335 for (int i = 0; i < _bytes_to_copy; i++) {
336 address ptr = (address)(_pc_start + i);
337 int a_byte = (*ptr) & 0xFF;
338 __ emit_int8 (a_byte);
339 }
340 }
341
342 address end_of_patch = __ pc();
343 int bytes_to_skip = 0;
344 if (_id == load_mirror_id) {
345 int offset = __ offset();
346 if (CommentedAssembly) {
347 __ block_comment(" being_initialized check");
348 }
349
350 // Static field accesses have special semantics while the class
351 // initializer is being run, so we emit a test which can be used to
352 // check that this code is being executed by the initializing
353 // thread.
354 assert(_obj != noreg, "must be a valid register");
|