// Byte offset from the end of the patch site back to the embedded patch info.
// NOTE(review): -(5 * BytesPerInstWord) presumably matches the 5-instruction
// constant-load sequence emitted by load_const below — confirm against
// MacroAssembler::load_const on ppc.
int PatchingStub::_patch_info_offset = -(5 * BytesPerInstWord);
306
// Intentionally a no-op on this platform (see comment in the body); the
// MacroAssembler argument is unused and therefore unnamed.
void PatchingStub::align_patch_site(MacroAssembler* ) {
  // Patch sites on ppc are always properly aligned.
}
310
#ifdef ASSERT
// Debug-only check that the code currently at pc_start is byte-for-byte
// identical to the template just emitted at template_start.
inline void compare_with_patch_site(address template_start, address pc_start, int bytes_to_copy) {
  address expected = template_start;
  address actual   = pc_start;
  for (int i = 0; i < bytes_to_copy; i++, actual++, expected++) {
    int a_byte = (*actual) & 0xFF;
    assert(a_byte == *expected, "should be the same code");
  }
}
#endif
321
322 void PatchingStub::emit_code(LIR_Assembler* ce) {
323 // copy original code here
324 assert(NativeGeneralJump::instruction_size <= _bytes_to_copy && _bytes_to_copy <= 0xFF,
325 "not enough room for call");
326 assert((_bytes_to_copy & 0x3) == 0, "must copy a multiple of four bytes");
327
328 Label call_patch;
329
330 int being_initialized_entry = __ offset();
331
332 if (_id == load_klass_id) {
333 // Produce a copy of the load klass instruction for use by the being initialized case.
334 AddressLiteral addrlit((address)NULL, metadata_Relocation::spec(_index));
335 __ load_const(_obj, addrlit, R0);
336 DEBUG_ONLY( compare_with_patch_site(__ code_section()->start() + being_initialized_entry, _pc_start, _bytes_to_copy); )
337 } else if (_id == load_mirror_id || _id == load_appendix_id) {
338 // Produce a copy of the load mirror instruction for use by the being initialized case.
339 AddressLiteral addrlit((address)NULL, oop_Relocation::spec(_index));
340 __ load_const(_obj, addrlit, R0);
341 DEBUG_ONLY( compare_with_patch_site(__ code_section()->start() + being_initialized_entry, _pc_start, _bytes_to_copy); )
342 } else {
343 // Make a copy of the code which is going to be patched.
344 for (int i = 0; i < _bytes_to_copy; i++) {
345 address ptr = (address)(_pc_start + i);
346 int a_byte = (*ptr) & 0xFF;
347 __ emit_int8 (a_byte);
348 }
349 }
350
351 address end_of_patch = __ pc();
352 int bytes_to_skip = 0;
353 if (_id == load_mirror_id) {
354 int offset = __ offset();
355 __ block_comment(" being_initialized check");
356
357 // Static field accesses have special semantics while the class
358 // initializer is being run so we emit a test which can be used to
359 // check that this code is being executed by the initializing
360 // thread.
361 assert(_obj != noreg, "must be a valid register");
362 assert(_index >= 0, "must have oop index");
363 __ mr(R0, _obj); // spill
|
// Byte offset from the end of the patch site back to the embedded patch info.
// NOTE(review): -(5 * BytesPerInstWord) presumably matches the 5-instruction
// constant-load sequence emitted by load_const below — confirm against
// MacroAssembler::load_const on ppc.
int PatchingStub::_patch_info_offset = -(5 * BytesPerInstWord);
306
// Intentionally a no-op on this platform (see comment in the body); the
// MacroAssembler argument is unused and therefore unnamed.
void PatchingStub::align_patch_site(MacroAssembler* ) {
  // Patch sites on ppc are always properly aligned.
}
310
#ifdef ASSERT
// Debug-only check that the code currently at pc_start is byte-for-byte
// identical to the template just emitted at template_start.
inline void compare_with_patch_site(address template_start, address pc_start, int bytes_to_copy) {
  address expected = template_start;
  address actual   = pc_start;
  for (int i = 0; i < bytes_to_copy; i++, actual++, expected++) {
    int a_byte = (*actual) & 0xFF;
    assert(a_byte == *expected, "should be the same code");
  }
}
#endif
321
322 void PatchingStub::emit_code(LIR_Assembler* ce) {
323 // copy original code here
324 assert(NativeGeneralJump::instruction_size <= _bytes_to_copy && _bytes_to_copy <= 0xFF,
325 "not enough room for call, need %d", _bytes_to_copy);
326 assert((_bytes_to_copy & 0x3) == 0, "must copy a multiple of four bytes");
327
328 Label call_patch;
329
330 int being_initialized_entry = __ offset();
331
332 if (_id == load_klass_id) {
333 // Produce a copy of the load klass instruction for use by the being initialized case.
334 AddressLiteral addrlit((address)NULL, metadata_Relocation::spec(_index));
335 __ load_const(_obj, addrlit, R0);
336 DEBUG_ONLY( compare_with_patch_site(__ code_section()->start() + being_initialized_entry, _pc_start, _bytes_to_copy); )
337 } else if (_id == load_mirror_id || _id == load_appendix_id) {
338 // Produce a copy of the load mirror instruction for use by the being initialized case.
339 AddressLiteral addrlit((address)NULL, oop_Relocation::spec(_index));
340 __ load_const(_obj, addrlit, R0);
341 DEBUG_ONLY( compare_with_patch_site(__ code_section()->start() + being_initialized_entry, _pc_start, _bytes_to_copy); )
342 } else {
343 // Make a copy of the code which is going to be patched.
344 for (int i = 0; i < _bytes_to_copy; i++) {
345 address ptr = (address)(_pc_start + i);
346 int a_byte = (*ptr) & 0xFF;
347 __ emit_int8 (a_byte);
348 }
349 }
350
351 address end_of_patch = __ pc();
352 int bytes_to_skip = 0;
353 if (_id == load_mirror_id) {
354 int offset = __ offset();
355 __ block_comment(" being_initialized check");
356
357 // Static field accesses have special semantics while the class
358 // initializer is being run so we emit a test which can be used to
359 // check that this code is being executed by the initializing
360 // thread.
361 assert(_obj != noreg, "must be a valid register");
362 assert(_index >= 0, "must have oop index");
363 __ mr(R0, _obj); // spill
|