--- a/nativeInst_aarch32.cpp
+++ b/nativeInst_aarch32.cpp
@@ -21,40 +21,43 @@
  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  * or visit www.oracle.com if you need additional information or have any
  * questions.
  *
  */
 
 #include "precompiled.hpp"
 #include "asm/macroAssembler.hpp"
 #include "code/codeCache.hpp"
 #include "memory/resourceArea.hpp"
 #include "nativeInst_aarch32.hpp"
 #include "oops/oop.inline.hpp"
 #include "runtime/handles.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
 #include "utilities/ostream.hpp"
 #ifdef COMPILER1
 #include "c1/c1_Runtime1.hpp"
 #endif
 
+// LIRAssembler fills the patching site with nops up to NativeCall::instruction_size
+static const int patching_copy_buff_len = NativeCall::instruction_size;
+
 NativeInstruction* NativeInstruction::from(address addr) {
   return (NativeInstruction*) addr;
 }
 
 //-------------------------------------------------------------------
 
 void NativeCall::verify() {
   if (!is_call()) {
     fatal("not a call");
   }
 }
 
 address NativeCall::destination() const {
   assert(is_call(), "not a call");
   if (NativeImmCall::is_at(addr())) {
     return NativeImmCall::from(addr())->destination();
   } else if (NativeMovConstReg::is_at(addr())) {
     return address(NativeMovConstReg::from(addr())->data());
   } else if (NativeTrampolineCall::is_at(addr())) {
     return NativeTrampolineCall::from(addr())->destination();
@@ -544,34 +547,45 @@
   *(juint*)code_pos = 0xe7fdeafd; // udf #0xdead
 }
 
 //-------------------------------------------------------------------
 
 void NativeGeneralJump::verify() { }
 
 void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
   NativeGeneralJump* n_jump = (NativeGeneralJump*)code_pos;
 
   CodeBuffer cb(code_pos, instruction_size);
   MacroAssembler a(&cb);
 
   a.b(entry);
 
   ICache::invalidate_range(code_pos, instruction_size);
 }
 
 // MT-safe patching of a long jump instruction.
 void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
-  // FIXME NativeCall from patching_epilog nops filling
-  const int bytes_to_copy = NativeCall::instruction_size;
-  const address patching_switch_addr = code_buffer + bytes_to_copy;
+  const address patching_switch_addr = code_buffer + patching_copy_buff_len;
   NativeImmJump* patching_switch = NativeImmJump::from(patching_switch_addr);
+  assert(!NativeInstruction::from(instr_addr)->is_patched_already(), "not patched yet");
   assert(patching_switch->destination() == patching_switch_addr + NativeInstruction::arm_insn_sz,
          "switch should be branch to next instr at this point");
-  patching_switch->set_destination(instr_addr + bytes_to_copy);
+  patching_switch->set_destination(instr_addr + patching_copy_buff_len);
   ICache::invalidate_word(patching_switch_addr);
 
   NativeImmJump* nj = NativeImmJump::from(instr_addr); // checking that it is a jump
   nj->set_destination(code_buffer);
   ICache::invalidate_word(instr_addr);
 
+  assert(NativeInstruction::from(instr_addr)->is_patched_already(), "should be patched already");
 }
+
+bool NativeInstruction::is_patched_already() const {
+  if (NativeImmJump::is_at(addr())) {
+    address maybe_copy_buff = NativeImmJump::from(addr())->destination();
+    address maybe_patching_switch = maybe_copy_buff + patching_copy_buff_len;
+    if (NativeImmJump::is_at(maybe_patching_switch)) {
+      return NativeImmJump::from(maybe_patching_switch)->destination() == addr() + patching_copy_buff_len;
+    }
+  }
+  return false;
+}
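The new NativeInstruction::is_patched_already() simply re-traces the layout that replace_mt_safe() leaves behind: after patching, the first word of the patch site is an immediate jump to the copy buffer, and the patching switch sitting patching_copy_buff_len bytes into that buffer jumps back to the continuation at instr_addr + patching_copy_buff_len. The predicate returns true only when both jumps are present and form that round trip. Below is a minimal standalone sketch of that check, not HotSpot code: it models immediate jumps with a lookup table instead of real ARM instructions, and every name in it (kCopyBuffLen, patch_site, already_patched) is made up for illustration.

    #include <cassert>
    #include <cstddef>
    #include <map>

    // Stands in for patching_copy_buff_len (really NativeCall::instruction_size).
    static const std::ptrdiff_t kCopyBuffLen = 16;

    // addr -> branch target; an entry here means "an immediate jump sits at addr".
    static std::map<const char*, const char*> jumps;

    // Mirrors replace_mt_safe(): retarget the patching switch in the stub to the
    // continuation, then turn the first word of the site into a jump to the stub.
    static void patch_site(const char* instr_addr, const char* code_buffer) {
      jumps[code_buffer + kCopyBuffLen] = instr_addr + kCopyBuffLen;
      jumps[instr_addr] = code_buffer;
    }

    // Mirrors is_patched_already(): follow the jump at the site to the would-be
    // copy buffer and check that the switch behind it branches back past the copy.
    static bool already_patched(const char* instr_addr) {
      auto it = jumps.find(instr_addr);
      if (it == jumps.end()) return false;     // no jump at the site yet
      const char* maybe_copy_buff = it->second;
      auto sw = jumps.find(maybe_copy_buff + kCopyBuffLen);
      return sw != jumps.end() && sw->second == instr_addr + kCopyBuffLen;
    }

    int main() {
      char site[64] = {};  // pretend the patchable instructions live here
      char stub[64] = {};  // pretend the patching stub (copy buffer + switch) lives here
      assert(!already_patched(site));          // matches the "not patched yet" assert
      patch_site(site, stub);
      assert(already_patched(site));           // matches the "should be patched already" assert
      return 0;
    }

Running the sketch exercises the same before/after conditions as the two asserts added to replace_mt_safe().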