
src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp

  return base;
}
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  return 7 * NativeInstruction::instruction_size;
}

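The constant 7 matches the stub that emit_to_interp_stub (above this excerpt) lays down. A sketch of the expansion, assuming mov_metadata and movptr each become a three-instruction movz/movk/movk sequence under the usual 48-bit address-space assumption:

  // mov_metadata(rmethod, NULL)  ->  movz; movk; movk   (3 instructions)
  // movptr(rscratch1, 0)         ->  movz; movk; movk   (3 instructions)
  // br(rscratch1)                ->  br                 (1 instruction)
  //                                  total: 7 * NativeInstruction::instruction_size
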
int CompiledStaticCall::to_trampoline_stub_size() {
  // Somewhat pessimistically, we count 3 instructions here (although
  // there are only two) because we sometimes emit an alignment nop.
  // Trampoline stubs are always word aligned.
  return 3 * NativeInstruction::instruction_size + wordSize;
}

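For reference, a sketch of the trampoline stub this size covers, assuming the usual AArch64 shape of a pc-relative load of a 64-bit literal placed straight after the branch (the nop appears only when needed to word-align the literal):

  //   nop                      (optional, for alignment)
  //   ldr  rscratch1, 0f       load the 64-bit destination
  //   br   rscratch1           jump to it
  // 0:
  //   .dword <destination>     wordSize bytes of data

That is at most 3 instructions plus one machine word, matching the return value above.
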
// Relocation entries for call stub, compiled Java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}

#if INCLUDE_AOT
#define __ _masm.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;
  }
  // The stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling AOT code.
  // mov r, imm64_aot_code_address
  // jmp r

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_aot_stub_size());
  guarantee(base != NULL, "out of space");

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */));
  // Load the destination AOT code address.
  __ movptr(rscratch1, 0);  // The address is zapped until fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ br(rscratch1);

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update the current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

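Once the call is bound, the patched stub should be the five-instruction sequence that the size functions below count. A sketch, assuming that under INCLUDE_AOT movptr reserves a full four-instruction mov-immediate so a 64-bit AOT code address fits (an AOT library can be mapped anywhere in the address space):

  //   movz rscratch1, #imm16_0           bits  0..15 of the AOT entry point
  //   movk rscratch1, #imm16_1, lsl 16   bits 16..31
  //   movk rscratch1, #imm16_2, lsl 32   bits 32..47
  //   movk rscratch1, #imm16_3, lsl 48   bits 48..63
  //   br   rscratch1                     5 instructions * 4 bytes = 20 bytes
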
int CompiledStaticCall::to_aot_stub_size() {
  if (UseAOT) {
    return 5 * 4;  // movz; movk; movk; movk; br
  } else {
    return 0;
  }
}

// Relocation entries for call stub, compiled Java to AOT.
int CompiledStaticCall::reloc_to_aot_stub() {
  if (UseAOT) {
    return 5 * 4;  // Conservative: reuses the stub byte count (movz; movk; movk; movk; br).
  } else {
    return 0;
  }
}
#endif // INCLUDE_AOT

void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef PRODUCT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  // read the value once
  volatile intptr_t data = method_holder->data();
  assert(data == 0 || data == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
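
The volatile read-once above protects the assert against concurrent patching: another thread may be flipping the stub's data word from 0 to the callee while this thread inspects it, and re-reading the word for each comparison could observe two different values. A minimal standalone illustration of the idiom (hypothetical names, not HotSpot code):

  #include <cassert>
  #include <cstdint>

  // Stand-in for method_holder->data(): a word that another thread may
  // patch from 0 to a callee address at any time.
  volatile std::intptr_t patchable_word = 0;

  void check_consistent(std::intptr_t expected_callee) {
    // One load into a local: every comparison below sees the same snapshot.
    // Testing patchable_word directly in each comparison could pass the
    // first test with 0 and then fail the second with a freshly patched value.
    std::intptr_t data = patchable_word;
    assert(data == 0 || data == expected_callee);
  }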

