/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// AArch64 platform support for compiled inline caches (ICs) and the
// static-call stubs that let a compiled direct call be retargeted to the
// interpreter (or, with INCLUDE_AOT, to AOT-compiled code) without
// patching the call site itself.

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// ----------------------------------------------------------------------------

#define __ _masm.
// Emit the compiled-to-interpreter stub into the stubs section of cbuf.
// 'mark' is the address of the corresponding call instruction (recorded in
// the static_stub_Relocation so the stub can later be found from the call);
// if NULL, the current insts mark of cbuf is used.  Returns the stub's start
// address, or NULL if the code buffer could not be expanded.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();  // remember where the stub body begins, for the size assert below
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

#if INCLUDE_AOT
  // Don't create a Metadata reloc if we're generating immutable PIC.
  if (cbuf.immutable_PIC()) {
    // Hand-emit the stub body: load rmethod and the branch target with
    // zapped (zero) immediates; both are patched at resolution time.
    __ movptr(rmethod, 0);
    __ movptr(rscratch1, 0);
    __ br(rscratch1);

  } else
#endif
  {
    __ emit_static_call_stub();
  }

  // The emitted body must fit within the size budget that callers reserved
  // via to_interp_stub_size().
  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
#undef __

// Worst-case size in bytes of the stub emitted by emit_to_interp_stub().
int CompiledStaticCall::to_interp_stub_size() {
  // isb; movk; movz; movz; movk; movz; movz; br
  if (!Use64BitLiteralAddresses) {
    return 8 * NativeInstruction::instruction_size;
  } else {
    // emit_to_interp_stub will emit 2 extra movk instructions.
    // NOTE(review): Use64BitLiteralAddresses appears to be a port-specific
    // flag (not in mainline emit paths shown here) — the extra movks widen
    // the literal loads to full 64-bit immediates.
    return 10 * NativeInstruction::instruction_size;
  }
}

// Worst-case size in bytes of a trampoline stub.
int CompiledStaticCall::to_trampoline_stub_size() {
  // Somewhat pessimistically, we count 3 instructions here (although
  // there are only two) because we sometimes emit an alignment nop.
  // Trampoline stubs are always word aligned.
  return 3 * NativeInstruction::instruction_size + wordSize;
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}

#if INCLUDE_AOT
#define __ _masm.
// Emit the compiled-to-AOT stub.  Unlike emit_to_interp_stub(), running out
// of stub space here is fatal (guarantee) rather than reported via NULL.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;  // no AOT in use: no stub needed, and to_aot_stub_size() is 0
  }
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling aot code.
  // mov r, imm64_aot_code_address
  // jmp r

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(to_aot_stub_size());
  guarantee(base != NULL, "out of space");

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */));
  // Load destination AOT code address.
  __ movptr(rscratch1, 0);  // address is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ br(rscratch1);

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

// Size in bytes of the stub emitted by emit_to_aot_stub() (0 when AOT is off).
int CompiledStaticCall::to_aot_stub_size() {
  if (UseAOT) {
    return 5 * 4;  // movz; movk; movk; movk; br
  } else {
    return 0;
  }
}

// Relocation entries for call stub, compiled java to aot.
int CompiledStaticCall::reloc_to_aot_stub() {
  if (UseAOT) {
    return 5 * 4;  // movz; movk; movk; movk; br
  } else {
    return 0;
  }
}
#endif // INCLUDE_AOT

// Retarget this direct static call so it dispatches to the interpreter:
// patch the stub's method-holder load with 'callee' and its jump with
// 'entry', then MT-safely point the call instruction at the stub.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  // The method-holder mov sits one instruction past the stub start (the
  // first instruction is skipped — consistent with set_stub_to_clean below).
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
#ifndef PRODUCT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  // read the value once
  // Sanity check: the stub must be either clean (data == 0) or already set
  // to exactly this callee/entry; anything else means a racing mutation.
  volatile intptr_t data = method_holder->data();
  assert(data == 0 || data == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(data == 0 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");
#endif
  // Update stub: write the method holder first, then the jump, so the stub
  // is fully formed before the call site is redirected to it.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  ICache::invalidate_range(stub, to_interp_stub_size());
  // Update jump to call.
  set_destination_mt_safe(stub);
}

// Reset a static-call stub to the clean state by zapping its method-holder
// constant; must be called under the CompiledICLocker.
void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  method_holder->set_data(0);  // data == 0 marks the stub as clean
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

// Debug-build consistency check of the call instruction, its stub, and the
// IC state.  The Native* accessors verify instruction shape on construction.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub(false /* is_aot */);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  // 'jump' is constructed only for its verification side effect.
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT