/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif

// ----------------------------------------------------------------------------

#define __ _masm.
// Emit the compiled-to-interpreter stub for a static call into cbuf's stub
// section.  The stub body is two placeholders that set_to_interpreted()
// patches later: a metadata (Method*) load into the inline-cache register,
// and a jump whose target starts out as -1.
//
// 'mark' is the instruction address of the call site this stub belongs to
// (recorded in the static_stub_Relocation); when NULL, the current mark of
// cbuf's main instructions section is used.
//
// Returns the start address of the emitted stub, or NULL if the code buffer
// could not be expanded to hold it.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));

  // Placeholder Method* load; set_to_interpreted() patches in the callee.
  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  // Placeholder jump target (-1); patched to the interpreter entry point by
  // set_to_interpreted(), reset back to -1 by set_stub_to_clean().
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

  // Fill the jump's delay slot.
  __ delayed()->nop();

  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
  return base;
}
#undef __

int CompiledStaticCall::to_trampoline_stub_size() {
  // SPARC doesn't use trampolines.
  return 0;
}

// Upper bound on the size of the stub emitted by emit_to_interp_stub().
int CompiledStaticCall::to_interp_stub_size() {
  // This doesn't need to be accurate but it must be larger or equal to
  // the real size of the stub.
  return (NativeMovConstReg::instruction_size +  // sethi/setlo;
          NativeJump::instruction_size);          // sethi; jmp; nop
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  // NOTE(review): the breakdown below (4 + 1) does not add up to the returned
  // value; 10 appears to be a safe upper bound on the relocation entries
  // needed — confirm against the emitter before tightening.
  return 10;  // 4 in emit_java_to_interp + 1 in Java_Static_Call
}

// Re-target this static call to invoke 'callee' via the interpreter at
// 'entry': first patch the stub (Method* load and jump destination), then
// atomically redirect the call instruction itself to the stub.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(/*is_aot*/ false);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

#ifdef ASSERT
  // Sanity-check that nobody else patched the stub concurrently: the old
  // Method* must be absent, unchanged, or legitimately stale.
  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm. If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == NULL || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // The jump must still be the clean placeholder (-1) or already 'entry'.
  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub: install the callee Method* and the interpreter entry.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

// Reset the to-interpreter stub back to its freshly-emitted state:
// no Method* (0) and the placeholder jump target (-1).
void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

// Debug-build sanity check of the call site and its stub; asserts on any
// inconsistency.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub(/*is_aot*/ false);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.  The locals themselves are otherwise
  // unused here; constructing them runs the native-instruction checks.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT