1 //
2 // Copyright (c) 1998, 2010, Oracle and/or its affiliates. All rights reserved.
3 // DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 //
5 // This code is free software; you can redistribute it and/or modify it
6 // under the terms of the GNU General Public License version 2 only, as
7 // published by the Free Software Foundation.
8 //
9 // This code is distributed in the hope that it will be useful, but WITHOUT
10 // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 // FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 // version 2 for more details (a copy is included in the LICENSE file that
13 // accompanied this code).
14 //
15 // You should have received a copy of the GNU General Public License version
16 // 2 along with this work; if not, write to the Free Software Foundation,
17 // Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 //
19 // Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 // or visit www.oracle.com if you need additional information or have any
21 // questions.
22 //
924
925 #ifdef ASSERT
926 {
927 MacroAssembler _masm(&cbuf);
928 if (is_verified_oop_base) {
929 __ verify_oop(reg_to_register_object(src1_enc));
930 }
931 if (is_verified_oop_store) {
932 __ verify_oop(reg_to_register_object(dst_enc));
933 }
934 if (tmp_enc != -1) {
935 __ mov(O7, reg_to_register_object(tmp_enc));
936 }
937 if (is_verified_oop_load) {
938 __ verify_oop(reg_to_register_object(dst_enc));
939 }
940 }
941 #endif
942 }
943
// Emit a call to 'entry_point', recording relocation info of type 'rtype'.
//
//   preserve_g2    - stash G2 in L7 for the duration of the call and restore
//                    it afterwards; the caller must treat L7 as killed.
//   force_far_call - (LP64 only) always emit the address-literal far-call
//                    sequence instead of a pc-relative call.
//
// NOTE(review): this listing is truncated below -- the ASSERT block at the
// end does not close within this excerpt.
944 void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, relocInfo::relocType rtype, bool preserve_g2 = false, bool force_far_call = false) {
945   // The method which records debug information at every safepoint
946   // expects the call to be the first instruction in the snippet as
947   // it creates a PcDesc structure which tracks the offset of a call
948   // from the start of the codeBlob. This offset is computed as
949   // code_end() - code_begin() of the code which has been emitted
950   // so far.
951   // In this particular case we have skirted around the problem by
952   // putting the "mov" instruction in the delay slot but the problem
953   // may bite us again at some other point and a cleaner/generic
954   // solution using relocations would be needed.
955   MacroAssembler _masm(&cbuf);
956   __ set_inst_mark();
957
958   // We flush the current window just so that there is a valid stack copy
959   // the fact that the current window becomes active again instantly is
960   // not a problem there is nothing live in it.
961
962 #ifdef ASSERT
963   int startpos = __ offset();   // start offset of the sequence; the assertion that uses it is outside this excerpt
964 #endif /* ASSERT */
965
966 #ifdef _LP64
967   // Calls to the runtime or native may not be reachable from compiled code,
968   // so we generate the far call sequence on 64 bit sparc.
969   // This code sequence is relocatable to any address, even on LP64.
970   if ( force_far_call ) {
971     __ relocate(rtype);
972     AddressLiteral dest(entry_point);
973     __ jumpl_to(dest, O7, O7);   // far call: load full-width target from a literal, jump and link through O7
974   }
975   else
976 #endif
977   {
978      __ call((address)entry_point, rtype);   // short pc-relative call
979   }
980
981   if (preserve_g2) __ delayed()->mov(G2, L7);   // delay slot: stash G2 in L7 (this is the "mov" the header comment refers to)
982   else __ delayed()->nop();                     // otherwise keep the delay slot harmless
983
984   if (preserve_g2) __ mov(L7, G2);   // restore G2 once the call has returned
985
986 #ifdef ASSERT
987   if (preserve_g2 && (VerifyCompiledCode || VerifyOops)) {
988 #ifdef _LP64
989     // Trash argument dump slots.
990     __ set(0xb0b8ac0db0b8ac0d, G1);   // recognizable poison pattern
991     __ mov(G1, G5);
992     __ stx(G1, SP, STACK_BIAS + 0x80);
993     __ stx(G1, SP, STACK_BIAS + 0x88);
994     __ stx(G1, SP, STACK_BIAS + 0x90);
2490 // Long values come back from native calls in O0:O1 in the 32-bit VM, copy the value
2491 // to G1 so the register allocator will not have to deal with the misaligned register
2492 // pair.
2493   enc_class adjust_long_from_native_call %{
2494 #ifndef _LP64
2495     if (returns_long()) {
       // Pack the O0:O1 pair into the single 64-bit register G1:
       // O0 is shifted into bits 63..32, O1 is zero-extended into the low
       // word, and the two halves are OR'ed together.
2496       // sllx O0,32,O0
2497       emit3_simm13( cbuf, Assembler::arith_op, R_O0_enc, Assembler::sllx_op3, R_O0_enc, 0x1020 );   // 0x1020: X bit (64-bit shift) | shift count 32 -- NOTE(review): verify against SPARC shift encoding
2498       // srl O1,0,O1
2499       emit3_simm13( cbuf, Assembler::arith_op, R_O1_enc, Assembler::srl_op3, R_O1_enc, 0x0000 );   // 32-bit srl by 0 clears the upper 32 bits of O1
2500       // or O0,O1,G1
2501       emit3 ( cbuf, Assembler::arith_op, R_G1_enc, Assembler:: or_op3, R_O0_enc, 0, R_O1_enc );   // G1 = O0 | O1 -- the combined 64-bit result
2502     }
2503 #endif
2504   %}
2505
2506   enc_class Java_To_Runtime (method meth) %{ // CALL Java_To_Runtime
2507     // CALL directly to the runtime
2508     // The user of this is responsible for ensuring that R_L7 is empty (killed).
     // L7 is killed because emit_call_reloc stashes G2 there across the call
     // (preserve_g2 == true).  The far-call sequence is forced because
     // runtime targets may be out of pc-relative branch reach.
2509     emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type,
2510                     /*preserve_g2=*/true, /*force far call*/true);
2511   %}
2512
   // Save the current SP into L7_mh_SP_save so it can be reinstated by the
   // restore_SP encoding below -- presumably bracketing a method-handle
   // call (judging by the register name); confirm with the instructions
   // that use this encoding.
2513   enc_class preserve_SP %{
2514     MacroAssembler _masm(&cbuf);
2515     __ mov(SP, L7_mh_SP_save);
2516   %}
2517
   // Reinstate the SP previously captured in L7_mh_SP_save by the
   // preserve_SP encoding.
2518   enc_class restore_SP %{
2519     MacroAssembler _masm(&cbuf);
2520     __ mov(L7_mh_SP_save, SP);
2521   %}
2522
2523 enc_class Java_Static_Call (method meth) %{ // JAVA STATIC CALL
2524 // CALL to fixup routine. Fixup routine uses ScopeDesc info to determine
2525 // who we intended to call.
2526 if ( !_method ) {
2527 emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
2528 } else if (_optimized_virtual) {
2529 emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
2530 } else {
|
1 //
2 // Copyright (c) 1998, 2011, Oracle and/or its affiliates. All rights reserved.
3 // DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 //
5 // This code is free software; you can redistribute it and/or modify it
6 // under the terms of the GNU General Public License version 2 only, as
7 // published by the Free Software Foundation.
8 //
9 // This code is distributed in the hope that it will be useful, but WITHOUT
10 // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 // FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 // version 2 for more details (a copy is included in the LICENSE file that
13 // accompanied this code).
14 //
15 // You should have received a copy of the GNU General Public License version
16 // 2 along with this work; if not, write to the Free Software Foundation,
17 // Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 //
19 // Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 // or visit www.oracle.com if you need additional information or have any
21 // questions.
22 //
924
925 #ifdef ASSERT
926 {
927 MacroAssembler _masm(&cbuf);
928 if (is_verified_oop_base) {
929 __ verify_oop(reg_to_register_object(src1_enc));
930 }
931 if (is_verified_oop_store) {
932 __ verify_oop(reg_to_register_object(dst_enc));
933 }
934 if (tmp_enc != -1) {
935 __ mov(O7, reg_to_register_object(tmp_enc));
936 }
937 if (is_verified_oop_load) {
938 __ verify_oop(reg_to_register_object(dst_enc));
939 }
940 }
941 #endif
942 }
943
// Emit a call to 'entry_point', recording relocation info of type 'rtype'.
//
//   preserve_g2     - stash G2 in L7 for the duration of the call and restore
//                     it afterwards; the caller must treat L7 as killed.
//   force_near_call - (LP64 only) when true, always emit the short
//                     pc-relative call; when false, use the short call only
//                     if the target is reachable from the entire code cache,
//                     otherwise fall back to the far-call sequence.
//
// NOTE(review): this listing is truncated below -- the ASSERT block at the
// end does not close within this excerpt.
944 void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, relocInfo::relocType rtype, bool preserve_g2 = false, bool force_near_call = true) {
945   // The method which records debug information at every safepoint
946   // expects the call to be the first instruction in the snippet as
947   // it creates a PcDesc structure which tracks the offset of a call
948   // from the start of the codeBlob. This offset is computed as
949   // code_end() - code_begin() of the code which has been emitted
950   // so far.
951   // In this particular case we have skirted around the problem by
952   // putting the "mov" instruction in the delay slot but the problem
953   // may bite us again at some other point and a cleaner/generic
954   // solution using relocations would be needed.
955   MacroAssembler _masm(&cbuf);
956   __ set_inst_mark();
957
958   // We flush the current window just so that there is a valid stack copy
959   // the fact that the current window becomes active again instantly is
960   // not a problem there is nothing live in it.
961
962 #ifdef ASSERT
963   int startpos = __ offset();   // start offset of the sequence; the assertion that uses it is outside this excerpt
964 #endif /* ASSERT */
965
966 #ifdef _LP64
967   // Calls to the runtime or native may not be reachable from compiled code,
968   // so we generate the far call sequence on 64 bit sparc.
969   // This code sequence is relocatable to any address, even on LP64.
970   if (!force_near_call) {
971     if (__ is_in_wdisp30_range((address)entry_point, CodeCache::low_bound()) &&
972         __ is_in_wdisp30_range((address)entry_point, CodeCache::high_bound())) {
       // Target is within 30-bit word-displacement reach from BOTH ends of
       // the code cache, so a pc-relative call stays valid wherever this
       // code is later placed.
973       __ call((address)entry_point, rtype);
974     } else {
975       __ relocate(rtype);
976       AddressLiteral dest(entry_point);
977       __ jumpl_to(dest, O7, O7);   // far call: load full-width target from a literal, jump and link through O7
978     }
979   }
980   else
981 #endif
982   {
983     __ call((address)entry_point, rtype);   // short pc-relative call
984   }
985
986   if (preserve_g2) __ delayed()->mov(G2, L7);   // delay slot: stash G2 in L7 (this is the "mov" the header comment refers to)
987   else __ delayed()->nop();                     // otherwise keep the delay slot harmless
988
989   if (preserve_g2) __ mov(L7, G2);   // restore G2 once the call has returned
990
991 #ifdef ASSERT
992   if (preserve_g2 && (VerifyCompiledCode || VerifyOops)) {
993 #ifdef _LP64
994     // Trash argument dump slots.
995     __ set(0xb0b8ac0db0b8ac0d, G1);   // recognizable poison pattern
996     __ mov(G1, G5);
997     __ stx(G1, SP, STACK_BIAS + 0x80);
998     __ stx(G1, SP, STACK_BIAS + 0x88);
999     __ stx(G1, SP, STACK_BIAS + 0x90);
2495 // Long values come back from native calls in O0:O1 in the 32-bit VM, copy the value
2496 // to G1 so the register allocator will not have to deal with the misaligned register
2497 // pair.
2498   enc_class adjust_long_from_native_call %{
2499 #ifndef _LP64
2500     if (returns_long()) {
       // Pack the O0:O1 pair into the single 64-bit register G1:
       // O0 is shifted into bits 63..32, O1 is zero-extended into the low
       // word, and the two halves are OR'ed together.
2501       // sllx O0,32,O0
2502       emit3_simm13( cbuf, Assembler::arith_op, R_O0_enc, Assembler::sllx_op3, R_O0_enc, 0x1020 );   // 0x1020: X bit (64-bit shift) | shift count 32 -- NOTE(review): verify against SPARC shift encoding
2503       // srl O1,0,O1
2504       emit3_simm13( cbuf, Assembler::arith_op, R_O1_enc, Assembler::srl_op3, R_O1_enc, 0x0000 );   // 32-bit srl by 0 clears the upper 32 bits of O1
2505       // or O0,O1,G1
2506       emit3 ( cbuf, Assembler::arith_op, R_G1_enc, Assembler:: or_op3, R_O0_enc, 0, R_O1_enc );   // G1 = O0 | O1 -- the combined 64-bit result
2507     }
2508 #endif
2509   %}
2510
2511   enc_class Java_To_Runtime (method meth) %{ // CALL Java_To_Runtime
2512     // CALL directly to the runtime
2513     // The user of this is responsible for ensuring that R_L7 is empty (killed).
     // L7 is killed because emit_call_reloc stashes G2 there across the call
     // (preserve_g2 == true).  force_near_call is false, so emit_call_reloc
     // chooses between the short pc-relative call and the far-call sequence
     // based on reachability of the target from the code cache.
2514     emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type,
2515                     /*preserve_g2=*/true, /*force near call*/false);
2516   %}
2517
   // Save the current SP into L7_mh_SP_save so it can be reinstated by the
   // restore_SP encoding below -- presumably bracketing a method-handle
   // call (judging by the register name); confirm with the instructions
   // that use this encoding.
2518   enc_class preserve_SP %{
2519     MacroAssembler _masm(&cbuf);
2520     __ mov(SP, L7_mh_SP_save);
2521   %}
2522
   // Reinstate the SP previously captured in L7_mh_SP_save by the
   // preserve_SP encoding.
2523   enc_class restore_SP %{
2524     MacroAssembler _masm(&cbuf);
2525     __ mov(L7_mh_SP_save, SP);
2526   %}
2527
2528 enc_class Java_Static_Call (method meth) %{ // JAVA STATIC CALL
2529 // CALL to fixup routine. Fixup routine uses ScopeDesc info to determine
2530 // who we intended to call.
2531 if ( !_method ) {
2532 emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
2533 } else if (_optimized_virtual) {
2534 emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
2535 } else {
|