1 #ifdef USE_PRAGMA_IDENT_SRC
2 #pragma ident "@(#)templateTable_sparc.cpp 1.262 07/08/29 13:42:19 JVM"
3 #endif
4 /*
5 * Copyright 1997-2007 Sun Microsystems, Inc. All Rights Reserved.
6 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
7 *
8 * This code is free software; you can redistribute it and/or modify it
9 * under the terms of the GNU General Public License version 2 only, as
10 * published by the Free Software Foundation.
11 *
12 * This code is distributed in the hope that it will be useful, but WITHOUT
13 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 * version 2 for more details (a copy is included in the LICENSE file that
16 * accompanied this code).
17 *
18 * You should have received a copy of the GNU General Public License version
19 * 2 along with this work; if not, write to the Free Software Foundation,
20 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
21 *
22 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
23 * CA 95054 USA or visit www.sun.com if you need additional information or
24 * have any questions.
25 *
26 */
27
28 #include "incls/_precompiled.incl"
29 #include "incls/_templateTable_sparc.cpp.incl"
30
31 #ifndef CC_INTERP
32 #define __ _masm->
33
34
35 //----------------------------------------------------------------------------------------------------
36 // Platform-dependent initialization
37
// Platform-dependent one-time initialization of the template table.
// No SPARC-specific setup is required.
void TemplateTable::pd_initialize() {
  // (none)
}
41
42
43 //----------------------------------------------------------------------------------------------------
44 // Condition conversion
45 Assembler::Condition ccNot(TemplateTable::Condition cc) {
46 switch (cc) {
47 case TemplateTable::equal : return Assembler::notEqual;
48 case TemplateTable::not_equal : return Assembler::equal;
49 case TemplateTable::less : return Assembler::greaterEqual;
50 case TemplateTable::less_equal : return Assembler::greater;
51 case TemplateTable::greater : return Assembler::lessEqual;
52 case TemplateTable::greater_equal: return Assembler::less;
53 }
448 // Otos_i: index
449 // O2: array
450 __ index_check(O2, Otos_i, LogBytesPerInt, G3_scratch, O3);
451 __ ldf(FloatRegisterImpl::S, O3, arrayOopDesc::base_offset_in_bytes(T_FLOAT), Ftos_f);
452 }
453
454
// daload bytecode: load a double from a double[] array.
// In:  Otos_i = index (itos); O2 = array oop.
// Out: Ftos_d = loaded element (dtos).
void TemplateTable::daload() {
  transition(itos, dtos);
  // Otos_i: index
  // O2: array
  // index_check presumably null/bounds-checks and leaves the scaled element
  // offset base in O3 -- confirm against InterpreterMacroAssembler.
  __ index_check(O2, Otos_i, LogBytesPerLong, G3_scratch, O3);
  __ ldf(FloatRegisterImpl::D, O3, arrayOopDesc::base_offset_in_bytes(T_DOUBLE), Ftos_d);
}
462
463
// aaload bytecode: load an object reference from an Object[] array.
// In:  Otos_i = index (itos); array oop on tos (O2).
// Out: Otos_i = loaded reference (atos).
void TemplateTable::aaload() {
  transition(itos, atos);
  // Otos_i: index
  // tos: array
  // NOTE(review): index_check appears to null/range-check and produce the
  // scaled element offset base in O3 -- confirm in the macro assembler.
  __ index_check(O2, Otos_i, LogBytesPerWord, G3_scratch, O3);
  __ ld_ptr(O3, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
  __ verify_oop(Otos_i);  // debug-build sanity check on the loaded oop
}
472
473
// baload bytecode: load a (signed) byte/boolean element from an array.
// In:  Otos_i = index (itos); array oop on tos (O2).
// Out: Otos_i = sign-extended element (itos).
void TemplateTable::baload() {
  transition(itos, itos);
  // Otos_i: index
  // tos: array
  // Scale factor 0: byte elements, index == byte offset.
  __ index_check(O2, Otos_i, 0, G3_scratch, O3);
  __ ldsb(O3, arrayOopDesc::base_offset_in_bytes(T_BYTE), Otos_i);  // sign-extending load
}
481
482
// caload bytecode: load a char element from a char[] array.
// In:  Otos_i = index (itos); array oop on tos (O2).
// Out: Otos_i = zero-extended 16-bit element (itos).
void TemplateTable::caload() {
  transition(itos, itos);
  // Otos_i: index
  // tos: array
  __ index_check(O2, Otos_i, LogBytesPerShort, G3_scratch, O3);
  __ lduh(O3, arrayOopDesc::base_offset_in_bytes(T_CHAR), Otos_i);  // unsigned halfword load (char is unsigned)
}
// dastore bytecode: store a double into a double[] array.
// In:  Ftos_d = value (dtos); index and array are on the expression stack.
void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(O2); // index
  // Ftos_d: val
  // O3: array
  // Null/bounds check; O2 is reused for the scaled element offset base.
  __ index_check(O3, O2, LogBytesPerLong, G3_scratch, O2);
  __ stf(FloatRegisterImpl::D, Ftos_d, O2, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
}
730
731
// aastore bytecode: store an object reference into an Object[] array, with
// the dynamic array-store (subtype) check and GC card marking.
// The three operands (value, index, array) are read from the expression stack
// without popping first -- presumably so they stay visible across the
// possible throw path; Lesp is bumped by 3 slots only on the success exits.
void TemplateTable::aastore() {
  Label store_ok, is_null, done;
  transition(vtos, vtos);
  __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
  __ ld(Lesp, Interpreter::expr_offset_in_bytes(1), O2); // get index
  __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(2), O3); // get array
  // Otos_i: val
  // O2: index
  // O3: array
  __ verify_oop(Otos_i);
  __ index_check_without_pop(O3, O2, LogBytesPerWord, G3_scratch, O1);

  // do array store check - check for NULL value first
  // (storing NULL needs no subtype check; branch taken before O1 is modified)
  __ br_null( Otos_i, false, Assembler::pn, is_null );
  __ delayed()->
    ld_ptr(O3, oopDesc::klass_offset_in_bytes(), O4); // get array klass

  // do fast instanceof cache test
  __ ld_ptr(Otos_i, oopDesc::klass_offset_in_bytes(), O5); // get value klass

  __ ld_ptr(O4, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes(), O4);

  assert(Otos_i == O0, "just checking");

  // Otos_i: value
  // O1: addr - offset
  // O2: index
  // O3: array
  // O4: array element klass
  // O5: value klass

  // Generate a fast subtype check.  Branch to store_ok if no
  // failure.  Throw if failure.
  __ gen_subtype_check( O5, O4, G3_scratch, G4_scratch, G1_scratch, store_ok );

  // Not a subtype; so must throw exception
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ArrayStoreException_entry, G3_scratch );

  // Store is OK.
  __ bind(store_ok);
  __ st_ptr(Otos_i, O1, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
  // Quote from rememberedSet.hpp: For objArrays, the precise card
  // corresponding to the pointer store is dirtied so we don't need to
  // scavenge the entire array.
  Address element(O1, 0, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
  __ add(element, O1); // address the element precisely
  __ store_check(G3_scratch, O1);  // dirty only the card for this element
  __ ba(false,done);
  __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize()); // adj sp (pops array, index and value)

  // NULL store: no subtype check and no card mark needed.
  __ bind(is_null);
  __ st_ptr(Otos_i, element);
  __ profile_null_seen(G3_scratch);
  __ inc(Lesp, 3* Interpreter::stackElementSize()); // adj sp (pops array, index and value)
  __ bind(done);
}
788
789
// bastore bytecode: store a byte/boolean into an array.
// In:  Otos_i = value (itos); index and array are on the expression stack.
void TemplateTable::bastore() {
  transition(itos, vtos);
  __ pop_i(O2); // index
  // Otos_i: val
  // O3: array
  // Scale factor 0: byte elements; O2 is reused for the element offset base.
  __ index_check(O3, O2, 0, G3_scratch, O2);
  __ stb(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_BYTE));
}
798
799
800 void TemplateTable::castore() {
801 transition(itos, vtos);
802 __ pop_i(O2); // index
803 // Otos_i: val
1819 __ delayed()->nop();
1820 }
1821
1822 __ bind(default_case); // fall through (if not profiling)
1823 __ profile_switch_default(Ri);
1824
1825 __ bind(continue_execution);
1826 __ add( Lbcp, Rj, Lbcp );
1827 __ dispatch_next( vtos );
1828 }
1829
1830
// Implements the *return family of bytecodes.
// For _return_register_finalizer (used only with vtos) the receiver's class
// access flags are tested for JVM_ACC_HAS_FINALIZER, and if set the VM is
// called to register the object for finalization before the frame is removed.
void TemplateTable::_return(TosState state) {
  transition(state, state);
  assert(_desc->calls_vm(), "inconsistent calls_vm information");

  if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
    assert(state == vtos, "only valid state");
    __ mov(G0, G3_scratch);
    // Load local slot 0 -- presumably the receiver ("this"); confirm
    // access_local_ptr semantics against the macro assembler.
    __ access_local_ptr(G3_scratch, Otos_i);
    __ ld_ptr(Otos_i, oopDesc::klass_offset_in_bytes(), O2);  // receiver klass
    __ set(JVM_ACC_HAS_FINALIZER, G3);
    __ ld(O2, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc), O2);
    __ andcc(G3, O2, G0);  // test flag; result goes to condition codes only
    Label skip_register_finalizer;
    __ br(Assembler::zero, false, Assembler::pn, skip_register_finalizer);
    __ delayed()->nop();

    // Call out to do finalizer registration
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Otos_i);

    __ bind(skip_register_finalizer);
  }

  __ remove_activation(state, /* throw_monitor_exception */ true);

  // The caller's SP was adjusted upon method entry to accommodate
  // the callee's non-argument locals. Undo that adjustment.
  __ ret();                           // return to caller
  __ delayed()->restore(I5_savedSP, G0, SP);
}
1999 if (JvmtiExport::can_post_field_access()) {
2000 // Check to see if a field access watch has been set before we take
2001 // the time to call into the VM.
2002 Label Label1;
2003 assert_different_registers(Rcache, index, G1_scratch);
2004 Address get_field_access_count_addr(G1_scratch,
2005 (address)JvmtiExport::get_field_access_count_addr(),
2006 relocInfo::none);
2007 __ load_contents(get_field_access_count_addr, G1_scratch);
2008 __ tst(G1_scratch);
2009 __ br(Assembler::zero, false, Assembler::pt, Label1);
2010 __ delayed()->nop();
2011
2012 __ add(Rcache, in_bytes(cp_base_offset), Rcache);
2013
2014 if (is_static) {
2015 __ clr(Otos_i);
2016 } else {
2017 if (has_tos) {
2018 // save object pointer before call_VM() clobbers it
2019 __ mov(Otos_i, Lscratch);
2020 } else {
2021 // Load top of stack (do not pop the value off the stack);
2022 __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
2023 }
2024 __ verify_oop(Otos_i);
2025 }
2026 // Otos_i: object pointer or NULL if static
2027 // Rcache: cache entry pointer
2028 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2029 Otos_i, Rcache);
2030 if (!is_static && has_tos) {
2031 __ mov(Lscratch, Otos_i); // restore object pointer
2032 __ verify_oop(Otos_i);
2033 }
2034 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2035 __ bind(Label1);
2036 }
2037 }
2038
2039 void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
2040 transition(vtos, vtos);
2041
2042 Register Rcache = G3_scratch;
2043 Register index = G4_scratch;
2044 Register Rclass = Rcache;
2045 Register Roffset= G4_scratch;
2046 Register Rflags = G1_scratch;
2047 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2048
2049 resolve_cache_and_index(byte_no, Rcache, index);
2050 jvmti_post_field_access(Rcache, index, is_static, false);
2051 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2064 if (__ membar_has_effect(membar_bits)) {
2065 // Get volatile flag
2066 __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2067 __ and3(Rflags, Lscratch, Lscratch);
2068 }
2069
2070 Label checkVolatile;
2071
2072 // compute field type
2073 Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
2074 __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2075 // Make sure we don't need to mask Rflags for tosBits after the above shift
2076 ConstantPoolCacheEntry::verify_tosBits();
2077
2078 // Check atos before itos for getstatic, more likely (in Queens at least)
2079 __ cmp(Rflags, atos);
2080 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2081 __ delayed() ->cmp(Rflags, itos);
2082
2083 // atos
2084 __ ld_ptr(Rclass, Roffset, Otos_i);
2085 __ verify_oop(Otos_i);
2086 __ push(atos);
2087 if (!is_static) {
2088 patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
2089 }
2090 __ ba(false, checkVolatile);
2091 __ delayed()->tst(Lscratch);
2092
2093 __ bind(notObj);
2094
2095 // cmp(Rflags, itos);
2096 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2097 __ delayed() ->cmp(Rflags, ltos);
2098
2099 // itos
2100 __ ld(Rclass, Roffset, Otos_i);
2101 __ push(itos);
2102 if (!is_static) {
2103 patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
2104 }
2245 break;
2246 case Bytecodes::_fast_cgetfield:
2247 __ lduh(Otos_i, Roffset, Otos_i);
2248 break;
2249 case Bytecodes::_fast_sgetfield:
2250 __ ldsh(Otos_i, Roffset, Otos_i);
2251 break;
2252 case Bytecodes::_fast_igetfield:
2253 __ ld(Otos_i, Roffset, Otos_i);
2254 break;
2255 case Bytecodes::_fast_lgetfield:
2256 __ ld_long(Otos_i, Roffset, Otos_l);
2257 break;
2258 case Bytecodes::_fast_fgetfield:
2259 __ ldf(FloatRegisterImpl::S, Otos_i, Roffset, Ftos_f);
2260 break;
2261 case Bytecodes::_fast_dgetfield:
2262 __ ldf(FloatRegisterImpl::D, Otos_i, Roffset, Ftos_d);
2263 break;
2264 case Bytecodes::_fast_agetfield:
2265 __ ld_ptr(Otos_i, Roffset, Otos_i);
2266 break;
2267 default:
2268 ShouldNotReachHere();
2269 }
2270
2271 if (__ membar_has_effect(membar_bits)) {
2272 __ btst(Lscratch, Rflags);
2273 __ br(Assembler::zero, false, Assembler::pt, exit);
2274 __ delayed()->nop();
2275 volatile_barrier(membar_bits);
2276 __ bind(exit);
2277 }
2278
2279 if (state == atos) {
2280 __ verify_oop(Otos_i); // does not blow flags!
2281 }
2282 }
2283
2284 void TemplateTable::jvmti_post_fast_field_mod() {
2285 if (JvmtiExport::can_post_field_modification()) {
2434 __ bind(notVolatile);
2435 }
2436 }
2437
2438 __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2439 // Make sure we don't need to mask Rflags for tosBits after the above shift
2440 ConstantPoolCacheEntry::verify_tosBits();
2441
2442 // compute field type
2443 Label notInt, notShort, notChar, notObj, notByte, notLong, notFloat;
2444
2445 if (is_static) {
2446 // putstatic with object type most likely, check that first
2447 __ cmp(Rflags, atos );
2448 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2449 __ delayed() ->cmp(Rflags, itos );
2450
2451 // atos
2452 __ pop_ptr();
2453 __ verify_oop(Otos_i);
2454 __ st_ptr(Otos_i, Rclass, Roffset);
2455 __ store_check(G1_scratch, Rclass, Roffset);
2456 __ ba(false, checkVolatile);
2457 __ delayed()->tst(Lscratch);
2458
2459 __ bind(notObj);
2460
2461 // cmp(Rflags, itos );
2462 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2463 __ delayed() ->cmp(Rflags, btos );
2464
2465 // itos
2466 __ pop_i();
2467 __ st(Otos_i, Rclass, Roffset);
2468 __ ba(false, checkVolatile);
2469 __ delayed()->tst(Lscratch);
2470
2471 __ bind(notInt);
2472
2473 } else {
2474 // putfield with int type most likely, check that first
2475 __ cmp(Rflags, itos );
2476 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2477 __ delayed() ->cmp(Rflags, atos );
2478
2479 // itos
2480 __ pop_i();
2481 pop_and_check_object(Rclass);
2482 __ st(Otos_i, Rclass, Roffset);
2483 patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
2484 __ ba(false, checkVolatile);
2485 __ delayed()->tst(Lscratch);
2486
2487 __ bind(notInt);
2488 // cmp(Rflags, atos );
2489 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2490 __ delayed() ->cmp(Rflags, btos );
2491
2492 // atos
2493 __ pop_ptr();
2494 pop_and_check_object(Rclass);
2495 __ verify_oop(Otos_i);
2496 __ st_ptr(Otos_i, Rclass, Roffset);
2497 __ store_check(G1_scratch, Rclass, Roffset);
2498 patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
2499 __ ba(false, checkVolatile);
2500 __ delayed()->tst(Lscratch);
2501
2502 __ bind(notObj);
2503 }
2504
2505 // cmp(Rflags, btos );
2506 __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2507 __ delayed() ->cmp(Rflags, ltos );
2508
2509 // btos
2510 __ pop_i();
2511 if (!is_static) pop_and_check_object(Rclass);
2512 __ stb(Otos_i, Rclass, Roffset);
2513 if (!is_static) {
2514 patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
2515 }
2516 __ ba(false, checkVolatile);
2517 __ delayed()->tst(Lscratch);
2631 }
2632 }
2633
2634 __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
2635 ConstantPoolCacheEntry::f2_offset())), Roffset);
2636 pop_and_check_object(Rclass);
2637
2638 switch (bytecode()) {
2639 case Bytecodes::_fast_bputfield: __ stb(Otos_i, Rclass, Roffset); break;
2640 case Bytecodes::_fast_cputfield: /* fall through */
2641 case Bytecodes::_fast_sputfield: __ sth(Otos_i, Rclass, Roffset); break;
2642 case Bytecodes::_fast_iputfield: __ st(Otos_i, Rclass, Roffset); break;
2643 case Bytecodes::_fast_lputfield: __ st_long(Otos_l, Rclass, Roffset); break;
2644 case Bytecodes::_fast_fputfield:
2645 __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2646 break;
2647 case Bytecodes::_fast_dputfield:
2648 __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2649 break;
2650 case Bytecodes::_fast_aputfield:
2651 __ st_ptr(Otos_i, Rclass, Roffset);
2652 __ store_check(G1_scratch, Rclass, Roffset);
2653 break;
2654 default:
2655 ShouldNotReachHere();
2656 }
2657
2658 if (__ membar_has_effect(write_bits)) {
2659 __ tst(Lscratch);
2660 __ br(Assembler::zero, false, Assembler::pt, exit);
2661 __ delayed()->nop();
2662 volatile_barrier(Assembler::StoreLoad);
2663 __ bind(exit);
2664 }
2665 }
2666
2667
// putfield bytecode: delegate to the shared field-store generator with
// is_static == false (instance field store).
void TemplateTable::putfield(int byte_no) {
  putfield_or_static(byte_no, false);
}
2671
2672 void TemplateTable::putstatic(int byte_no) {
2674 }
2675
2676
2677 void TemplateTable::fast_xaccess(TosState state) {
2678 transition(vtos, state);
2679 Register Rcache = G3_scratch;
2680 Register Roffset = G4_scratch;
2681 Register Rflags = G4_scratch;
2682 Register Rreceiver = Lscratch;
2683
2684 __ ld_ptr(Llocals, Interpreter::value_offset_in_bytes(), Rreceiver);
2685
2686 // access constant pool cache (is resolved)
2687 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
2688 __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())), Roffset);
2689 __ add(Lbcp, 1, Lbcp); // needed to report exception at the correct bcp
2690
2691 __ verify_oop(Rreceiver);
2692 __ null_check(Rreceiver);
2693 if (state == atos) {
2694 __ ld_ptr(Rreceiver, Roffset, Otos_i);
2695 } else if (state == itos) {
2696 __ ld (Rreceiver, Roffset, Otos_i) ;
2697 } else if (state == ftos) {
2698 __ ldf(FloatRegisterImpl::S, Rreceiver, Roffset, Ftos_f);
2699 } else {
2700 ShouldNotReachHere();
2701 }
2702
2703 Assembler::Membar_mask_bits membar_bits =
2704 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
2705 if (__ membar_has_effect(membar_bits)) {
2706
2707 // Get is_volatile value in Rflags and check if membar is needed
2708 __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset())), Rflags);
2709
2710 // Test volatile
2711 Label notVolatile;
2712 __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2713 __ btst(Rflags, Lscratch);
2714 __ br(Assembler::zero, false, Assembler::pt, notVolatile);
2776 invokevfinal_helper(Rscratch, Rret);
2777
2778 __ bind(notFinal);
2779
2780 __ mov(G5_method, Rscratch); // better scratch register
2781 __ load_receiver(G4_scratch, O0); // gets receiverOop
2782 // receiver is in O0
2783 __ verify_oop(O0);
2784
2785 // get return address
2786 Address table(Rtemp, (address)Interpreter::return_3_addrs_by_index_table());
2787 __ load_address(table);
2788 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
2789 // Make sure we don't need to mask Rret for tosBits after the above shift
2790 ConstantPoolCacheEntry::verify_tosBits();
2791 __ sll(Rret, LogBytesPerWord, Rret);
2792 __ ld_ptr(Rtemp, Rret, Rret); // get return address
2793
2794 // get receiver klass
2795 __ null_check(O0, oopDesc::klass_offset_in_bytes());
2796 __ ld_ptr(Address(O0, 0, oopDesc::klass_offset_in_bytes()), Rrecv);
2797 __ verify_oop(Rrecv);
2798
2799 __ profile_virtual_call(Rrecv, O4);
2800
2801 generate_vtable_call(Rrecv, Rscratch, Rret);
2802 }
2803
// fast_invokevfinal: quickened invokevirtual of a final method -- the target
// method is already resolved, so no vtable dispatch is needed.
void TemplateTable::fast_invokevfinal(int byte_no) {
  transition(vtos, vtos);

  // Loads the resolved entry from the constant pool cache; by the register
  // arguments, presumably the methodOop into G5_method and flags into
  // Lscratch -- confirm against load_invoke_cp_cache_entry.
  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Lscratch, true,
                             /*is_invokevfinal*/true);
  __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
  invokevfinal_helper(G3_scratch, Lscratch);
}
2812
2813 void TemplateTable::invokevfinal_helper(Register Rscratch, Register Rret) {
2814 Register Rtemp = G4_scratch;
2815
2816 __ verify_oop(G5_method);
2944 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
2945
2946 // get receiver
2947 __ and3(Rflags, 0xFF, Rscratch); // gets number of parameters
2948 __ load_receiver(Rscratch, O0);
2949 __ verify_oop(O0);
2950
2951 __ mov(Rflags, Rret);
2952
2953 // get return address
2954 Address table(Rscratch, (address)Interpreter::return_5_addrs_by_index_table());
2955 __ load_address(table);
2956 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
2957 // Make sure we don't need to mask Rret for tosBits after the above shift
2958 ConstantPoolCacheEntry::verify_tosBits();
2959 __ sll(Rret, LogBytesPerWord, Rret);
2960 __ ld_ptr(Rscratch, Rret, Rret); // get return address
2961
2962 // get receiver klass
2963 __ null_check(O0, oopDesc::klass_offset_in_bytes());
2964 __ ld_ptr(O0, oopDesc::klass_offset_in_bytes(), RklassOop);
2965 __ verify_oop(RklassOop);
2966
2967 // Special case of invokeinterface called for virtual method of
2968 // java.lang.Object. See cpCacheOop.cpp for details.
2969 // This code isn't produced by javac, but could be produced by
2970 // another compliant java compiler.
2971 Label notMethod;
2972 __ set((1 << ConstantPoolCacheEntry::methodInterface), Rscratch);
2973 __ btst(Rflags, Rscratch);
2974 __ br(Assembler::zero, false, Assembler::pt, notMethod);
2975 __ delayed()->nop();
2976
2977 invokeinterface_object_method(RklassOop, Rinterface, Rret, Rflags);
2978
2979 __ bind(notMethod);
2980
2981 __ profile_virtual_call(RklassOop, O4);
2982
2983 //
2984 // find entry point to call
3207
3208 // slow case
3209 __ bind(slow_case);
3210 __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
3211 __ get_constant_pool(O1);
3212
3213 call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
3214
3215 __ ba(false, done);
3216 __ delayed()->nop();
3217
3218 // Initialize the header: mark, klass
3219 __ bind(initialize_header);
3220
3221 if (UseBiasedLocking) {
3222 __ ld_ptr(RinstanceKlass, Klass::prototype_header_offset_in_bytes() + sizeof(oopDesc), G4_scratch);
3223 } else {
3224 __ set((intptr_t)markOopDesc::prototype(), G4_scratch);
3225 }
3226 __ st_ptr(G4_scratch, RallocatedObject, oopDesc::mark_offset_in_bytes()); // mark
3227 __ st_ptr(RinstanceKlass, RallocatedObject, oopDesc::klass_offset_in_bytes()); // klass
3228
3229 {
3230 SkipIfEqual skip_if(
3231 _masm, G4_scratch, &DTraceAllocProbes, Assembler::zero);
3232 // Trigger dtrace event
3233 __ push(atos);
3234 __ call_VM_leaf(noreg,
3235 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), O0);
3236 __ pop(atos);
3237 }
3238
3239 // continue
3240 __ bind(done);
3241 }
3242
3243
3244
3245 void TemplateTable::newarray() {
3246 transition(itos, atos);
3247 __ ldub(Lbcp, 1, O1);
3263 __ verify_oop(Otos_i);
3264 __ tst(Otos_i);
3265 __ throw_if_not_1_x( Assembler::notZero, ok );
3266 __ delayed()->ld(Otos_i, arrayOopDesc::length_offset_in_bytes(), Otos_i);
3267 __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
3268 }
3269
3270
// checkcast bytecode.
// Fast path: if the constant pool entry has been quickened to a resolved
// JVM_CONSTANT_Class, the target klass is loaded directly from the constant
// pool; otherwise the VM is called (quicken_io_cc) to resolve it.  A fast
// subtype check then either falls through (cast OK, object unchanged on tos)
// or throws ClassCastException.  Casting NULL always succeeds.
void TemplateTable::checkcast() {
  transition(atos, atos);
  Label done, is_null, quicked, cast_ok, resolved;
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;

  // Check for casting a NULL
  __ br_null(Otos_i, false, Assembler::pn, is_null);
  __ delayed()->nop();

  // Get value klass in RobjKlass
  __ ld_ptr(Otos_i, oopDesc::klass_offset_in_bytes(), RobjKlass); // get value klass

  // Get constant pool tag (cp index from the two bytecode operand bytes)
  __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);

  // See if the checkcast has been quickened
  __ get_cpool_and_tags(Lscratch, G3_scratch);
  __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
  __ ldub(G3_scratch, Roffset, G3_scratch);
  __ cmp(G3_scratch, JVM_CONSTANT_Class);
  // Delay-slot sll scales the cp index to a byte offset; the branch's second
  // argument (true) presumably annuls the slot on the not-taken path.
  __ br(Assembler::equal, true, Assembler::pt, quicked);
  __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);

  // Slow path: call into the VM to resolve the class.
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver

  __ br(Assembler::always, false, Assembler::pt, resolved);
  __ delayed()->ld_ptr(Otos_i, oopDesc::klass_offset_in_bytes(), RobjKlass); // get value klass

  // Extract target class from constant pool
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
  __ bind(resolved);

  // Generate a fast subtype check.  Branch to cast_ok if no
  // failure.  Throw exception if failure.
  __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, cast_ok );

  // Not a subtype; so must throw exception
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );

  __ bind(cast_ok);

  // When profiling, the null path must record the null; skip around it here.
  if (ProfileInterpreter) {
    __ ba(false, done);
    __ delayed()->nop();
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
}
3326
3327
// instanceof bytecode.
// Same resolution scheme as checkcast (quickened fast path vs. quicken_io_cc
// VM call), but instead of throwing, the result is an int: Otos_i is
// preset to 1 and cleared to 0 if the subtype check fails.  A NULL operand
// yields 0 (Otos_i already holds the null, i.e. zero, on that path).
void TemplateTable::instanceof() {
  Label done, is_null, quicked, resolved;
  transition(atos, itos);
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;

  // Check for casting a NULL
  __ br_null(Otos_i, false, Assembler::pt, is_null);
  __ delayed()->nop();

  // Get value klass in RobjKlass
  __ ld_ptr(Otos_i, oopDesc::klass_offset_in_bytes(), RobjKlass); // get value klass

  // Get constant pool tag (cp index from the two bytecode operand bytes)
  __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);

  // See if the instanceof has been quickened
  __ get_cpool_and_tags(Lscratch, G3_scratch);
  __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
  __ ldub(G3_scratch, Roffset, G3_scratch);
  __ cmp(G3_scratch, JVM_CONSTANT_Class);
  // Delay-slot sll scales the cp index to a byte offset.
  __ br(Assembler::equal, true, Assembler::pt, quicked);
  __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);

  // Slow path: call into the VM to resolve the class.
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver

  __ br(Assembler::always, false, Assembler::pt, resolved);
  __ delayed()->ld_ptr(Otos_i, oopDesc::klass_offset_in_bytes(), RobjKlass); // get value klass


  // Extract target class from constant pool
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ get_constant_pool(Lscratch);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
  __ bind(resolved);

  // Generate a fast subtype check.  Branch to done if no
  // failure.  Return 0 if failure.
  __ or3(G0, 1, Otos_i);      // set result assuming quick tests succeed
  __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
  // Not a subtype; return 0;
  __ clr( Otos_i );

  // When profiling, the null path must record the null; skip around it here.
  if (ProfileInterpreter) {
    __ ba(false, done);
    __ delayed()->nop();
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
}
3383
3384 void TemplateTable::_breakpoint() {
3385
3386 // Note: We get here even if we are single stepping..
|
1 /*
2 * Copyright 1997-2008 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
24
25 #include "incls/_precompiled.incl"
26 #include "incls/_templateTable_sparc.cpp.incl"
27
28 #ifndef CC_INTERP
29 #define __ _masm->
30
31 // Misc helpers
32
// Do an oop store like *(base + index + offset) = val,
// where index can be noreg (then offset is used; must be a simm13).
// Dispatches on the collector's BarrierSet kind to emit the matching
// GC write barrier around the store:
//   - G1:            SATB pre-barrier before the store, post-barrier after
//                    (post-barrier skipped when storing NULL, i.e. val == G0);
//   - card table:    dirty-card post-barrier after the store (also skipped
//                    for NULL stores);
//   - ModRef/Other:  unsupported here -> ShouldNotReachHere.
// When 'precise' is set, base is advanced to the exact element address first
// so only that element's card is marked.
// 'tmp' is a scratch register for the barrier code and must not alias
// val/base/index.
static void do_oop_store(InterpreterMacroAssembler* _masm,
                         Register base,
                         Register index,
                         int offset,
                         Register val,
                         Register tmp,
                         BarrierSet::Name barrier,
                         bool precise) {
  assert(tmp != val && tmp != base && tmp != index, "register collision");
  assert(index == noreg || offset == 0, "only one offset");
  switch (barrier) {
#ifndef SERIALGC
    case BarrierSet::G1SATBCT:
    case BarrierSet::G1SATBCTLogging:
      {
        // SATB pre-barrier: record the old value before it is overwritten.
        __ g1_write_barrier_pre( base, index, offset, tmp, /*preserve_o_regs*/true);
        if (index == noreg ) {
          assert(Assembler::is_simm13(offset), "fix this code");
          __ store_heap_oop(val, base, offset);
        } else {
          __ store_heap_oop(val, base, index);
        }

        // No need for post barrier if storing NULL
        if (val != G0) {
          if (precise) {
            // Point base at the exact slot that was written.
            if (index == noreg) {
              __ add(base, offset, base);
            } else {
              __ add(base, index, base);
            }
          }
          __ g1_write_barrier_post(base, val, tmp);
        }
      }
      break;
#endif // SERIALGC
    case BarrierSet::CardTableModRef:
    case BarrierSet::CardTableExtension:
      {
        if (index == noreg ) {
          assert(Assembler::is_simm13(offset), "fix this code");
          __ store_heap_oop(val, base, offset);
        } else {
          __ store_heap_oop(val, base, index);
        }
        // No need for post barrier if storing NULL
        if (val != G0) {
          if (precise) {
            // Point base at the exact slot that was written.
            if (index == noreg) {
              __ add(base, offset, base);
            } else {
              __ add(base, index, base);
            }
          }
          __ card_write_barrier_post(base, val, tmp);
        }
      }
      break;
    case BarrierSet::ModRef:
    case BarrierSet::Other:
      ShouldNotReachHere();
      break;
    default      :
      ShouldNotReachHere();

  }
}
103
104
105 //----------------------------------------------------------------------------------------------------
106 // Platform-dependent initialization
107
// Platform-dependent one-time initialization of the template table.
// No SPARC-specific setup is required.
void TemplateTable::pd_initialize() {
  // (none)
}
111
112
113 //----------------------------------------------------------------------------------------------------
114 // Condition conversion
115 Assembler::Condition ccNot(TemplateTable::Condition cc) {
116 switch (cc) {
117 case TemplateTable::equal : return Assembler::notEqual;
118 case TemplateTable::not_equal : return Assembler::equal;
119 case TemplateTable::less : return Assembler::greaterEqual;
120 case TemplateTable::less_equal : return Assembler::greater;
121 case TemplateTable::greater : return Assembler::lessEqual;
122 case TemplateTable::greater_equal: return Assembler::less;
123 }
518 // Otos_i: index
519 // O2: array
520 __ index_check(O2, Otos_i, LogBytesPerInt, G3_scratch, O3);
521 __ ldf(FloatRegisterImpl::S, O3, arrayOopDesc::base_offset_in_bytes(T_FLOAT), Ftos_f);
522 }
523
524
// daload bytecode: load a double from a double[] array.
// In:  Otos_i = index (itos); O2 = array oop.
// Out: Ftos_d = loaded element (dtos).
void TemplateTable::daload() {
  transition(itos, dtos);
  // Otos_i: index
  // O2: array
  // index_check presumably null/bounds-checks and leaves the scaled element
  // offset base in O3 -- confirm against InterpreterMacroAssembler.
  __ index_check(O2, Otos_i, LogBytesPerLong, G3_scratch, O3);
  __ ldf(FloatRegisterImpl::D, O3, arrayOopDesc::base_offset_in_bytes(T_DOUBLE), Ftos_d);
}
532
533
// aaload bytecode: load an object reference from an Object[] array.
// In:  Otos_i = index (itos); array oop on tos (O2).
// Out: Otos_i = loaded reference (atos).
void TemplateTable::aaload() {
  transition(itos, atos);
  // Otos_i: index
  // tos: array
  // Element scale: log2(4) == 2 for narrow oops under UseCompressedOops,
  // full word size otherwise; load_heap_oop handles the decode.
  __ index_check(O2, Otos_i, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O3);
  __ load_heap_oop(O3, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
  __ verify_oop(Otos_i);  // debug-build sanity check on the loaded oop
}
542
543
// baload bytecode: load a (signed) byte/boolean element from an array.
// In:  Otos_i = index (itos); array oop on tos (O2).
// Out: Otos_i = sign-extended element (itos).
void TemplateTable::baload() {
  transition(itos, itos);
  // Otos_i: index
  // tos: array
  // Scale factor 0: byte elements, index == byte offset.
  __ index_check(O2, Otos_i, 0, G3_scratch, O3);
  __ ldsb(O3, arrayOopDesc::base_offset_in_bytes(T_BYTE), Otos_i);  // sign-extending load
}
551
552
// caload bytecode: load a char element from a char[] array.
// In:  Otos_i = index (itos); array oop on tos (O2).
// Out: Otos_i = zero-extended 16-bit element (itos).
void TemplateTable::caload() {
  transition(itos, itos);
  // Otos_i: index
  // tos: array
  __ index_check(O2, Otos_i, LogBytesPerShort, G3_scratch, O3);
  __ lduh(O3, arrayOopDesc::base_offset_in_bytes(T_CHAR), Otos_i);  // unsigned halfword load (char is unsigned)
}
// dastore bytecode: store a double into a double[] array.
// In:  Ftos_d = value (dtos); index and array are on the expression stack.
void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(O2); // index
  // Ftos_d: val
  // O3: array
  // Null/bounds check; O2 is reused for the scaled element offset base.
  __ index_check(O3, O2, LogBytesPerLong, G3_scratch, O2);
  __ stf(FloatRegisterImpl::D, Ftos_d, O2, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
}
800
801
// aastore: store an object reference into an Object[], with the dynamic
// array-store type check required by the JVM spec (ArrayStoreException if
// the value is not assignable to the array's element type; null always
// stores).  Operands are read from the expression stack without popping
// (so GC can still see them across the store barrier); Lesp is bumped by
// three slots at the end instead.
void TemplateTable::aastore() {
  Label store_ok, is_null, done;
  transition(vtos, vtos);
  __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
  __ ld(Lesp, Interpreter::expr_offset_in_bytes(1), O2); // get index
  __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(2), O3); // get array
  // Otos_i: val
  // O2: index
  // O3: array
  __ verify_oop(Otos_i);
  // Bounds check; leaves the scaled element address (minus the header
  // offset) in O1.  "without_pop": the operands stay on the stack.
  __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);

  // do array store check - check for NULL value first (null needs no
  // subtype check, but is profiled separately below)
  __ br_null( Otos_i, false, Assembler::pn, is_null );
  __ delayed()->nop();

  __ load_klass(O3, O4); // get array klass
  __ load_klass(Otos_i, O5); // get value klass

  // do fast instanceof cache test

  // Fetch the array's element klass to compare against the value klass.
  __ ld_ptr(O4, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes(), O4);

  // gen_subtype_check below assumes the value is in O0
  assert(Otos_i == O0, "just checking");

  // Otos_i: value
  // O1: addr - offset
  // O2: index
  // O3: array
  // O4: array element klass
  // O5: value klass

  // Address element(O1, 0, arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  // Generate a fast subtype check. Branch to store_ok if no
  // failure. Throw if failure.
  __ gen_subtype_check( O5, O4, G3_scratch, G4_scratch, G1_scratch, store_ok );

  // Not a subtype; so must throw exception ("never" condition: the
  // subtype check only falls through here on failure)
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ArrayStoreException_entry, G3_scratch );

  // Store is OK.  do_oop_store applies the GC write barrier (_bs->kind()).
  __ bind(store_ok);
  do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, _bs->kind(), true);

  // Pop the three operands in the delay slot of the unconditional branch.
  __ ba(false,done);
  __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize()); // adj sp (pops array, index and value)

  // Null value: store G0 (null) through the same barriered path.
  __ bind(is_null);
  do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, _bs->kind(), true);

  __ profile_null_seen(G3_scratch);
  __ inc(Lesp, 3* Interpreter::stackElementSize()); // adj sp (pops array, index and value)
  __ bind(done);
}
857
858
// bastore: store the int on tos into a byte[] (low byte only).
void TemplateTable::bastore() {
  transition(itos, vtos);
  __ pop_i(O2); // index
  // Otos_i: val
  // O3: array (popped by index_check)
  // unit shift 0: byte elements need no index scaling; O2 becomes the
  // element address minus the header offset
  __ index_check(O3, O2, 0, G3_scratch, O2);
  __ stb(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_BYTE));
}
867
868
869 void TemplateTable::castore() {
870 transition(itos, vtos);
871 __ pop_i(O2); // index
872 // Otos_i: val
1888 __ delayed()->nop();
1889 }
1890
1891 __ bind(default_case); // fall through (if not profiling)
1892 __ profile_switch_default(Ri);
1893
1894 __ bind(continue_execution);
1895 __ add( Lbcp, Rj, Lbcp );
1896 __ dispatch_next( vtos );
1897 }
1898
1899
// Common code for all *return bytecodes; 'state' selects which tos value
// is being returned.  For _return_register_finalizer (the quickened
// Object.<init> return), first registers the receiver for finalization
// if its class has the HAS_FINALIZER access flag set.
void TemplateTable::_return(TosState state) {
  transition(state, state);
  assert(_desc->calls_vm(), "inconsistent calls_vm information");

  if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
    assert(state == vtos, "only valid state");
    __ mov(G0, G3_scratch);               // local index 0 = the receiver
    __ access_local_ptr(G3_scratch, Otos_i);
    __ load_klass(Otos_i, O2);
    __ set(JVM_ACC_HAS_FINALIZER, G3);
    __ ld(O2, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc), O2);
    __ andcc(G3, O2, G0);                 // test the flag; result discarded (G0)
    Label skip_register_finalizer;
    __ br(Assembler::zero, false, Assembler::pn, skip_register_finalizer);
    __ delayed()->nop();

    // Call out to do finalizer registration
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Otos_i);

    __ bind(skip_register_finalizer);
  }

  __ remove_activation(state, /* throw_monitor_exception */ true);

  // The caller's SP was adjusted upon method entry to accommodate
  // the callee's non-argument locals. Undo that adjustment (the restore
  // happens in the delay slot of the return).
  __ ret(); // return to caller
  __ delayed()->restore(I5_savedSP, G0, SP);
}
2068 if (JvmtiExport::can_post_field_access()) {
2069 // Check to see if a field access watch has been set before we take
2070 // the time to call into the VM.
2071 Label Label1;
2072 assert_different_registers(Rcache, index, G1_scratch);
2073 Address get_field_access_count_addr(G1_scratch,
2074 (address)JvmtiExport::get_field_access_count_addr(),
2075 relocInfo::none);
2076 __ load_contents(get_field_access_count_addr, G1_scratch);
2077 __ tst(G1_scratch);
2078 __ br(Assembler::zero, false, Assembler::pt, Label1);
2079 __ delayed()->nop();
2080
2081 __ add(Rcache, in_bytes(cp_base_offset), Rcache);
2082
2083 if (is_static) {
2084 __ clr(Otos_i);
2085 } else {
2086 if (has_tos) {
2087 // save object pointer before call_VM() clobbers it
2088 __ push_ptr(Otos_i); // put object on tos where GC wants it.
2089 } else {
2090 // Load top of stack (do not pop the value off the stack);
2091 __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
2092 }
2093 __ verify_oop(Otos_i);
2094 }
2095 // Otos_i: object pointer or NULL if static
2096 // Rcache: cache entry pointer
2097 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2098 Otos_i, Rcache);
2099 if (!is_static && has_tos) {
2100 __ pop_ptr(Otos_i); // restore object pointer
2101 __ verify_oop(Otos_i);
2102 }
2103 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2104 __ bind(Label1);
2105 }
2106 }
2107
2108 void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
2109 transition(vtos, vtos);
2110
2111 Register Rcache = G3_scratch;
2112 Register index = G4_scratch;
2113 Register Rclass = Rcache;
2114 Register Roffset= G4_scratch;
2115 Register Rflags = G1_scratch;
2116 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2117
2118 resolve_cache_and_index(byte_no, Rcache, index);
2119 jvmti_post_field_access(Rcache, index, is_static, false);
2120 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2133 if (__ membar_has_effect(membar_bits)) {
2134 // Get volatile flag
2135 __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2136 __ and3(Rflags, Lscratch, Lscratch);
2137 }
2138
2139 Label checkVolatile;
2140
2141 // compute field type
2142 Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
2143 __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2144 // Make sure we don't need to mask Rflags for tosBits after the above shift
2145 ConstantPoolCacheEntry::verify_tosBits();
2146
2147 // Check atos before itos for getstatic, more likely (in Queens at least)
2148 __ cmp(Rflags, atos);
2149 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2150 __ delayed() ->cmp(Rflags, itos);
2151
2152 // atos
2153 __ load_heap_oop(Rclass, Roffset, Otos_i);
2154 __ verify_oop(Otos_i);
2155 __ push(atos);
2156 if (!is_static) {
2157 patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
2158 }
2159 __ ba(false, checkVolatile);
2160 __ delayed()->tst(Lscratch);
2161
2162 __ bind(notObj);
2163
2164 // cmp(Rflags, itos);
2165 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2166 __ delayed() ->cmp(Rflags, ltos);
2167
2168 // itos
2169 __ ld(Rclass, Roffset, Otos_i);
2170 __ push(itos);
2171 if (!is_static) {
2172 patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
2173 }
2314 break;
2315 case Bytecodes::_fast_cgetfield:
2316 __ lduh(Otos_i, Roffset, Otos_i);
2317 break;
2318 case Bytecodes::_fast_sgetfield:
2319 __ ldsh(Otos_i, Roffset, Otos_i);
2320 break;
2321 case Bytecodes::_fast_igetfield:
2322 __ ld(Otos_i, Roffset, Otos_i);
2323 break;
2324 case Bytecodes::_fast_lgetfield:
2325 __ ld_long(Otos_i, Roffset, Otos_l);
2326 break;
2327 case Bytecodes::_fast_fgetfield:
2328 __ ldf(FloatRegisterImpl::S, Otos_i, Roffset, Ftos_f);
2329 break;
2330 case Bytecodes::_fast_dgetfield:
2331 __ ldf(FloatRegisterImpl::D, Otos_i, Roffset, Ftos_d);
2332 break;
2333 case Bytecodes::_fast_agetfield:
2334 __ load_heap_oop(Otos_i, Roffset, Otos_i);
2335 break;
2336 default:
2337 ShouldNotReachHere();
2338 }
2339
2340 if (__ membar_has_effect(membar_bits)) {
2341 __ btst(Lscratch, Rflags);
2342 __ br(Assembler::zero, false, Assembler::pt, exit);
2343 __ delayed()->nop();
2344 volatile_barrier(membar_bits);
2345 __ bind(exit);
2346 }
2347
2348 if (state == atos) {
2349 __ verify_oop(Otos_i); // does not blow flags!
2350 }
2351 }
2352
2353 void TemplateTable::jvmti_post_fast_field_mod() {
2354 if (JvmtiExport::can_post_field_modification()) {
2503 __ bind(notVolatile);
2504 }
2505 }
2506
2507 __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2508 // Make sure we don't need to mask Rflags for tosBits after the above shift
2509 ConstantPoolCacheEntry::verify_tosBits();
2510
2511 // compute field type
2512 Label notInt, notShort, notChar, notObj, notByte, notLong, notFloat;
2513
2514 if (is_static) {
2515 // putstatic with object type most likely, check that first
2516 __ cmp(Rflags, atos );
2517 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2518 __ delayed() ->cmp(Rflags, itos );
2519
2520 // atos
2521 __ pop_ptr();
2522 __ verify_oop(Otos_i);
2523
2524 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2525
2526 __ ba(false, checkVolatile);
2527 __ delayed()->tst(Lscratch);
2528
2529 __ bind(notObj);
2530
2531 // cmp(Rflags, itos );
2532 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2533 __ delayed() ->cmp(Rflags, btos );
2534
2535 // itos
2536 __ pop_i();
2537 __ st(Otos_i, Rclass, Roffset);
2538 __ ba(false, checkVolatile);
2539 __ delayed()->tst(Lscratch);
2540
2541 __ bind(notInt);
2542
2543 } else {
2544 // putfield with int type most likely, check that first
2545 __ cmp(Rflags, itos );
2546 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2547 __ delayed() ->cmp(Rflags, atos );
2548
2549 // itos
2550 __ pop_i();
2551 pop_and_check_object(Rclass);
2552 __ st(Otos_i, Rclass, Roffset);
2553 patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
2554 __ ba(false, checkVolatile);
2555 __ delayed()->tst(Lscratch);
2556
2557 __ bind(notInt);
2558 // cmp(Rflags, atos );
2559 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2560 __ delayed() ->cmp(Rflags, btos );
2561
2562 // atos
2563 __ pop_ptr();
2564 pop_and_check_object(Rclass);
2565 __ verify_oop(Otos_i);
2566
2567 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2568
2569 patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
2570 __ ba(false, checkVolatile);
2571 __ delayed()->tst(Lscratch);
2572
2573 __ bind(notObj);
2574 }
2575
2576 // cmp(Rflags, btos );
2577 __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2578 __ delayed() ->cmp(Rflags, ltos );
2579
2580 // btos
2581 __ pop_i();
2582 if (!is_static) pop_and_check_object(Rclass);
2583 __ stb(Otos_i, Rclass, Roffset);
2584 if (!is_static) {
2585 patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
2586 }
2587 __ ba(false, checkVolatile);
2588 __ delayed()->tst(Lscratch);
2702 }
2703 }
2704
2705 __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
2706 ConstantPoolCacheEntry::f2_offset())), Roffset);
2707 pop_and_check_object(Rclass);
2708
2709 switch (bytecode()) {
2710 case Bytecodes::_fast_bputfield: __ stb(Otos_i, Rclass, Roffset); break;
2711 case Bytecodes::_fast_cputfield: /* fall through */
2712 case Bytecodes::_fast_sputfield: __ sth(Otos_i, Rclass, Roffset); break;
2713 case Bytecodes::_fast_iputfield: __ st(Otos_i, Rclass, Roffset); break;
2714 case Bytecodes::_fast_lputfield: __ st_long(Otos_l, Rclass, Roffset); break;
2715 case Bytecodes::_fast_fputfield:
2716 __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2717 break;
2718 case Bytecodes::_fast_dputfield:
2719 __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2720 break;
2721 case Bytecodes::_fast_aputfield:
2722 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2723 break;
2724 default:
2725 ShouldNotReachHere();
2726 }
2727
2728 if (__ membar_has_effect(write_bits)) {
2729 __ tst(Lscratch);
2730 __ br(Assembler::zero, false, Assembler::pt, exit);
2731 __ delayed()->nop();
2732 volatile_barrier(Assembler::StoreLoad);
2733 __ bind(exit);
2734 }
2735 }
2736
2737
// putfield: store into an instance field; shares its implementation with
// putstatic via putfield_or_static (is_static = false).
void TemplateTable::putfield(int byte_no) {
  putfield_or_static(byte_no, false);
}
2741
2742 void TemplateTable::putstatic(int byte_no) {
2744 }
2745
2746
2747 void TemplateTable::fast_xaccess(TosState state) {
2748 transition(vtos, state);
2749 Register Rcache = G3_scratch;
2750 Register Roffset = G4_scratch;
2751 Register Rflags = G4_scratch;
2752 Register Rreceiver = Lscratch;
2753
2754 __ ld_ptr(Llocals, Interpreter::value_offset_in_bytes(), Rreceiver);
2755
2756 // access constant pool cache (is resolved)
2757 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
2758 __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())), Roffset);
2759 __ add(Lbcp, 1, Lbcp); // needed to report exception at the correct bcp
2760
2761 __ verify_oop(Rreceiver);
2762 __ null_check(Rreceiver);
2763 if (state == atos) {
2764 __ load_heap_oop(Rreceiver, Roffset, Otos_i);
2765 } else if (state == itos) {
2766 __ ld (Rreceiver, Roffset, Otos_i) ;
2767 } else if (state == ftos) {
2768 __ ldf(FloatRegisterImpl::S, Rreceiver, Roffset, Ftos_f);
2769 } else {
2770 ShouldNotReachHere();
2771 }
2772
2773 Assembler::Membar_mask_bits membar_bits =
2774 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
2775 if (__ membar_has_effect(membar_bits)) {
2776
2777 // Get is_volatile value in Rflags and check if membar is needed
2778 __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset())), Rflags);
2779
2780 // Test volatile
2781 Label notVolatile;
2782 __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2783 __ btst(Rflags, Lscratch);
2784 __ br(Assembler::zero, false, Assembler::pt, notVolatile);
2846 invokevfinal_helper(Rscratch, Rret);
2847
2848 __ bind(notFinal);
2849
2850 __ mov(G5_method, Rscratch); // better scratch register
2851 __ load_receiver(G4_scratch, O0); // gets receiverOop
2852 // receiver is in O0
2853 __ verify_oop(O0);
2854
2855 // get return address
2856 Address table(Rtemp, (address)Interpreter::return_3_addrs_by_index_table());
2857 __ load_address(table);
2858 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
2859 // Make sure we don't need to mask Rret for tosBits after the above shift
2860 ConstantPoolCacheEntry::verify_tosBits();
2861 __ sll(Rret, LogBytesPerWord, Rret);
2862 __ ld_ptr(Rtemp, Rret, Rret); // get return address
2863
2864 // get receiver klass
2865 __ null_check(O0, oopDesc::klass_offset_in_bytes());
2866 __ load_klass(O0, Rrecv);
2867 __ verify_oop(Rrecv);
2868
2869 __ profile_virtual_call(Rrecv, O4);
2870
2871 generate_vtable_call(Rrecv, Rscratch, Rret);
2872 }
2873
// fast_invokevfinal: quickened invokevirtual of a final method -- the
// target is known at resolution time, so no vtable dispatch is needed.
void TemplateTable::fast_invokevfinal(int byte_no) {
  transition(vtos, vtos);

  // Load the resolved methodOop into G5_method (flags word into Lscratch)
  // from the constant pool cache entry.
  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Lscratch, true,
                             /*is_invokevfinal*/true);
  __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
  invokevfinal_helper(G3_scratch, Lscratch);
}
2882
2883 void TemplateTable::invokevfinal_helper(Register Rscratch, Register Rret) {
2884 Register Rtemp = G4_scratch;
2885
2886 __ verify_oop(G5_method);
3014 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
3015
3016 // get receiver
3017 __ and3(Rflags, 0xFF, Rscratch); // gets number of parameters
3018 __ load_receiver(Rscratch, O0);
3019 __ verify_oop(O0);
3020
3021 __ mov(Rflags, Rret);
3022
3023 // get return address
3024 Address table(Rscratch, (address)Interpreter::return_5_addrs_by_index_table());
3025 __ load_address(table);
3026 __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret); // get return type
3027 // Make sure we don't need to mask Rret for tosBits after the above shift
3028 ConstantPoolCacheEntry::verify_tosBits();
3029 __ sll(Rret, LogBytesPerWord, Rret);
3030 __ ld_ptr(Rscratch, Rret, Rret); // get return address
3031
3032 // get receiver klass
3033 __ null_check(O0, oopDesc::klass_offset_in_bytes());
3034 __ load_klass(O0, RklassOop);
3035 __ verify_oop(RklassOop);
3036
3037 // Special case of invokeinterface called for virtual method of
3038 // java.lang.Object. See cpCacheOop.cpp for details.
3039 // This code isn't produced by javac, but could be produced by
3040 // another compliant java compiler.
3041 Label notMethod;
3042 __ set((1 << ConstantPoolCacheEntry::methodInterface), Rscratch);
3043 __ btst(Rflags, Rscratch);
3044 __ br(Assembler::zero, false, Assembler::pt, notMethod);
3045 __ delayed()->nop();
3046
3047 invokeinterface_object_method(RklassOop, Rinterface, Rret, Rflags);
3048
3049 __ bind(notMethod);
3050
3051 __ profile_virtual_call(RklassOop, O4);
3052
3053 //
3054 // find entry point to call
3277
3278 // slow case
3279 __ bind(slow_case);
3280 __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
3281 __ get_constant_pool(O1);
3282
3283 call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
3284
3285 __ ba(false, done);
3286 __ delayed()->nop();
3287
3288 // Initialize the header: mark, klass
3289 __ bind(initialize_header);
3290
3291 if (UseBiasedLocking) {
3292 __ ld_ptr(RinstanceKlass, Klass::prototype_header_offset_in_bytes() + sizeof(oopDesc), G4_scratch);
3293 } else {
3294 __ set((intptr_t)markOopDesc::prototype(), G4_scratch);
3295 }
3296 __ st_ptr(G4_scratch, RallocatedObject, oopDesc::mark_offset_in_bytes()); // mark
3297 __ store_klass_gap(G0, RallocatedObject); // klass gap if compressed
3298 __ store_klass(RinstanceKlass, RallocatedObject); // klass (last for cms)
3299
3300 {
3301 SkipIfEqual skip_if(
3302 _masm, G4_scratch, &DTraceAllocProbes, Assembler::zero);
3303 // Trigger dtrace event
3304 __ push(atos);
3305 __ call_VM_leaf(noreg,
3306 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), O0);
3307 __ pop(atos);
3308 }
3309
3310 // continue
3311 __ bind(done);
3312 }
3313
3314
3315
3316 void TemplateTable::newarray() {
3317 transition(itos, atos);
3318 __ ldub(Lbcp, 1, O1);
3334 __ verify_oop(Otos_i);
3335 __ tst(Otos_i);
3336 __ throw_if_not_1_x( Assembler::notZero, ok );
3337 __ delayed()->ld(Otos_i, arrayOopDesc::length_offset_in_bytes(), Otos_i);
3338 __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
3339 }
3340
3341
// checkcast: throw ClassCastException unless the object on tos is null or
// an instance of the class named by the u2 constant-pool index at bcp+1.
// The tos value is left unchanged (atos -> atos).
void TemplateTable::checkcast() {
  transition(atos, atos);
  Label done, is_null, quicked, cast_ok, resolved;
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;

  // Check for casting a NULL (null passes any checkcast)
  __ br_null(Otos_i, false, Assembler::pn, is_null);
  __ delayed()->nop();

  // Get value klass in RobjKlass
  __ load_klass(Otos_i, RobjKlass); // get value klass

  // Get constant pool index (u2 at bcp+1) into Roffset
  __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);

  // See if the checkcast has been quickened, i.e. the cp tag is already
  // JVM_CONSTANT_Class (resolved).  Lscratch gets the constant pool,
  // G3_scratch the tags array base.
  __ get_cpool_and_tags(Lscratch, G3_scratch);
  __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
  __ ldub(G3_scratch, Roffset, G3_scratch);
  __ cmp(G3_scratch, JVM_CONSTANT_Class);
  // Annulled delay slot (annul bit = true): the index-scaling sll only
  // executes on the taken (quicked) path, leaving Roffset intact otherwise.
  __ br(Assembler::equal, true, Assembler::pt, quicked);
  __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);

  // Not quickened: resolve the class via the VM.  The receiver is pushed
  // so GC can find (and update) it across the call, then restored.
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver

  __ br(Assembler::always, false, Assembler::pt, resolved);
  __ delayed()->nop();

  // Extract target class from constant pool (Lscratch = cpool,
  // Roffset = scaled index; skip the constantPoolOop header)
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
  __ bind(resolved);
  // Reload the value klass -- RobjKlass (and the receiver) may have been
  // disturbed by the VM call on the slow path above.
  __ load_klass(Otos_i, RobjKlass); // get value klass

  // Generate a fast subtype check. Branch to cast_ok if no
  // failure. Throw exception if failure.
  __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, cast_ok );

  // Not a subtype; so must throw exception ("never": this point is only
  // reached when the subtype check failed)
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );

  __ bind(cast_ok);

  if (ProfileInterpreter) {
    // Skip the null-seen profiling below; profile_null_seen emits nothing
    // when not profiling, so the branch is only needed here.
    __ ba(false, done);
    __ delayed()->nop();
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
}
3398
3399
// instanceof: replace the object ref on tos with 1 if it is a non-null
// instance of the class named by the u2 constant-pool index at bcp+1,
// else 0 (atos -> itos).  Structure parallels checkcast above.
void TemplateTable::instanceof() {
  Label done, is_null, quicked, resolved;
  transition(atos, itos);
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;

  // Check for a NULL receiver (instanceof yields 0 for null; Otos_i is
  // already 0/G0-null on that path)
  __ br_null(Otos_i, false, Assembler::pt, is_null);
  __ delayed()->nop();

  // Get value klass in RobjKlass
  __ load_klass(Otos_i, RobjKlass); // get value klass

  // Get constant pool index (u2 at bcp+1) into Roffset
  __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);

  // See if the instanceof has been quickened, i.e. the cp tag is already
  // JVM_CONSTANT_Class (resolved)
  __ get_cpool_and_tags(Lscratch, G3_scratch);
  __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
  __ ldub(G3_scratch, Roffset, G3_scratch);
  __ cmp(G3_scratch, JVM_CONSTANT_Class);
  // Annulled delay slot: the index-scaling sll only executes on the taken
  // (quicked) path.
  __ br(Assembler::equal, true, Assembler::pt, quicked);
  __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);

  // Not quickened: resolve via the VM; push/pop the receiver across the
  // call so GC can find (and update) it.
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver

  __ br(Assembler::always, false, Assembler::pt, resolved);
  __ delayed()->nop();


  // Extract target class from constant pool (reload the cpool into
  // Lscratch -- it may have been clobbered since get_cpool_and_tags)
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ get_constant_pool(Lscratch);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
  __ bind(resolved);
  // Reload the value klass -- it may have been disturbed by the VM call.
  __ load_klass(Otos_i, RobjKlass); // get value klass

  // Generate a fast subtype check. Branch to done if no
  // failure. Return 0 if failure.
  __ or3(G0, 1, Otos_i); // set result assuming quick tests succeed
  __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
  // Not a subtype; return 0;
  __ clr( Otos_i );

  if (ProfileInterpreter) {
    // Skip the null-seen profiling (profile_null_seen emits nothing when
    // not profiling, so the branch is only needed here).
    __ ba(false, done);
    __ delayed()->nop();
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
}
3456
3457 void TemplateTable::_breakpoint() {
3458
3459 // Note: We get here even if we are single stepping..
|