130 case TemplateTable::equal : return Assembler::NE;
131 case TemplateTable::not_equal : return Assembler::EQ;
132 case TemplateTable::less : return Assembler::GE;
133 case TemplateTable::less_equal : return Assembler::GT;
134 case TemplateTable::greater : return Assembler::LE;
135 case TemplateTable::greater_equal: return Assembler::LT;
136 }
137 ShouldNotReachHere();
138 return Assembler::EQ;
139 }
140
141
142 // Miscellaneous helper routines
143 // Store an oop (or NULL) at the Address described by dst.
144 // If val == noreg this means store a NULL
145 static void do_oop_store(InterpreterMacroAssembler* _masm,
146 Address dst,
147 Register val,
148 DecoratorSet decorators) {
// val == noreg encodes "store a NULL".  When a real value is stored it
// must already be in r0 (the GC barrier path relies on that).
149 assert(val == noreg || val == r0, "parameter is just for looks");
// r10 and r1 are passed as scratch registers for the barrier code.
150 __ store_heap_oop(dst, val, r10, r1, decorators);
151 }
152
153 static void do_oop_load(InterpreterMacroAssembler* _masm,
154 Address src,
155 Register dst,
156 DecoratorSet decorators) {
// Barrier-aware oop load from src into dst; r10 and r1 are scratch
// registers for the GC barrier implementation.
157 __ load_heap_oop(dst, src, r10, r1, decorators);
158 }
159
// Address of the byte at the given offset from the current bytecode
// pointer (rbcp).  Only valid for templates that declare uses_bcp().
160 Address TemplateTable::at_bcp(int offset) {
161 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
162 return Address(rbcp, offset);
163 }
164
165 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
166 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
167 int byte_no)
168 {
169 if (!RewriteBytecodes) return;
170 Label L_patch_done;
171
172 switch (bc) {
173 case Bytecodes::_fast_aputfield:
174 case Bytecodes::_fast_bputfield:
175 case Bytecodes::_fast_zputfield:
176 case Bytecodes::_fast_cputfield:
177 case Bytecodes::_fast_dputfield:
178 case Bytecodes::_fast_fputfield:
179 case Bytecodes::_fast_iputfield:
180 case Bytecodes::_fast_lputfield:
181 case Bytecodes::_fast_sputfield:
182 {
183 // We skip bytecode quickening for putfield instructions when
184 // the put_code written to the constant pool cache is zero.
185 // This is required so that every execution of this instruction
186 // calls out to InterpreterRuntime::resolve_get_put to do
187 // additional, required work.
188 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
189 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
190 __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
191 __ movw(bc_reg, bc);
192 __ cbzw(temp_reg, L_patch_done); // don't patch
// daload: load a double from a double[] — index on tos, array ref below it.
791 void TemplateTable::daload()
792 {
793 transition(itos, dtos);
// Index arrives in r0 (itos); move it aside and pop the array ref into r0.
794 __ mov(r1, r0);
795 __ pop_ptr(r0);
796 // r0: array
797 // r1: index
798 index_check(r0, r1); // leaves index in r1, kills rscratch1
// Add the header size expressed in 8-byte slots; the uxtw(3) addressing
// mode below scales the whole sum by 8 (sizeof(jdouble)).
799 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
800 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
801 }
802
// aaload: load a reference from an Object[] — index on tos, array ref below.
803 void TemplateTable::aaload()
804 {
805 transition(itos, atos);
// Index arrives in r0 (itos); stash it in r1 and pop the array ref.
806 __ mov(r1, r0);
807 __ pop_ptr(r0);
808 // r0: array
809 // r1: index
810 index_check(r0, r1); // leaves index in r1, kills rscratch1
// r1 = index + header size in heap-oop-sized slots; uxtw below scales
// by the heap oop size.
811 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
// Load through the GC barrier (IS_ARRAY decorator) into r0.
812 do_oop_load(_masm,
813 Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
814 r0,
815 IS_ARRAY);
816 }
817
// baload: load a byte/boolean element — index on tos, array ref below.
818 void TemplateTable::baload()
819 {
820 transition(itos, itos);
821 __ mov(r1, r0);
822 __ pop_ptr(r0);
823 // r0: array
824 // r1: index
825 index_check(r0, r1); // leaves index in r1, kills rscratch1
// Byte elements: the >> 0 / uxtw(0) are no-ops, kept for symmetry with
// the other *aload templates.
826 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
827 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
828 }
829
830 void TemplateTable::caload()
831 {
832 transition(itos, itos);
833 __ mov(r1, r0);
834 __ pop_ptr(r0);
835 // r0: array
1092 __ pop_ptr(r3);
1093 // v0: value
1094 // r1: index
1095 // r3: array
1096 index_check(r3, r1); // prefer index in r1
1097 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1098 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1099 }
1100
// aastore: store a reference into an Object[] with a dynamic array
// store check.  Operands stay on the expression stack until the end so
// they survive the VM calls/branches; esp is popped once at 'done'.
1101 void TemplateTable::aastore() {
1102 Label is_null, ok_is_subtype, done;
1103 transition(vtos, vtos);
1104 // stack: ..., array, index, value
1105 __ ldr(r0, at_tos()); // value
1106 __ ldr(r2, at_tos_p1()); // index
1107 __ ldr(r3, at_tos_p2()); // array
1108
// ary[idx] address: array base (r3) + r4 scaled by the heap oop size;
// r4 is computed below as index + header offset in oop slots.
1109 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1110
1111 index_check(r3, r2); // kills r1
1112 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1113
1114 // do array store check - check for NULL value first
1115 __ cbz(r0, is_null);
1116
1117 // Move subklass into r1
1118 __ load_klass(r1, r0);
1119 // Move superklass into r0
1120 __ load_klass(r0, r3);
1121 __ ldr(r0, Address(r0,
1122 ObjArrayKlass::element_klass_offset()));
1123 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1124
1125 // Generate subtype check. Blows r2, r5
1126 // Superklass in r0. Subklass in r1.
1127 __ gen_subtype_check(r1, ok_is_subtype);
1128
1129 // Come here on failure
1130 // object is at TOS
1131 __ b(Interpreter::_throw_ArrayStoreException_entry);
1132
1133 // Come here on success
1134 __ bind(ok_is_subtype);
1135
1136 // Get the value we will store
// (r0 was clobbered by the klass loads above, so reload from TOS)
1137 __ ldr(r0, at_tos());
1138 // Now store using the appropriate barrier
1139 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1140 __ b(done);
1141
1142 // Have a NULL in r0, r3=array, r2=index. Store NULL at ary[idx]
// Storing NULL needs no subtype check.
1143 __ bind(is_null);
1144 __ profile_null_seen(r2);
1145
1146 // Store a NULL
1147 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1148
1149 // Pop stack arguments
1150 __ bind(done);
1151 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1152 }
1153
1154 void TemplateTable::bastore()
1155 {
1156 transition(itos, vtos);
1157 __ pop_i(r1);
1158 __ pop_ptr(r3);
1159 // r0: value
1160 // r1: index
1161 // r3: array
1162 index_check(r3, r1); // prefer index in r1
1163
1164 // Need to check whether array is boolean or byte
1165 // since both types share the bastore bytecode.
1166 __ load_klass(r2, r3);
1167 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2004 __ br(j_not(cc), not_taken);
2005 branch(false, false);
2006 __ bind(not_taken);
2007 __ profile_not_taken_branch(r0);
2008 }
2009
// ifnull / ifnonnull: branch on the reference in r0 (atos).
2010 void TemplateTable::if_nullcmp(Condition cc)
2011 {
2012 transition(atos, vtos);
2013 // assume branch is more often taken than not (loops use backward branches)
2014 Label not_taken;
// Skip the taken path when the test fails: for ifnull (cc == equal) a
// non-zero reference means "not taken", and vice versa for ifnonnull.
2015 if (cc == equal)
2016 __ cbnz(r0, not_taken);
2017 else
2018 __ cbz(r0, not_taken);
2019 branch(false, false);
2020 __ bind(not_taken);
2021 __ profile_not_taken_branch(r0);
2022 }
2023
// if_acmpeq / if_acmpne: compare two references and branch.
2024 void TemplateTable::if_acmp(Condition cc)
2025 {
2026 transition(atos, vtos);
2027 // assume branch is more often taken than not (loops use backward branches)
2028 Label not_taken;
// One operand is already in r0 (atos); pop the other into r1.
2029 __ pop_ptr(r1);
2030 __ cmpoop(r1, r0);
2031 __ br(j_not(cc), not_taken);
2032 branch(false, false);
2033 __ bind(not_taken);
2034 __ profile_not_taken_branch(r0);
2035 }
2036
// ret: return from a jsr subroutine via the bci stored in a local slot.
2037 void TemplateTable::ret() {
2038 transition(vtos, vtos);
2039 // We might be moving to a safepoint. The thread which calls
2040 // Interpreter::notice_safepoints() will effectively flush its cache
2041 // when it makes a system call, but we need to do something to
2042 // ensure that we see the changed dispatch table.
2043 __ membar(MacroAssembler::LoadLoad);
2044
// Fetch the saved return bci from the local named by the bytecode
// operand, then rebuild rbcp = ConstMethod + bci + codes offset.
2045 locals_index(r1);
2046 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2047 __ profile_ret(r1, r2);
2048 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2049 __ lea(rbcp, Address(rbcp, r1));
2050 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
// generate_poll so a pending safepoint is honoured at dispatch.
2051 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2052 }
2053
2054 void TemplateTable::wide_ret() {
2055 transition(vtos, vtos);
2056 locals_index_wide(r1);
2480 // 8179954: We need to make sure that the code generated for
2481 // volatile accesses forms a sequentially-consistent set of
2482 // operations when combined with STLR and LDAR. Without a leading
2483 // membar it's possible for a simple Dekker test to fail if loads
2484 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2485 // the stores in one method and we interpret the loads in another.
2486 if (! UseBarriersForVolatile) {
2487 Label notVolatile;
2488 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2489 __ membar(MacroAssembler::AnyAny);
2490 __ bind(notVolatile);
2491 }
2492
2493 const Address field(obj, off);
2494
2495 Label Done, notByte, notBool, notInt, notShort, notChar,
2496 notLong, notFloat, notObj, notDouble;
2497
2498 // x86 uses a shift and mask or wings it with a shift plus assert
2499 // the mask is not needed. aarch64 just uses bitfield extract
2500 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2501 ConstantPoolCacheEntry::tos_state_bits);
2502
2503 assert(btos == 0, "change code, btos != 0");
2504 __ cbnz(flags, notByte);
2505
2506 // Don't rewrite getstatic, only getfield
2507 if (is_static) rc = may_not_rewrite;
2508
2509 // btos
2510 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2511 __ push(btos);
2512 // Rewrite bytecode to be faster
2513 if (rc == may_rewrite) {
2514 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2515 }
2516 __ b(Done);
2517
2518 __ bind(notByte);
2519 __ cmp(flags, (u1)ztos);
2520 __ br(Assembler::NE, notBool);
2521
2522 // ztos (same code as btos)
2523 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2524 __ push(ztos);
2525 // Rewrite bytecode to be faster
2526 if (rc == may_rewrite) {
2527 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2528 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2529 }
2530 __ b(Done);
2531
2532 __ bind(notBool);
2533 __ cmp(flags, (u1)atos);
2534 __ br(Assembler::NE, notObj);
2535 // atos
2536 do_oop_load(_masm, field, r0, IN_HEAP);
2537 __ push(atos);
2538 if (rc == may_rewrite) {
2539 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2540 }
2541 __ b(Done);
2542
2543 __ bind(notObj);
2544 __ cmp(flags, (u1)itos);
2545 __ br(Assembler::NE, notInt);
2546 // itos
2547 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2548 __ push(itos);
2549 // Rewrite bytecode to be faster
2550 if (rc == may_rewrite) {
2551 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2552 }
2553 __ b(Done);
2554
2555 __ bind(notInt);
2556 __ cmp(flags, (u1)ctos);
2557 __ br(Assembler::NE, notChar);
2558 // ctos
2559 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2560 __ push(ctos);
2561 // Rewrite bytecode to be faster
2691 // c_rarg1: object pointer set up above (NULL if static)
2692 // c_rarg2: cache entry pointer
2693 // c_rarg3: jvalue object on the stack
2694 __ call_VM(noreg,
2695 CAST_FROM_FN_PTR(address,
2696 InterpreterRuntime::post_field_modification),
2697 c_rarg1, c_rarg2, c_rarg3);
2698 __ get_cache_and_index_at_bcp(cache, index, 1);
2699 __ bind(L1);
2700 }
2701 }
2702
2703 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2704 transition(vtos, vtos);
2705
2706 const Register cache = r2;
2707 const Register index = r3;
2708 const Register obj = r2;
2709 const Register off = r19;
2710 const Register flags = r0;
2711 const Register bc = r4;
2712
2713 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2714 jvmti_post_field_mod(cache, index, is_static);
2715 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2716
2717 Label Done;
2718 __ mov(r5, flags);
2719
2720 {
2721 Label notVolatile;
2722 __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2723 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2724 __ bind(notVolatile);
2725 }
2726
2727 // field address
2728 const Address field(obj, off);
2729
2730 Label notByte, notBool, notInt, notShort, notChar,
2731 notLong, notFloat, notObj, notDouble;
2732
2733 // x86 uses a shift and mask or wings it with a shift plus assert
2734 // the mask is not needed. aarch64 just uses bitfield extract
2735 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2736
2737 assert(btos == 0, "change code, btos != 0");
2738 __ cbnz(flags, notByte);
2739
2740 // Don't rewrite putstatic, only putfield
2741 if (is_static) rc = may_not_rewrite;
2742
2743 // btos
2744 {
2745 __ pop(btos);
2746 if (!is_static) pop_and_check_object(obj);
2747 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2748 if (rc == may_rewrite) {
2749 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2750 }
2751 __ b(Done);
2752 }
2755 __ cmp(flags, (u1)ztos);
2756 __ br(Assembler::NE, notBool);
2757
2758 // ztos
2759 {
2760 __ pop(ztos);
2761 if (!is_static) pop_and_check_object(obj);
2762 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2763 if (rc == may_rewrite) {
2764 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2765 }
2766 __ b(Done);
2767 }
2768
2769 __ bind(notBool);
2770 __ cmp(flags, (u1)atos);
2771 __ br(Assembler::NE, notObj);
2772
2773 // atos
2774 {
2775 __ pop(atos);
2776 if (!is_static) pop_and_check_object(obj);
2777 // Store into the field
2778 do_oop_store(_masm, field, r0, IN_HEAP);
2779 if (rc == may_rewrite) {
2780 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2781 }
2782 __ b(Done);
2783 }
2784
2785 __ bind(notObj);
2786 __ cmp(flags, (u1)itos);
2787 __ br(Assembler::NE, notInt);
2788
2789 // itos
2790 {
2791 __ pop(itos);
2792 if (!is_static) pop_and_check_object(obj);
2793 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2794 if (rc == may_rewrite) {
2795 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2796 }
2797 __ b(Done);
2798 }
2799
2800 __ bind(notInt);
2801 __ cmp(flags, (u1)ctos);
2802 __ br(Assembler::NE, notChar);
// putstatic: static field store — shared implementation with putfield,
// selected via is_static = true.
2902 void TemplateTable::putstatic(int byte_no) {
2903 putfield_or_static(byte_no, true);
2904 }
2905
// Helper for the fast_*putfield bytecodes: when a JVMTI field
// modification watch is active, call into the runtime to post the
// event.  The tos value is saved on the expression stack (doubling as
// the jvalue argument) and restored after the VM call.
2906 void TemplateTable::jvmti_post_fast_field_mod()
2907 {
2908 if (JvmtiExport::can_post_field_modification()) {
2909 // Check to see if a field modification watch has been set before
2910 // we take the time to call into the VM.
2911 Label L2;
2912 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()))
;
2913 __ ldrw(c_rarg3, Address(rscratch1));
2914 __ cbzw(c_rarg3, L2);
2915 __ pop_ptr(r19); // copy the object pointer from tos
2916 __ verify_oop(r19);
2917 __ push_ptr(r19); // put the object pointer back on tos
2918 // Save tos values before call_VM() clobbers them. Since we have
2919 // to do it for every data type, we use the saved values as the
2920 // jvalue object.
2921 switch (bytecode()) { // load values into the jvalue object
2922 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
2923 case Bytecodes::_fast_bputfield: // fall through
2924 case Bytecodes::_fast_zputfield: // fall through
2925 case Bytecodes::_fast_sputfield: // fall through
2926 case Bytecodes::_fast_cputfield: // fall through
2927 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
2928 case Bytecodes::_fast_dputfield: __ push_d(); break;
2929 case Bytecodes::_fast_fputfield: __ push_f(); break;
2930 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
2931
// Only fast put bytecodes reach here, anything else is a bug.
2932 default:
2933 ShouldNotReachHere();
2934 }
2935 __ mov(c_rarg3, esp); // points to jvalue on the stack
2936 // access constant pool cache entry
2937 __ get_cache_entry_pointer_at_bcp(c_rarg2, r0, 1);
2938 __ verify_oop(r19);
2939 // r19: object pointer copied above
2940 // c_rarg2: cache entry pointer
2941 // c_rarg3: jvalue object on the stack
2942 __ call_VM(noreg,
2943 CAST_FROM_FN_PTR(address,
2944 InterpreterRuntime::post_field_modification),
2945 r19, c_rarg2, c_rarg3);
2946
2947 switch (bytecode()) { // restore tos values
2948 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
2949 case Bytecodes::_fast_bputfield: // fall through
2950 case Bytecodes::_fast_zputfield: // fall through
2951 case Bytecodes::_fast_sputfield: // fall through
2952 case Bytecodes::_fast_cputfield: // fall through
2953 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
2954 case Bytecodes::_fast_dputfield: __ pop_d(); break;
2955 case Bytecodes::_fast_fputfield: __ pop_f(); break;
2956 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
2957 default: break;
2958 }
2959 __ bind(L2);
2960 }
2961 }
2962
2963 void TemplateTable::fast_storefield(TosState state)
2964 {
2965 transition(state, vtos);
2966
2967 ByteSize base = ConstantPoolCache::base_offset();
2978 // replace index with field offset from cache entry
2979 __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2980
2981 {
2982 Label notVolatile;
2983 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2984 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2985 __ bind(notVolatile);
2986 }
2987
2988 Label notVolatile;
2989
2990 // Get object from stack
2991 pop_and_check_object(r2);
2992
2993 // field address
2994 const Address field(r2, r1);
2995
2996 // access field
2997 switch (bytecode()) {
2998 case Bytecodes::_fast_aputfield:
2999 do_oop_store(_masm, field, r0, IN_HEAP);
3000 break;
3001 case Bytecodes::_fast_lputfield:
3002 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
3003 break;
3004 case Bytecodes::_fast_iputfield:
3005 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3006 break;
3007 case Bytecodes::_fast_zputfield:
3008 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
3009 break;
3010 case Bytecodes::_fast_bputfield:
3011 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
3012 break;
3013 case Bytecodes::_fast_sputfield:
3014 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
3015 break;
3016 case Bytecodes::_fast_cputfield:
3017 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
3071 // r0: object
3072 __ verify_oop(r0);
3073 __ null_check(r0);
3074 const Address field(r0, r1);
3075
3076 // 8179954: We need to make sure that the code generated for
3077 // volatile accesses forms a sequentially-consistent set of
3078 // operations when combined with STLR and LDAR. Without a leading
3079 // membar it's possible for a simple Dekker test to fail if loads
3080 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3081 // the stores in one method and we interpret the loads in another.
3082 if (! UseBarriersForVolatile) {
3083 Label notVolatile;
3084 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3085 __ membar(MacroAssembler::AnyAny);
3086 __ bind(notVolatile);
3087 }
3088
3089 // access field
3090 switch (bytecode()) {
3091 case Bytecodes::_fast_agetfield:
3092 do_oop_load(_masm, field, r0, IN_HEAP);
3093 __ verify_oop(r0);
3094 break;
3095 case Bytecodes::_fast_lgetfield:
3096 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3097 break;
3098 case Bytecodes::_fast_igetfield:
3099 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3100 break;
3101 case Bytecodes::_fast_bgetfield:
3102 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3103 break;
3104 case Bytecodes::_fast_sgetfield:
3105 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3106 break;
3107 case Bytecodes::_fast_cgetfield:
3108 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3109 break;
3110 case Bytecodes::_fast_fgetfield:
3627 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3628 __ pop(atos); // restore the return value
3629
3630 }
3631 __ b(done);
3632 }
3633
3634 // slow case
3635 __ bind(slow_case);
3636 __ get_constant_pool(c_rarg1);
3637 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3638 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3639 __ verify_oop(r0);
3640
3641 // continue
3642 __ bind(done);
3643 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3644 __ membar(Assembler::StoreStore);
3645 }
3646
// newarray: allocate a primitive array via the runtime.
3647 void TemplateTable::newarray() {
3648 transition(itos, atos);
// c_rarg1 = element type code (unsigned byte bytecode operand),
// c_rarg2 = element count from tos; the result oop comes back in r0.
3649 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3650 __ mov(c_rarg2, r0);
3651 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3652 c_rarg1, c_rarg2);
3653 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3654 __ membar(Assembler::StoreStore);
3655 }
3656
// anewarray: allocate a reference array via the runtime.
3657 void TemplateTable::anewarray() {
3658 transition(itos, atos);
// c_rarg1 = constant pool, c_rarg2 = CP index of the element class,
// c_rarg3 = element count from tos; the result oop comes back in r0.
3659 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3660 __ get_constant_pool(c_rarg1);
3661 __ mov(c_rarg3, r0);
3662 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3663 c_rarg1, c_rarg2, c_rarg3);
3664 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3665 __ membar(Assembler::StoreStore);
3666 }
3698 __ bind(quicked);
3699 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3700 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3701
3702 __ bind(resolved);
3703 __ load_klass(r19, r3);
3704
3705 // Generate subtype check. Blows r2, r5. Object in r3.
3706 // Superklass in r0. Subklass in r19.
3707 __ gen_subtype_check(r19, ok_is_subtype);
3708
3709 // Come here on failure
3710 __ push(r3);
3711 // object is at TOS
3712 __ b(Interpreter::_throw_ClassCastException_entry);
3713
3714 // Come here on success
3715 __ bind(ok_is_subtype);
3716 __ mov(r0, r3); // Restore object in r3
3717
3718 // Collect counts on whether this test sees NULLs a lot or not.
3719 if (ProfileInterpreter) {
3720 __ b(done);
3721 __ bind(is_null);
3722 __ profile_null_seen(r2);
3723 } else {
3724 __ bind(is_null); // same as 'done'
3725 }
3726 __ bind(done);
3727 }
3728
3729 void TemplateTable::instanceof() {
3730 transition(atos, itos);
3731 Label done, is_null, ok_is_subtype, quicked, resolved;
3732 __ cbz(r0, is_null);
3733
3734 // Get cpool & tags index
3735 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3736 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3737 // See if bytecode has already been quicked
3738 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3739 __ lea(r1, Address(rscratch1, r19));
3740 __ ldarb(r1, r1);
3741 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3742 __ br(Assembler::EQ, quicked);
3743
3744 __ push(atos); // save receiver for result, and for GC
3745 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
|
130 case TemplateTable::equal : return Assembler::NE;
131 case TemplateTable::not_equal : return Assembler::EQ;
132 case TemplateTable::less : return Assembler::GE;
133 case TemplateTable::less_equal : return Assembler::GT;
134 case TemplateTable::greater : return Assembler::LE;
135 case TemplateTable::greater_equal: return Assembler::LT;
136 }
137 ShouldNotReachHere();
138 return Assembler::EQ;
139 }
140
141
142 // Miscellaneous helper routines
143 // Store an oop (or NULL) at the Address described by dst.
144 // If val == noreg this means store a NULL
145 static void do_oop_store(InterpreterMacroAssembler* _masm,
146 Address dst,
147 Register val,
148 DecoratorSet decorators) {
// val == noreg encodes "store a NULL"; a real value must be in r0.
// r10/r1 are scratch registers for the GC barrier; the extra noreg is
// presumably an unused third temp — confirm against store_heap_oop's
// signature in this tree.
149 assert(val == noreg || val == r0, "parameter is just for looks");
150 __ store_heap_oop(dst, val, r10, r1, noreg, decorators);
151 }
152
153 static void do_oop_load(InterpreterMacroAssembler* _masm,
154 Address src,
155 Register dst,
156 DecoratorSet decorators) {
// Barrier-aware oop load from src into dst; r10 and r1 are scratch
// registers for the GC barrier implementation.
157 __ load_heap_oop(dst, src, r10, r1, decorators);
158 }
159
// Address of the byte at the given offset from the current bytecode
// pointer (rbcp).  Only valid for templates that declare uses_bcp().
160 Address TemplateTable::at_bcp(int offset) {
161 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
162 return Address(rbcp, offset);
163 }
164
165 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
166 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
167 int byte_no)
168 {
169 if (!RewriteBytecodes) return;
170 Label L_patch_done;
171
172 switch (bc) {
173 case Bytecodes::_fast_qputfield:
174 case Bytecodes::_fast_aputfield:
175 case Bytecodes::_fast_bputfield:
176 case Bytecodes::_fast_zputfield:
177 case Bytecodes::_fast_cputfield:
178 case Bytecodes::_fast_dputfield:
179 case Bytecodes::_fast_fputfield:
180 case Bytecodes::_fast_iputfield:
181 case Bytecodes::_fast_lputfield:
182 case Bytecodes::_fast_sputfield:
183 {
184 // We skip bytecode quickening for putfield instructions when
185 // the put_code written to the constant pool cache is zero.
186 // This is required so that every execution of this instruction
187 // calls out to InterpreterRuntime::resolve_get_put to do
188 // additional, required work.
189 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
190 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
191 __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
192 __ movw(bc_reg, bc);
193 __ cbzw(temp_reg, L_patch_done); // don't patch
// daload: load a double from a double[] — index on tos, array ref below it.
792 void TemplateTable::daload()
793 {
794 transition(itos, dtos);
// Index arrives in r0 (itos); move it aside and pop the array ref into r0.
795 __ mov(r1, r0);
796 __ pop_ptr(r0);
797 // r0: array
798 // r1: index
799 index_check(r0, r1); // leaves index in r1, kills rscratch1
// Add the header size expressed in 8-byte slots; the uxtw(3) addressing
// mode below scales the whole sum by 8 (sizeof(jdouble)).
800 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
801 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
802 }
803
// aaload: load a reference from an Object[] — index on tos, array ref
// below.  With ValueArrayFlatten, flattened value arrays are detected
// at runtime and loaded via the VM instead of a plain oop load.
804 void TemplateTable::aaload()
805 {
806 transition(itos, atos);
807 __ mov(r1, r0);
808 __ pop_ptr(r0);
809 // r0: array
810 // r1: index
811 index_check(r0, r1); // leaves index in r1, kills rscratch1
812 if (ValueArrayFlatten) {
813 Label is_flat_array, done;
814
// A flattened array cannot be read with an oop load; go to the runtime.
815 __ test_flattened_array_oop(r0, r8 /*temp*/, is_flat_array);
816 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
817 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
818
819 __ b(done);
820 __ bind(is_flat_array);
// On this path r1 still holds the raw index (the header offset above
// was only added on the non-flat path).
821 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_load), r0, r1);
822 __ bind(done);
823 } else {
824 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
825 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
826 }
827 }
828
// baload: load a byte/boolean element — index on tos, array ref below.
829 void TemplateTable::baload()
830 {
831 transition(itos, itos);
832 __ mov(r1, r0);
833 __ pop_ptr(r0);
834 // r0: array
835 // r1: index
836 index_check(r0, r1); // leaves index in r1, kills rscratch1
// Byte elements: the >> 0 / uxtw(0) are no-ops, kept for symmetry with
// the other *aload templates.
837 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
838 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
839 }
840
841 void TemplateTable::caload()
842 {
843 transition(itos, itos);
844 __ mov(r1, r0);
845 __ pop_ptr(r0);
846 // r0: array
1103 __ pop_ptr(r3);
1104 // v0: value
1105 // r1: index
1106 // r3: array
1107 index_check(r3, r1); // prefer index in r1
1108 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1109 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1110 }
1111
// aastore: store a reference into an Object[] with a dynamic array
// store check.  With Valhalla enabled there are extra paths: null
// stores into null-free arrays throw NPE, and flattened arrays take a
// runtime call with an exact element-type check.  Operands stay on the
// expression stack until 'done' so they survive the VM calls.
1112 void TemplateTable::aastore() {
1113 Label is_null, ok_is_subtype, done;
1114 transition(vtos, vtos);
1115 // stack: ..., array, index, value
1116 __ ldr(r0, at_tos()); // value
1117 __ ldr(r2, at_tos_p1()); // index
1118 __ ldr(r3, at_tos_p2()); // array
1119
1120 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1121
1122 index_check(r3, r2); // kills r1
1123
1124 // r4 = index + header offset in oop slots; element_address above
1125 // scales r4 by the heap oop size so it addresses ary[idx].
1125 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1126
1127 // do array store check - check for NULL value first
1128 __ cbz(r0, is_null);
1129
1130 Label is_flat_array;
1131 if (ValueArrayFlatten) {
1132 __ test_flattened_array_oop(r3, r8 /*temp*/, is_flat_array);
1133 }
1134
1135 // Move subklass into r1
1136 __ load_klass(r1, r0);
1137
1138 // Move superklass into r0
1139 __ load_klass(r0, r3);
1140 __ ldr(r0, Address(r0, ObjArrayKlass::element_klass_offset()));
1141 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1142
1143 // Generate subtype check. Blows r2, r5
1144 // Superklass in r0. Subklass in r1.
1145
1146 __ gen_subtype_check(r1, ok_is_subtype);
1147
1148 // Come here on failure
1149 // object is at TOS
1150 __ b(Interpreter::_throw_ArrayStoreException_entry);
1151
1152
1153 // Come here on success
1154 __ bind(ok_is_subtype);
1155
1156
1157 // Get the value we will store
// (r0 was clobbered by the klass loads above, so reload from TOS)
1158 __ ldr(r0, at_tos());
1159 // Now store using the appropriate barrier
1160 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1161 __ b(done);
1162
1163 // Have a NULL in r0, r3=array, r2=index. Store NULL at ary[idx]
1164 __ bind(is_null);
1165 __ profile_null_seen(r2);
1166
1167 if (EnableValhalla) {
1168 Label is_null_into_value_array_npe, store_null;
1169
1170 // No way to store null in flat array
1171 __ test_null_free_array_oop(r3, r8, is_null_into_value_array_npe);
1172 __ b(store_null);
1173
1174 __ bind(is_null_into_value_array_npe);
1175 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1176
1177 __ bind(store_null);
1178 }
1179
1180 // Store a NULL
1181 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1182 __ b(done);
1183
1184 if (EnableValhalla) {
1185 Label is_type_ok;
1186
1187 // store non-null value
1188 __ bind(is_flat_array);
1189
1190 // Simplistic type check...
1191 // r0 - value, r2 - index, r3 - array.
1192
1193 // Profile the not-null value's klass.
1194 // Load value class
1195 __ load_klass(r1, r0);
1196 __ profile_typecheck(r2, r1, r0); // blows r2, and r0
1197
1198 // flat value array needs exact type match
1199 // is "r8 == r0" (value subclass == array element superclass)
1200
1201 // Move element klass into r0
1202
1203 __ load_klass(r0, r3);
1204
1205 __ ldr(r0, Address(r0, ArrayKlass::element_klass_offset()));
1206 __ cmp(r0, r1);
1207 __ br(Assembler::EQ, is_type_ok);
1208
1209 __ profile_typecheck_failed(r2);
1210 __ b(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1211
1212 __ bind(is_type_ok);
1213
1214 // DMS CHECK: Reload from TOS to be safe, because of profile_typecheck that blows r2 and r0.
1215 // Should we really do it?
1216 __ ldr(r1, at_tos()); // value
1217 __ mov(r2, r3); // array, ldr(r2, at_tos_p2());
1218 __ ldr(r3, at_tos_p1()); // index
// Runtime performs the flattened store; falls through to 'done'.
1219 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), r1, r2, r3);
1220 }
1221
1222
1223 // Pop stack arguments
1224 __ bind(done);
1225 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1226 }
1227
1228 void TemplateTable::bastore()
1229 {
1230 transition(itos, vtos);
1231 __ pop_i(r1);
1232 __ pop_ptr(r3);
1233 // r0: value
1234 // r1: index
1235 // r3: array
1236 index_check(r3, r1); // prefer index in r1
1237
1238 // Need to check whether array is boolean or byte
1239 // since both types share the bastore bytecode.
1240 __ load_klass(r2, r3);
1241 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2078 __ br(j_not(cc), not_taken);
2079 branch(false, false);
2080 __ bind(not_taken);
2081 __ profile_not_taken_branch(r0);
2082 }
2083
// ifnull / ifnonnull: branch on the reference in r0 (atos).
2084 void TemplateTable::if_nullcmp(Condition cc)
2085 {
2086 transition(atos, vtos);
2087 // assume branch is more often taken than not (loops use backward branches)
2088 Label not_taken;
// Skip the taken path when the test fails: for ifnull (cc == equal) a
// non-zero reference means "not taken", and vice versa for ifnonnull.
2089 if (cc == equal)
2090 __ cbnz(r0, not_taken);
2091 else
2092 __ cbz(r0, not_taken);
2093 branch(false, false);
2094 __ bind(not_taken);
2095 __ profile_not_taken_branch(r0);
2096 }
2097
// if_acmpeq / if_acmpne with Valhalla value-type semantics.  The
// ACmpOnValues flag selects one of three strategies for making value
// objects compare by substitutability rather than identity.
2098 void TemplateTable::if_acmp(Condition cc) {
2099 transition(atos, vtos);
2100 // assume branch is more often taken than not (loops use backward branches)
2101 Label taken, not_taken;
2102 __ pop_ptr(r1);
2103
2104 Register is_value_mask = rscratch1;
// NOTE(review): the mask is loaded even when EnableValhalla is off;
// it is only consumed on the Valhalla paths below.
2105 __ mov(is_value_mask, markOopDesc::always_locked_pattern);
2106
// Mode 3: full substitutability check via a runtime call when both
// operands are non-null value objects of the same klass.
2107 if (EnableValhalla && ACmpOnValues == 3) {
2108 __ cmp(r1, r0);
// Identical references decide immediately.
2109 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2110
2111 // might be substitutable, test if either r0 or r1 is null
2112 __ andr(r2, r0, r1);
2113 __ cbz(r2, (cc == equal) ? not_taken : taken);
2114
2115 // and both are values ?
2116 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2117 __ andr(r2, r2, is_value_mask);
2118 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2119 __ andr(r4, r4, is_value_mask);
2120 __ andr(r2, r2, r4);
2121 __ cmp(r2, is_value_mask);
2122 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2123
2124 // same value klass ?
2125 __ load_metadata(r2, r1);
2126 __ load_metadata(r4, r0);
2127 __ cmp(r2, r4);
2128 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2129
2130 // Know both are the same type, let's test for substitutability...
2131 if (cc == equal) {
2132 invoke_is_substitutable(r0, r1, taken, not_taken);
2133 } else {
2134 invoke_is_substitutable(r0, r1, not_taken, taken);
2135 }
// invoke_is_substitutable always branches to one of the labels.
2136 __ stop("Not reachable");
2137 }
2138
// Mode 1: if r1 is a non-null value object, OR a low bit into it so the
// pointer compare below can never report equality — presumably
// implementing "value objects are never acmp-identical"; confirm.
2139 if (EnableValhalla && ACmpOnValues == 1) {
2140 Label is_null;
2141 __ cbz(r1, is_null);
2142 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2143 __ andr(r2, r2, is_value_mask);
2144 __ cmp(r2, is_value_mask);
2145 __ cset(r2, Assembler::EQ);
2146 __ orr(r1, r1, r2);
2147 __ bind(is_null);
2148 }
2149
2150 __ cmpoop(r1, r0);
2151
// Mode 2: equal references that are value objects must compare as
// not-equal, so test r1's mark word and invert the condition.
2152 if (EnableValhalla && ACmpOnValues == 2) {
2153 __ br(Assembler::NE, (cc == not_equal) ? taken : not_taken);
2154 __ cbz(r1, (cc == equal) ? taken : not_taken);
2155 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2156 __ andr(r2, r2, is_value_mask);
2157 __ cmp(r2, is_value_mask);
2158 cc = (cc == equal) ? not_equal : equal;
2159 }
2160
2161 __ br(j_not(cc), not_taken);
2162 __ bind(taken);
2163 branch(false, false);
2164 __ bind(not_taken);
2165 __ profile_not_taken_branch(r0);
2166 }
2167
// Call into the runtime to decide whether two value objects of the same
// value klass are substitutable (i.e. acmp-equal), then dispatch on the
// boolean answer: jump to is_subst when they are, not_subst when not.
void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
                                            Label& is_subst, Label& not_subst) {

  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
  // Restored... r0 answer, jmp to outcome...
  __ cbz(r0, not_subst);   // r0 == 0  =>  not substitutable
  __ b(is_subst);
}
2176
2177
// Generate the ret bytecode: return from a jsr subroutine by reloading
// the bcp from the return-address local slot and re-dispatching.
void TemplateTable::ret() {
  transition(vtos, vtos);
  // We might be moving to a safepoint. The thread which calls
  // Interpreter::notice_safepoints() will effectively flush its cache
  // when it makes a system call, but we need to do something to
  // ensure that we see the changed dispatch table.
  __ membar(MacroAssembler::LoadLoad);

  locals_index(r1);
  __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
  __ profile_ret(r1, r2);
  // Rebuild bcp: ConstMethod* + bci + codes_offset
  __ ldr(rbcp, Address(rmethod, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, r1));
  __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
  __ dispatch_next(vtos, 0, /*generate_poll*/true);
}
2194
2195 void TemplateTable::wide_ret() {
2196 transition(vtos, vtos);
2197 locals_index_wide(r1);
2621 // 8179954: We need to make sure that the code generated for
2622 // volatile accesses forms a sequentially-consistent set of
2623 // operations when combined with STLR and LDAR. Without a leading
2624 // membar it's possible for a simple Dekker test to fail if loads
2625 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2626 // the stores in one method and we interpret the loads in another.
2627 if (! UseBarriersForVolatile) {
2628 Label notVolatile;
2629 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2630 __ membar(MacroAssembler::AnyAny);
2631 __ bind(notVolatile);
2632 }
2633
2634 const Address field(obj, off);
2635
2636 Label Done, notByte, notBool, notInt, notShort, notChar,
2637 notLong, notFloat, notObj, notDouble;
2638
2639 // x86 uses a shift and mask or wings it with a shift plus assert
2640 // the mask is not needed. aarch64 just uses bitfield extract
2641 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2642
2643 assert(btos == 0, "change code, btos != 0");
2644 __ cbnz(flags, notByte);
2645
2646 // Don't rewrite getstatic, only getfield
2647 if (is_static) rc = may_not_rewrite;
2648
2649 // btos
2650 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2651 __ push(btos);
2652 // Rewrite bytecode to be faster
2653 if (rc == may_rewrite) {
2654 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2655 }
2656 __ b(Done);
2657
2658 __ bind(notByte);
2659 __ cmp(flags, (u1)ztos);
2660 __ br(Assembler::NE, notBool);
2661
2662 // ztos (same code as btos)
2663 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2664 __ push(ztos);
2665 // Rewrite bytecode to be faster
2666 if (rc == may_rewrite) {
2667 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2668 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2669 }
2670 __ b(Done);
2671
2672 __ bind(notBool);
2673 __ cmp(flags, (u1)atos);
2674 __ br(Assembler::NE, notObj);
2675 // atos
2676 if (!EnableValhalla) {
2677 do_oop_load(_masm, field, r0, IN_HEAP);
2678 __ push(atos);
2679 if (rc == may_rewrite) {
2680 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2681 }
2682 __ b(Done);
2683 } else { // Valhalla
2684
2685 if (is_static) {
2686 __ load_heap_oop(r0, field);
2687 Label isFlattenable, isUninitialized;
2688 // Issue below if the static field has not been initialized yet
2689 __ test_field_is_flattenable(raw_flags, r8 /*temp*/, isFlattenable);
2690 // Not flattenable case
2691 __ push(atos);
2692 __ b(Done);
2693 // Flattenable case, must not return null even if uninitialized
2694 __ bind(isFlattenable);
2695 __ cbz(r0, isUninitialized);
2696 __ push(atos);
2697 __ b(Done);
2698 __ bind(isUninitialized);
2699 __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2700 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_value_field), obj, raw_flags);
2701 __ verify_oop(r0);
2702 __ push(atos);
2703 __ b(Done);
2704 } else {
2705 Label isFlattened, isInitialized, isFlattenable, rewriteFlattenable;
2706 __ test_field_is_flattenable(raw_flags, r8 /*temp*/, isFlattenable);
2707 // Non-flattenable field case, also covers the object case
2708 __ load_heap_oop(r0, field);
2709 __ push(atos);
2710 if (rc == may_rewrite) {
2711 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2712 }
2713 __ b(Done);
2714 __ bind(isFlattenable);
2715 __ test_field_is_flattened(raw_flags, r8 /* temp */, isFlattened);
2716 // Non-flattened field case
2717 __ load_heap_oop(r0, field);
2718 __ cbnz(r0, isInitialized);
2719 __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2720 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field), obj, raw_flags);
2721 __ bind(isInitialized);
2722 __ verify_oop(r0);
2723 __ push(atos);
2724 __ b(rewriteFlattenable);
2725 __ bind(isFlattened);
2726 __ ldr(r10, Address(cache, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset())));
2727 __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2728 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field), obj, raw_flags, r10);
2729 __ verify_oop(r0);
2730 __ push(atos);
2731 __ bind(rewriteFlattenable);
2732 if (rc == may_rewrite) {
2733 patch_bytecode(Bytecodes::_fast_qgetfield, bc, r1);
2734 }
2735 __ b(Done);
2736 }
2737 }
2738
2739 __ bind(notObj);
2740 __ cmp(flags, (u1)itos);
2741 __ br(Assembler::NE, notInt);
2742 // itos
2743 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2744 __ push(itos);
2745 // Rewrite bytecode to be faster
2746 if (rc == may_rewrite) {
2747 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2748 }
2749 __ b(Done);
2750
2751 __ bind(notInt);
2752 __ cmp(flags, (u1)ctos);
2753 __ br(Assembler::NE, notChar);
2754 // ctos
2755 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2756 __ push(ctos);
2757 // Rewrite bytecode to be faster
2887 // c_rarg1: object pointer set up above (NULL if static)
2888 // c_rarg2: cache entry pointer
2889 // c_rarg3: jvalue object on the stack
2890 __ call_VM(noreg,
2891 CAST_FROM_FN_PTR(address,
2892 InterpreterRuntime::post_field_modification),
2893 c_rarg1, c_rarg2, c_rarg3);
2894 __ get_cache_and_index_at_bcp(cache, index, 1);
2895 __ bind(L1);
2896 }
2897 }
2898
2899 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2900 transition(vtos, vtos);
2901
2902 const Register cache = r2;
2903 const Register index = r3;
2904 const Register obj = r2;
2905 const Register off = r19;
2906 const Register flags = r0;
2907 const Register flags2 = r6;
2908 const Register bc = r4;
2909
2910 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2911 jvmti_post_field_mod(cache, index, is_static);
2912 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2913
2914 Label Done;
2915 __ mov(r5, flags);
2916
2917 {
2918 Label notVolatile;
2919 __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2920 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2921 __ bind(notVolatile);
2922 }
2923
2924 // field address
2925 const Address field(obj, off);
2926
2927 Label notByte, notBool, notInt, notShort, notChar,
2928 notLong, notFloat, notObj, notDouble;
2929
2930 __ mov(flags2, flags);
2931
2932 // x86 uses a shift and mask or wings it with a shift plus assert
2933 // the mask is not needed. aarch64 just uses bitfield extract
2934 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2935
2936 assert(btos == 0, "change code, btos != 0");
2937 __ cbnz(flags, notByte);
2938
2939 // Don't rewrite putstatic, only putfield
2940 if (is_static) rc = may_not_rewrite;
2941
2942 // btos
2943 {
2944 __ pop(btos);
2945 if (!is_static) pop_and_check_object(obj);
2946 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2947 if (rc == may_rewrite) {
2948 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2949 }
2950 __ b(Done);
2951 }
2954 __ cmp(flags, (u1)ztos);
2955 __ br(Assembler::NE, notBool);
2956
2957 // ztos
2958 {
2959 __ pop(ztos);
2960 if (!is_static) pop_and_check_object(obj);
2961 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2962 if (rc == may_rewrite) {
2963 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2964 }
2965 __ b(Done);
2966 }
2967
2968 __ bind(notBool);
2969 __ cmp(flags, (u1)atos);
2970 __ br(Assembler::NE, notObj);
2971
2972 // atos
2973 {
2974 if (!EnableValhalla) {
2975 __ pop(atos);
2976 if (!is_static) pop_and_check_object(obj);
2977 // Store into the field
2978 do_oop_store(_masm, field, r0, IN_HEAP);
2979 if (rc == may_rewrite) {
2980 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2981 }
2982 __ b(Done);
2983 } else { // Valhalla
2984
2985 __ pop(atos);
2986 if (is_static) {
2987 Label notFlattenable;
2988 __ test_field_is_not_flattenable(flags2, r8 /* temp */, notFlattenable);
2989 __ null_check(r0);
2990 __ bind(notFlattenable);
2991 do_oop_store(_masm, field, r0, IN_HEAP);
2992 __ b(Done);
2993 } else {
2994 Label isFlattenable, isFlattened, notBuffered, notBuffered2, rewriteNotFlattenable, rewriteFlattenable;
2995 __ test_field_is_flattenable(flags2, r8 /*temp*/, isFlattenable);
2996 // Not flattenable case, covers not flattenable values and objects
2997 pop_and_check_object(obj);
2998 // Store into the field
2999 do_oop_store(_masm, field, r0, IN_HEAP);
3000 __ bind(rewriteNotFlattenable);
3001 if (rc == may_rewrite) {
3002 patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
3003 }
3004 __ b(Done);
3005 // Implementation of the flattenable semantic
3006 __ bind(isFlattenable);
3007 __ null_check(r0);
3008 __ test_field_is_flattened(flags2, r8 /*temp*/, isFlattened);
3009 // Not flattened case
3010 pop_and_check_object(obj);
3011 // Store into the field
3012 do_oop_store(_masm, field, r0, IN_HEAP);
3013 __ b(rewriteFlattenable);
3014 __ bind(isFlattened);
3015 pop_and_check_object(obj);
3016 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value), r0, off, obj);
3017 __ bind(rewriteFlattenable);
3018 if (rc == may_rewrite) {
3019 patch_bytecode(Bytecodes::_fast_qputfield, bc, r19, true, byte_no);
3020 }
3021 __ b(Done);
3022 }
3023 } // Valhalla
3024 }
3025
3026 __ bind(notObj);
3027 __ cmp(flags, (u1)itos);
3028 __ br(Assembler::NE, notInt);
3029
3030 // itos
3031 {
3032 __ pop(itos);
3033 if (!is_static) pop_and_check_object(obj);
3034 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3035 if (rc == may_rewrite) {
3036 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
3037 }
3038 __ b(Done);
3039 }
3040
3041 __ bind(notInt);
3042 __ cmp(flags, (u1)ctos);
3043 __ br(Assembler::NE, notChar);
3143 void TemplateTable::putstatic(int byte_no) {
3144 putfield_or_static(byte_no, true);
3145 }
3146
// For the fast *putfield bytecodes: if a JVMTI field-modification watch is
// active, post the FieldModification event before the store happens.  The
// to-be-stored value (tos) is pushed onto the expression stack so it can be
// passed to the VM as a jvalue, and popped back afterwards since call_VM
// clobbers the tos registers.
void TemplateTable::jvmti_post_fast_field_mod()
{
  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ ldrw(c_rarg3, Address(rscratch1));
    __ cbzw(c_rarg3, L2);      // no watch set: skip the whole event path
    __ pop_ptr(r19);           // copy the object pointer from tos
    __ verify_oop(r19);
    __ push_ptr(r19);          // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {          // load values into the jvalue object
    case Bytecodes::_fast_qputfield: //fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(r0); break;
    case Bytecodes::_fast_dputfield: __ push_d(); break;
    case Bytecodes::_fast_fputfield: __ push_f(); break;
    case Bytecodes::_fast_lputfield: __ push_l(r0); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(c_rarg3, esp);             // points to jvalue on the stack
    // access constant pool cache entry
    __ get_cache_entry_pointer_at_bcp(c_rarg2, r0, 1);
    __ verify_oop(r19);
    // r19: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               r19, c_rarg2, c_rarg3);

    switch (bytecode()) {             // restore tos values
    case Bytecodes::_fast_qputfield: //fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
    case Bytecodes::_fast_dputfield: __ pop_d(); break;
    case Bytecodes::_fast_fputfield: __ pop_f(); break;
    case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
    default: break;
    }
    __ bind(L2);
  }
}
3205
3206 void TemplateTable::fast_storefield(TosState state)
3207 {
3208 transition(state, vtos);
3209
3210 ByteSize base = ConstantPoolCache::base_offset();
3221 // replace index with field offset from cache entry
3222 __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3223
3224 {
3225 Label notVolatile;
3226 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3227 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3228 __ bind(notVolatile);
3229 }
3230
3231 Label notVolatile;
3232
3233 // Get object from stack
3234 pop_and_check_object(r2);
3235
3236 // field address
3237 const Address field(r2, r1);
3238
3239 // access field
3240 switch (bytecode()) {
3241 case Bytecodes::_fast_qputfield: //fall through
3242 {
3243 Label isFlattened, done;
3244 __ null_check(r0);
3245 __ test_field_is_flattened(r3, r8 /* temp */, isFlattened);
3246 // No Flattened case
3247 do_oop_store(_masm, field, r0, IN_HEAP);
3248 __ b(done);
3249 __ bind(isFlattened);
3250 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value), r0, r1, r2);
3251 __ bind(done);
3252 }
3253 break;
3254 case Bytecodes::_fast_aputfield:
3255 do_oop_store(_masm, field, r0, IN_HEAP);
3256 break;
3257 case Bytecodes::_fast_lputfield:
3258 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
3259 break;
3260 case Bytecodes::_fast_iputfield:
3261 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3262 break;
3263 case Bytecodes::_fast_zputfield:
3264 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
3265 break;
3266 case Bytecodes::_fast_bputfield:
3267 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
3268 break;
3269 case Bytecodes::_fast_sputfield:
3270 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
3271 break;
3272 case Bytecodes::_fast_cputfield:
3273 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
3327 // r0: object
3328 __ verify_oop(r0);
3329 __ null_check(r0);
3330 const Address field(r0, r1);
3331
3332 // 8179954: We need to make sure that the code generated for
3333 // volatile accesses forms a sequentially-consistent set of
3334 // operations when combined with STLR and LDAR. Without a leading
3335 // membar it's possible for a simple Dekker test to fail if loads
3336 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3337 // the stores in one method and we interpret the loads in another.
3338 if (! UseBarriersForVolatile) {
3339 Label notVolatile;
3340 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3341 __ membar(MacroAssembler::AnyAny);
3342 __ bind(notVolatile);
3343 }
3344
3345 // access field
3346 switch (bytecode()) {
3347 case Bytecodes::_fast_qgetfield:
3348 {
3349 Label isFlattened, isInitialized, Done;
3350 // DMS CHECK: We don't need to reload multiple times, but stay close to original code
3351 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3352 __ test_field_is_flattened(r9, r8 /* temp */, isFlattened);
3353 // Non-flattened field case
3354 __ mov(r9, r0);
3355 __ load_heap_oop(r0, field);
3356 __ cbnz(r0, isInitialized);
3357 __ mov(r0, r9);
3358 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3359 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3360 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field), r0, r9);
3361 __ bind(isInitialized);
3362 __ verify_oop(r0);
3363 __ b(Done);
3364 __ bind(isFlattened);
3365 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3366 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3367 __ ldr(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset())));
3368 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field), r0, r9, r3);
3369 __ verify_oop(r0);
3370 __ bind(Done);
3371 }
3372 break;
3373 case Bytecodes::_fast_agetfield:
3374 do_oop_load(_masm, field, r0, IN_HEAP);
3375 __ verify_oop(r0);
3376 break;
3377 case Bytecodes::_fast_lgetfield:
3378 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3379 break;
3380 case Bytecodes::_fast_igetfield:
3381 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3382 break;
3383 case Bytecodes::_fast_bgetfield:
3384 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3385 break;
3386 case Bytecodes::_fast_sgetfield:
3387 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3388 break;
3389 case Bytecodes::_fast_cgetfield:
3390 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3391 break;
3392 case Bytecodes::_fast_fgetfield:
3909 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3910 __ pop(atos); // restore the return value
3911
3912 }
3913 __ b(done);
3914 }
3915
3916 // slow case
3917 __ bind(slow_case);
3918 __ get_constant_pool(c_rarg1);
3919 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3920 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3921 __ verify_oop(r0);
3922
3923 // continue
3924 __ bind(done);
3925 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3926 __ membar(Assembler::StoreStore);
3927 }
3928
// Generate the defaultvalue bytecode: create the default instance of the
// value class named by the 2-byte constant-pool index at bcp+1.  The work
// is done entirely in the runtime; the resulting oop comes back in r0 (atos).
void TemplateTable::defaultvalue() {
  transition(vtos, atos);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);   // c_rarg2 = CP index
  __ get_constant_pool(c_rarg1);                     // c_rarg1 = constant pool
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
          c_rarg1, c_rarg2);
  __ verify_oop(r0);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}
3939
// Generate the withfield bytecode: produce a copy of the value object on
// the stack with one field replaced.  All the heavy lifting happens in
// InterpreterRuntime::withfield, which reads its operands directly off the
// expression stack.
void TemplateTable::withfield() {
  transition(vtos, atos);
  resolve_cache_and_index(f2_byte, c_rarg1 /*cache*/, c_rarg2 /*index*/, sizeof(u2));

  // n.b. unlike x86 cache is now rcpool plus the indexed offset
  // so using rcpool to meet shared code expectations

  call_VM(r1, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), rcpool);
  __ verify_oop(r1);           // new value oop returned in r1
  // r0 holds the VM call's integer return value, used here as the number of
  // bytes to pop off the expression stack — presumably the consumed operands;
  // NOTE(review): confirm against InterpreterRuntime::withfield.
  __ add(esp, esp, r0);
  __ mov(r0, r1);              // move result into the atos register
}
3952
// Generate the newarray bytecode: allocate a primitive array.  The element
// type code is the unsigned byte at bcp+1; the length arrives in r0 (itos).
void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));   // c_rarg1 = element type code
  __ mov(c_rarg2, r0);                         // c_rarg2 = array length
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}
3962
// Generate the anewarray bytecode: allocate a reference array.  The element
// class is named by the 2-byte constant-pool index at bcp+1; the length
// arrives in r0 (itos).
void TemplateTable::anewarray() {
  transition(itos, atos);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);   // c_rarg2 = CP index
  __ get_constant_pool(c_rarg1);                     // c_rarg1 = constant pool
  __ mov(c_rarg3, r0);                               // c_rarg3 = array length
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          c_rarg1, c_rarg2, c_rarg3);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}
4004 __ bind(quicked);
4005 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
4006 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
4007
4008 __ bind(resolved);
4009 __ load_klass(r19, r3);
4010
4011 // Generate subtype check. Blows r2, r5. Object in r3.
4012 // Superklass in r0. Subklass in r19.
4013 __ gen_subtype_check(r19, ok_is_subtype);
4014
4015 // Come here on failure
4016 __ push(r3);
4017 // object is at TOS
4018 __ b(Interpreter::_throw_ClassCastException_entry);
4019
4020 // Come here on success
4021 __ bind(ok_is_subtype);
4022 __ mov(r0, r3); // Restore object in r3
4023
4024 __ b(done);
4025 __ bind(is_null);
4026
4027 // Collect counts on whether this test sees NULLs a lot or not.
4028 if (ProfileInterpreter) {
4029 __ profile_null_seen(r2);
4030 }
4031
4032 if (EnableValhalla) {
4033 // Get cpool & tags index
4034 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
4035 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
4036 // See if bytecode has already been quicked
4037 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
4038 __ lea(r1, Address(rscratch1, r19));
4039 __ ldarb(r1, r1);
4040 // See if CP entry is a Q-descriptor
4041 __ andr (r1, r1, JVM_CONSTANT_QDescBit);
4042 __ cmp(r1, (u1) JVM_CONSTANT_QDescBit);
4043 __ br(Assembler::NE, done);
4044 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
4045 }
4046
4047 __ bind(done);
4048 }
4049
4050 void TemplateTable::instanceof() {
4051 transition(atos, itos);
4052 Label done, is_null, ok_is_subtype, quicked, resolved;
4053 __ cbz(r0, is_null);
4054
4055 // Get cpool & tags index
4056 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
4057 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
4058 // See if bytecode has already been quicked
4059 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
4060 __ lea(r1, Address(rscratch1, r19));
4061 __ ldarb(r1, r1);
4062 __ cmp(r1, (u1)JVM_CONSTANT_Class);
4063 __ br(Assembler::EQ, quicked);
4064
4065 __ push(atos); // save receiver for result, and for GC
4066 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
|