7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "interpreter/interp_masm.hpp"
28 #include "interpreter/interpreter.hpp"
29 #include "interpreter/interpreterRuntime.hpp"
30 #include "interpreter/templateTable.hpp"
31 #include "memory/universe.hpp"
32 #include "oops/cpCache.hpp"
33 #include "oops/methodData.hpp"
34 #include "oops/objArrayKlass.hpp"
35 #include "oops/oop.inline.hpp"
36 #include "prims/methodHandles.hpp"
37 #include "runtime/frame.inline.hpp"
38 #include "runtime/sharedRuntime.hpp"
39 #include "runtime/stubRoutines.hpp"
40 #include "runtime/synchronizer.hpp"
41
42 #define __ _masm->
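// Shorthand used throughout this file: '__ foo(...)' expands to '_masm->foo(...)', so every
// pseudo-assembly line below is emitted through the current InterpreterMacroAssembler.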
43
44 //----------------------------------------------------------------------------------------------------
45 // Platform-dependent initialization
46
170 case TemplateTable::less_equal : return gt;
171 case TemplateTable::greater : return le;
172 case TemplateTable::greater_equal: return lt;
173 }
174 ShouldNotReachHere();
175 return nv;
176 }
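// The switch above maps each bytecode comparison to its negated ARM condition code
// (e.g. less_equal -> gt) so callers can branch to the not-taken path; the trailing
// 'return nv' only silences the compiler after ShouldNotReachHere().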
177
178 //----------------------------------------------------------------------------------------------------
179 // Miscellaneous helper routines
180
181 // Store an oop (or NULL) at the address described by obj.
182 // Blows all volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR.
183 // Also destroys new_val and obj.base().
184 static void do_oop_store(InterpreterMacroAssembler* _masm,
185 Address obj,
186 Register new_val,
187 Register tmp1,
188 Register tmp2,
189 Register tmp3,
190 BarrierSet::Name barrier,
191 bool precise,
192 bool is_null) {
193
194 assert_different_registers(obj.base(), new_val, tmp1, tmp2, tmp3, noreg);
195 switch (barrier) {
196 #if INCLUDE_ALL_GCS
197 case BarrierSet::G1BarrierSet:
198 {
199 // flatten object address if needed
200 assert (obj.mode() == basic_offset, "pre- or post-indexing is not supported here");
201
202 const Register store_addr = obj.base();
203 if (obj.index() != noreg) {
204 assert (obj.disp() == 0, "index or displacement, not both");
205 #ifdef AARCH64
206 __ add(store_addr, obj.base(), obj.index(), obj.extend(), obj.shift_imm());
207 #else
208 assert(obj.offset_op() == add_offset, "addition is expected");
209 __ add(store_addr, obj.base(), AsmOperand(obj.index(), obj.shift(), obj.shift_imm()));
210 #endif // AARCH64
211 } else if (obj.disp() != 0) {
212 __ add(store_addr, obj.base(), obj.disp());
213 }
214
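// G1 needs a SATB pre-barrier (records the field's previous value for concurrent marking)
// followed by the store and a post-barrier (dirties the remembered set when the new value
// lives in a different region than the updated object).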
215 __ g1_write_barrier_pre(store_addr, new_val, tmp1, tmp2, tmp3);
216 if (is_null) {
217 __ store_heap_oop_null(new_val, Address(store_addr));
218 } else {
219 // G1 barrier needs uncompressed oop for region cross check.
220 Register val_to_store = new_val;
221 if (UseCompressedOops) {
222 val_to_store = tmp1;
223 __ mov(val_to_store, new_val);
224 }
225 __ store_heap_oop(val_to_store, Address(store_addr)); // blows val_to_store:
226 val_to_store = noreg;
227 __ g1_write_barrier_post(store_addr, new_val, tmp1, tmp2, tmp3);
228 }
229 }
230 break;
231 #endif // INCLUDE_ALL_GCS
232 case BarrierSet::CardTableBarrierSet:
233 {
234 if (is_null) {
235 __ store_heap_oop_null(new_val, obj);
236 } else {
237 assert (!precise || (obj.index() == noreg && obj.disp() == 0),
238 "store check address should be calculated beforehand");
239
240 __ store_check_part1(tmp1);
241 __ store_heap_oop(new_val, obj); // blows new_val:
242 new_val = noreg;
243 __ store_check_part2(obj.base(), tmp1, tmp2);
244 }
245 }
246 break;
247 case BarrierSet::ModRef:
248 ShouldNotReachHere();
249 break;
250 default:
251 ShouldNotReachHere();
252 break;
253 }
254 }
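// Note: in the updated version of this file (shown below) this per-BarrierSet switch
// disappears; the GC-specific barrier code is emitted by the BarrierSetAssembler behind
// store_heap_oop()/load_heap_oop(), and callers pass DecoratorSet hints instead.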
255
256 Address TemplateTable::at_bcp(int offset) {
257 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
258 return Address(Rbcp, offset);
259 }
260
261
262 // Blows volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR.
263 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
264 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
265 int byte_no) {
266 assert_different_registers(bc_reg, temp_reg);
267 if (!RewriteBytecodes) return;
268 Label L_patch_done;
269
270 switch (bc) {
271 case Bytecodes::_fast_aputfield:
272 case Bytecodes::_fast_bputfield:
273 case Bytecodes::_fast_zputfield:
846 const Register Rindex = R0_tos;
847
848 index_check(Rarray, Rindex);
849
850 #ifdef __SOFTFP__
851 __ add(Rtemp, Rarray, AsmOperand(Rindex, lsl, LogBytesPerLong));
852 __ add(Rtemp, Rtemp, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
853 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
854 #else
855 __ ldr_double(D0_tos, get_array_elem_addr(T_DOUBLE, Rarray, Rindex, Rtemp));
856 #endif // __SOFTFP__
857 }
858
859
860 void TemplateTable::aaload() {
861 transition(itos, atos);
862 const Register Rarray = R1_tmp;
863 const Register Rindex = R0_tos;
864
865 index_check(Rarray, Rindex);
866 __ load_heap_oop(R0_tos, get_array_elem_addr(T_OBJECT, Rarray, Rindex, Rtemp));
867 }
868
869
870 void TemplateTable::baload() {
871 transition(itos, itos);
872 const Register Rarray = R1_tmp;
873 const Register Rindex = R0_tos;
874
875 index_check(Rarray, Rindex);
876 __ ldrsb(R0_tos, get_array_elem_addr(T_BYTE, Rarray, Rindex, Rtemp));
877 }
878
879
880 void TemplateTable::caload() {
881 transition(itos, itos);
882 const Register Rarray = R1_tmp;
883 const Register Rindex = R0_tos;
884
885 index_check(Rarray, Rindex);
886 __ ldrh(R0_tos, get_array_elem_addr(T_CHAR, Rarray, Rindex, Rtemp));
1231
1232 // Compute the array base
1233 __ add(Raddr_1, Rarray_3, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1234
1235 // do array store check - check for NULL value first
1236 __ cbz(Rvalue_2, is_null);
1237
1238 // Load subklass
1239 __ load_klass(Rsub_5, Rvalue_2);
1240 // Load superklass
1241 __ load_klass(Rtemp, Rarray_3);
1242 __ ldr(Rsuper_LR, Address(Rtemp, ObjArrayKlass::element_klass_offset()));
1243
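// aastore needs a dynamic type check: the stored value's klass must be a subtype of the
// array's element klass, otherwise the interpreter branches to the ArrayStoreException path.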
1244 __ gen_subtype_check(Rsub_5, Rsuper_LR, throw_array_store, R0_tmp, R3_tmp);
1245 // Come here on success
1246
1247 // Store value
1248 __ add(Raddr_1, Raddr_1, AsmOperand(Rindex_4, lsl, LogBytesPerHeapOop));
1249
1250 // Now store using the appropriate barrier
1251 do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, _bs->kind(), true, false);
1252 __ b(done);
1253
1254 __ bind(throw_array_store);
1255
1256 // Come here on failure of subtype check
1257 __ profile_typecheck_failed(R0_tmp);
1258
1259 // object is at TOS
1260 __ b(Interpreter::_throw_ArrayStoreException_entry);
1261
1262 // Have a NULL in Rvalue_2, store NULL at array[index].
1263 __ bind(is_null);
1264 __ profile_null_seen(R0_tmp);
1265
1266 // Store a NULL
1267 do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, _bs->kind(), true, true);
1268
1269 // Pop the three stack arguments (value, index, array reference)
1270 __ bind(done);
1271 __ add(Rstack_top, Rstack_top, 3 * Interpreter::stackElementSize);
1272 }
1273
1274
1275 void TemplateTable::bastore() {
1276 transition(itos, vtos);
1277 const Register Rindex = R4_tmp; // index_check prefers index in R4
1278 const Register Rarray = R3_tmp;
1279 // R0_tos: value
1280
1281 __ pop_i(Rindex);
1282 index_check(Rarray, Rindex);
1283
1284 // Need to check whether array is boolean or byte
1285 // since both types share the bastore bytecode.
1286 __ load_klass(Rtemp, Rarray);
1287 __ ldr_u32(Rtemp, Address(Rtemp, Klass::layout_helper_offset()));
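// Klass::layout_helper encodes (among other things) the array element type: a boolean
// array requires the value to be masked down to its lowest bit before the store, while a
// byte array takes the value unchanged.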
3269 #ifdef AARCH64
3270 __ ldr(R0_tos, Address(Robj, Roffset));
3271 #else
3272 __ add(Rtemp, Robj, Roffset);
3273 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3274 #endif // AARCH64
3275 __ push(ltos);
3276 if (!is_static && rc == may_rewrite) {
3277 patch_bytecode(Bytecodes::_fast_dgetfield, R0_tmp, Rtemp);
3278 }
3279 __ b(Done);
3280 }
3281
3282 // atos
3283 {
3284 assert(atos == seq++, "atos has unexpected value");
3285
3286 // atos case for AArch64 and slow version on 32-bit ARM
3287 if (!atos_merged_with_itos) {
3288 __ bind(Latos);
3289 __ load_heap_oop(R0_tos, Address(Robj, Roffset));
3290 __ push(atos);
3291 // Rewrite bytecode to be faster
3292 if (!is_static && rc == may_rewrite) {
3293 patch_bytecode(Bytecodes::_fast_agetfield, R0_tmp, Rtemp);
3294 }
3295 __ b(Done);
3296 }
3297 }
3298
3299 assert(vtos == seq++, "vtos has unexpected value");
3300
3301 __ bind(shouldNotReachHere);
3302 __ should_not_reach_here();
3303
3304 // itos and atos cases are frequent so it makes sense to move them out of table switch
3305 // atos case can be merged with itos case (and thus moved out of table switch) on 32-bit ARM, fast version only
3306
3307 __ bind(Lint);
3308 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3309 __ push(itos);
3621 if (!is_static) pop_and_check_object(Robj);
3622 #ifdef AARCH64
3623 __ str(R0_tos, Address(Robj, Roffset));
3624 #else
3625 __ add(Rtemp, Robj, Roffset);
3626 __ stmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3627 #endif // AARCH64
3628 if (!is_static && rc == may_rewrite) {
3629 patch_bytecode(Bytecodes::_fast_dputfield, R0_tmp, Rtemp, true, byte_no);
3630 }
3631 __ b(Done);
3632 }
3633
3634 // atos
3635 {
3636 assert(atos == seq++, "atos has unexpected value");
3637 __ bind(Latos);
3638 __ pop(atos);
3639 if (!is_static) pop_and_check_object(Robj);
3640 // Store into the field
3641 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R5_tmp, _bs->kind(), false, false);
3642 if (!is_static && rc == may_rewrite) {
3643 patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3644 }
3645 __ b(Done);
3646 }
3647
3648 __ bind(shouldNotReachHere);
3649 __ should_not_reach_here();
3650
3651 // itos case is frequent and is moved outside table switch
3652 __ bind(Lint);
3653 __ pop(itos);
3654 if (!is_static) pop_and_check_object(Robj);
3655 __ str_32(R0_tos, Address(Robj, Roffset));
3656 if (!is_static && rc == may_rewrite) {
3657 patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3658 }
3659
3660 __ bind(Done);
3661
3799 case Bytecodes::_fast_lputfield: __ str (R0_tos, Address(Robj, Roffset)); break;
3800 case Bytecodes::_fast_fputfield: __ str_s(S0_tos, Address(Robj, Roffset)); break;
3801 case Bytecodes::_fast_dputfield: __ str_d(D0_tos, Address(Robj, Roffset)); break;
3802 #else
3803 case Bytecodes::_fast_lputfield: __ add(Robj, Robj, Roffset);
3804 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3805
3806 #ifdef __SOFTFP__
3807 case Bytecodes::_fast_fputfield: __ str(R0_tos, Address(Robj, Roffset)); break;
3808 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3809 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3810 #else
3811 case Bytecodes::_fast_fputfield: __ add(Robj, Robj, Roffset);
3812 __ fsts(S0_tos, Address(Robj)); break;
3813 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3814 __ fstd(D0_tos, Address(Robj)); break;
3815 #endif // __SOFTFP__
3816 #endif // AARCH64
3817
3818 case Bytecodes::_fast_aputfield:
3819 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R2_tmp, _bs->kind(), false, false);
3820 break;
3821
3822 default:
3823 ShouldNotReachHere();
3824 }
3825
3826 if (gen_volatile_check) {
3827 Label notVolatile;
3828 Label skipMembar;
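// If the field is neither volatile nor final, no barrier is needed and both checks are
// skipped; otherwise a StoreLoad barrier follows a volatile write and a StoreStore barrier
// follows a final-field write.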
3829 __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3830 1 << ConstantPoolCacheEntry::is_final_shift);
3831 __ b(skipMembar, eq);
3832
3833 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3834
3835 // StoreLoad barrier after volatile field write
3836 volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3837 __ b(skipMembar);
3838
3839 // StoreStore barrier after final field write
3895 case Bytecodes::_fast_bgetfield: __ ldrsb(R0_tos, Address(Robj, Roffset)); break;
3896 case Bytecodes::_fast_sgetfield: __ ldrsh(R0_tos, Address(Robj, Roffset)); break;
3897 case Bytecodes::_fast_cgetfield: __ ldrh (R0_tos, Address(Robj, Roffset)); break;
3898 case Bytecodes::_fast_igetfield: __ ldr_s32(R0_tos, Address(Robj, Roffset)); break;
3899 #ifdef AARCH64
3900 case Bytecodes::_fast_lgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3901 case Bytecodes::_fast_fgetfield: __ ldr_s(S0_tos, Address(Robj, Roffset)); break;
3902 case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, Address(Robj, Roffset)); break;
3903 #else
3904 case Bytecodes::_fast_lgetfield: __ add(Roffset, Robj, Roffset);
3905 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3906 #ifdef __SOFTFP__
3907 case Bytecodes::_fast_fgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3908 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset);
3909 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3910 #else
3911 case Bytecodes::_fast_fgetfield: __ add(Roffset, Robj, Roffset); __ flds(S0_tos, Address(Roffset)); break;
3912 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset); __ fldd(D0_tos, Address(Roffset)); break;
3913 #endif // __SOFTFP__
3914 #endif // AARCH64
3915 case Bytecodes::_fast_agetfield: __ load_heap_oop(R0_tos, Address(Robj, Roffset)); __ verify_oop(R0_tos); break;
3916 default:
3917 ShouldNotReachHere();
3918 }
3919
3920 if (gen_volatile_check) {
3921 // Check for volatile load
3922 Label notVolatile;
3923 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3924
3925 // TODO-AARCH64: on AArch64, load-acquire instructions could be used to get rid of this explicit barrier
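// LoadLoad|LoadStore ordering after the load gives the acquire semantics a volatile read requires.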
3926 volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3927
3928 __ bind(notVolatile);
3929 }
3930 }
3931
3932
3933 void TemplateTable::fast_xaccess(TosState state) {
3934 transition(vtos, state);
3935
3975 __ decode_heap_oop(R0_tos);
3976 } else {
3977 __ ldar(R0_tos, Rtemp);
3978 }
3979 __ verify_oop(R0_tos);
3980 } else if (state == ftos) {
3981 __ ldar_w(R0_tos, Rtemp);
3982 __ fmov_sw(S0_tos, R0_tos);
3983 } else {
3984 ShouldNotReachHere();
3985 }
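// The ldar/ldar_w instructions above are load-acquire, so the volatile load already has the
// required ordering and no separate barrier is needed on this AArch64 path.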
3986 __ b(done);
3987
3988 __ bind(notVolatile);
3989 }
3990 #endif // AARCH64
3991
3992 if (state == itos) {
3993 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3994 } else if (state == atos) {
3995 __ load_heap_oop(R0_tos, Address(Robj, Roffset));
3996 __ verify_oop(R0_tos);
3997 } else if (state == ftos) {
3998 #ifdef AARCH64
3999 __ ldr_s(S0_tos, Address(Robj, Roffset));
4000 #else
4001 #ifdef __SOFTFP__
4002 __ ldr(R0_tos, Address(Robj, Roffset));
4003 #else
4004 __ add(Roffset, Robj, Roffset);
4005 __ flds(S0_tos, Address(Roffset));
4006 #endif // __SOFTFP__
4007 #endif // AARCH64
4008 } else {
4009 ShouldNotReachHere();
4010 }
4011
4012 #ifndef AARCH64
4013 if (gen_volatile_check) {
4014 // Check for volatile load
4015 Label notVolatile;
//----------------------------------------------------------------------------------------------------
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "gc/shared/barrierSetAssembler.hpp"
28 #include "interpreter/interp_masm.hpp"
29 #include "interpreter/interpreter.hpp"
30 #include "interpreter/interpreterRuntime.hpp"
31 #include "interpreter/templateTable.hpp"
32 #include "memory/universe.hpp"
33 #include "oops/cpCache.hpp"
34 #include "oops/methodData.hpp"
35 #include "oops/objArrayKlass.hpp"
36 #include "oops/oop.inline.hpp"
37 #include "prims/methodHandles.hpp"
38 #include "runtime/frame.inline.hpp"
39 #include "runtime/sharedRuntime.hpp"
40 #include "runtime/stubRoutines.hpp"
41 #include "runtime/synchronizer.hpp"
42
43 #define __ _masm->
44
45 //----------------------------------------------------------------------------------------------------
46 // Platform-dependent initialization
47
171 case TemplateTable::less_equal : return gt;
172 case TemplateTable::greater : return le;
173 case TemplateTable::greater_equal: return lt;
174 }
175 ShouldNotReachHere();
176 return nv;
177 }
178
179 //----------------------------------------------------------------------------------------------------
180 // Miscellaneous helper routines
181
182 // Store an oop (or NULL) at the address described by obj.
183 // Blows all volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR.
184 // Also destroys new_val and obj.base().
185 static void do_oop_store(InterpreterMacroAssembler* _masm,
186 Address obj,
187 Register new_val,
188 Register tmp1,
189 Register tmp2,
190 Register tmp3,
191 bool is_null,
192 DecoratorSet decorators = 0) {
193
194 assert_different_registers(obj.base(), new_val, tmp1, tmp2, tmp3, noreg);
195 __ store_heap_oop(obj, new_val, tmp1, tmp2, tmp3, is_null, decorators); // forward decorators so hints like IN_HEAP_ARRAY reach the barrier (assumes a trailing DecoratorSet parameter)
196 }
197
198 static void do_oop_load(InterpreterMacroAssembler* _masm,
199 Register dst,
200 Address obj,
201 DecoratorSet decorators = 0) {
202 __ load_heap_oop(dst, obj, noreg, noreg, noreg, decorators); // likewise forward decorators to the barrier-aware load
203 }
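// With the BarrierSetAssembler-based interface, the GC-specific barrier code is generated
// inside store_heap_oop()/load_heap_oop(); callers only supply DecoratorSet hints such as
// IN_HEAP_ARRAY for array-element accesses.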
204
205 Address TemplateTable::at_bcp(int offset) {
206 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
207 return Address(Rbcp, offset);
208 }
209
210
211 // Blows volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR.
212 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
213 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
214 int byte_no) {
215 assert_different_registers(bc_reg, temp_reg);
216 if (!RewriteBytecodes) return;
217 Label L_patch_done;
218
219 switch (bc) {
220 case Bytecodes::_fast_aputfield:
221 case Bytecodes::_fast_bputfield:
222 case Bytecodes::_fast_zputfield:
795 const Register Rindex = R0_tos;
796
797 index_check(Rarray, Rindex);
798
799 #ifdef __SOFTFP__
800 __ add(Rtemp, Rarray, AsmOperand(Rindex, lsl, LogBytesPerLong));
801 __ add(Rtemp, Rtemp, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
802 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
803 #else
804 __ ldr_double(D0_tos, get_array_elem_addr(T_DOUBLE, Rarray, Rindex, Rtemp));
805 #endif // __SOFTFP__
806 }
807
808
809 void TemplateTable::aaload() {
810 transition(itos, atos);
811 const Register Rarray = R1_tmp;
812 const Register Rindex = R0_tos;
813
814 index_check(Rarray, Rindex);
815 do_oop_load(_masm, R0_tos, get_array_elem_addr(T_OBJECT, Rarray, Rindex, Rtemp), IN_HEAP_ARRAY);
816 }
817
818
819 void TemplateTable::baload() {
820 transition(itos, itos);
821 const Register Rarray = R1_tmp;
822 const Register Rindex = R0_tos;
823
824 index_check(Rarray, Rindex);
825 __ ldrsb(R0_tos, get_array_elem_addr(T_BYTE, Rarray, Rindex, Rtemp));
826 }
827
828
829 void TemplateTable::caload() {
830 transition(itos, itos);
831 const Register Rarray = R1_tmp;
832 const Register Rindex = R0_tos;
833
834 index_check(Rarray, Rindex);
835 __ ldrh(R0_tos, get_array_elem_addr(T_CHAR, Rarray, Rindex, Rtemp));
1180
1181 // Compute the array base
1182 __ add(Raddr_1, Rarray_3, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1183
1184 // do array store check - check for NULL value first
1185 __ cbz(Rvalue_2, is_null);
1186
1187 // Load subklass
1188 __ load_klass(Rsub_5, Rvalue_2);
1189 // Load superklass
1190 __ load_klass(Rtemp, Rarray_3);
1191 __ ldr(Rsuper_LR, Address(Rtemp, ObjArrayKlass::element_klass_offset()));
1192
1193 __ gen_subtype_check(Rsub_5, Rsuper_LR, throw_array_store, R0_tmp, R3_tmp);
1194 // Come here on success
1195
1196 // Store value
1197 __ add(Raddr_1, Raddr_1, AsmOperand(Rindex_4, lsl, LogBytesPerHeapOop));
1198
1199 // Now store using the appropriate barrier
1200 do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, false, IN_HEAP_ARRAY);
1201 __ b(done);
1202
1203 __ bind(throw_array_store);
1204
1205 // Come here on failure of subtype check
1206 __ profile_typecheck_failed(R0_tmp);
1207
1208 // object is at TOS
1209 __ b(Interpreter::_throw_ArrayStoreException_entry);
1210
1211 // Have a NULL in Rvalue_2, store NULL at array[index].
1212 __ bind(is_null);
1213 __ profile_null_seen(R0_tmp);
1214
1215 // Store a NULL
1216 do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, true, IN_HEAP_ARRAY);
1217
1218 // Pop the three stack arguments (value, index, array reference)
1219 __ bind(done);
1220 __ add(Rstack_top, Rstack_top, 3 * Interpreter::stackElementSize);
1221 }
1222
1223
1224 void TemplateTable::bastore() {
1225 transition(itos, vtos);
1226 const Register Rindex = R4_tmp; // index_check prefers index in R4
1227 const Register Rarray = R3_tmp;
1228 // R0_tos: value
1229
1230 __ pop_i(Rindex);
1231 index_check(Rarray, Rindex);
1232
1233 // Need to check whether array is boolean or byte
1234 // since both types share the bastore bytecode.
1235 __ load_klass(Rtemp, Rarray);
1236 __ ldr_u32(Rtemp, Address(Rtemp, Klass::layout_helper_offset()));
3218 #ifdef AARCH64
3219 __ ldr(R0_tos, Address(Robj, Roffset));
3220 #else
3221 __ add(Rtemp, Robj, Roffset);
3222 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3223 #endif // AARCH64
3224 __ push(ltos);
3225 if (!is_static && rc == may_rewrite) {
3226 patch_bytecode(Bytecodes::_fast_dgetfield, R0_tmp, Rtemp);
3227 }
3228 __ b(Done);
3229 }
3230
3231 // atos
3232 {
3233 assert(atos == seq++, "atos has unexpected value");
3234
3235 // atos case for AArch64 and slow version on 32-bit ARM
3236 if (!atos_merged_with_itos) {
3237 __ bind(Latos);
3238 do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
3239 __ push(atos);
3240 // Rewrite bytecode to be faster
3241 if (!is_static && rc == may_rewrite) {
3242 patch_bytecode(Bytecodes::_fast_agetfield, R0_tmp, Rtemp);
3243 }
3244 __ b(Done);
3245 }
3246 }
3247
3248 assert(vtos == seq++, "vtos has unexpected value");
3249
3250 __ bind(shouldNotReachHere);
3251 __ should_not_reach_here();
3252
3253 // itos and atos cases are frequent so it makes sense to move them out of table switch
3254 // atos case can be merged with itos case (and thus moved out of table switch) on 32-bit ARM, fast version only
3255
3256 __ bind(Lint);
3257 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3258 __ push(itos);
3570 if (!is_static) pop_and_check_object(Robj);
3571 #ifdef AARCH64
3572 __ str(R0_tos, Address(Robj, Roffset));
3573 #else
3574 __ add(Rtemp, Robj, Roffset);
3575 __ stmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3576 #endif // AARCH64
3577 if (!is_static && rc == may_rewrite) {
3578 patch_bytecode(Bytecodes::_fast_dputfield, R0_tmp, Rtemp, true, byte_no);
3579 }
3580 __ b(Done);
3581 }
3582
3583 // atos
3584 {
3585 assert(atos == seq++, "atos has unexpected value");
3586 __ bind(Latos);
3587 __ pop(atos);
3588 if (!is_static) pop_and_check_object(Robj);
3589 // Store into the field
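// Plain instance-field store (not an array element), so no IN_HEAP_ARRAY decorator is
// passed; imprecise card marking based on the object start is sufficient here.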
3590 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R5_tmp, false);
3591 if (!is_static && rc == may_rewrite) {
3592 patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3593 }
3594 __ b(Done);
3595 }
3596
3597 __ bind(shouldNotReachHere);
3598 __ should_not_reach_here();
3599
3600 // itos case is frequent and is moved outside table switch
3601 __ bind(Lint);
3602 __ pop(itos);
3603 if (!is_static) pop_and_check_object(Robj);
3604 __ str_32(R0_tos, Address(Robj, Roffset));
3605 if (!is_static && rc == may_rewrite) {
3606 patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3607 }
3608
3609 __ bind(Done);
3610
3748 case Bytecodes::_fast_lputfield: __ str (R0_tos, Address(Robj, Roffset)); break;
3749 case Bytecodes::_fast_fputfield: __ str_s(S0_tos, Address(Robj, Roffset)); break;
3750 case Bytecodes::_fast_dputfield: __ str_d(D0_tos, Address(Robj, Roffset)); break;
3751 #else
3752 case Bytecodes::_fast_lputfield: __ add(Robj, Robj, Roffset);
3753 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3754
3755 #ifdef __SOFTFP__
3756 case Bytecodes::_fast_fputfield: __ str(R0_tos, Address(Robj, Roffset)); break;
3757 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3758 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3759 #else
3760 case Bytecodes::_fast_fputfield: __ add(Robj, Robj, Roffset);
3761 __ fsts(S0_tos, Address(Robj)); break;
3762 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3763 __ fstd(D0_tos, Address(Robj)); break;
3764 #endif // __SOFTFP__
3765 #endif // AARCH64
3766
3767 case Bytecodes::_fast_aputfield:
3768 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R2_tmp, false);
3769 break;
3770
3771 default:
3772 ShouldNotReachHere();
3773 }
3774
3775 if (gen_volatile_check) {
3776 Label notVolatile;
3777 Label skipMembar;
3778 __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3779 1 << ConstantPoolCacheEntry::is_final_shift);
3780 __ b(skipMembar, eq);
3781
3782 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3783
3784 // StoreLoad barrier after volatile field write
3785 volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3786 __ b(skipMembar);
3787
3788 // StoreStore barrier after final field write
3844 case Bytecodes::_fast_bgetfield: __ ldrsb(R0_tos, Address(Robj, Roffset)); break;
3845 case Bytecodes::_fast_sgetfield: __ ldrsh(R0_tos, Address(Robj, Roffset)); break;
3846 case Bytecodes::_fast_cgetfield: __ ldrh (R0_tos, Address(Robj, Roffset)); break;
3847 case Bytecodes::_fast_igetfield: __ ldr_s32(R0_tos, Address(Robj, Roffset)); break;
3848 #ifdef AARCH64
3849 case Bytecodes::_fast_lgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3850 case Bytecodes::_fast_fgetfield: __ ldr_s(S0_tos, Address(Robj, Roffset)); break;
3851 case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, Address(Robj, Roffset)); break;
3852 #else
3853 case Bytecodes::_fast_lgetfield: __ add(Roffset, Robj, Roffset);
3854 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3855 #ifdef __SOFTFP__
3856 case Bytecodes::_fast_fgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3857 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset);
3858 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3859 #else
3860 case Bytecodes::_fast_fgetfield: __ add(Roffset, Robj, Roffset); __ flds(S0_tos, Address(Roffset)); break;
3861 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset); __ fldd(D0_tos, Address(Roffset)); break;
3862 #endif // __SOFTFP__
3863 #endif // AARCH64
3864 case Bytecodes::_fast_agetfield: do_oop_load(_masm, R0_tos, Address(Robj, Roffset)); __ verify_oop(R0_tos); break;
3865 default:
3866 ShouldNotReachHere();
3867 }
3868
3869 if (gen_volatile_check) {
3870 // Check for volatile load
3871 Label notVolatile;
3872 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3873
3874 // TODO-AARCH64: on AArch64, load-acquire instructions could be used to get rid of this explicit barrier
3875 volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3876
3877 __ bind(notVolatile);
3878 }
3879 }
3880
3881
3882 void TemplateTable::fast_xaccess(TosState state) {
3883 transition(vtos, state);
3884
3924 __ decode_heap_oop(R0_tos);
3925 } else {
3926 __ ldar(R0_tos, Rtemp);
3927 }
3928 __ verify_oop(R0_tos);
3929 } else if (state == ftos) {
3930 __ ldar_w(R0_tos, Rtemp);
3931 __ fmov_sw(S0_tos, R0_tos);
3932 } else {
3933 ShouldNotReachHere();
3934 }
3935 __ b(done);
3936
3937 __ bind(notVolatile);
3938 }
3939 #endif // AARCH64
3940
3941 if (state == itos) {
3942 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3943 } else if (state == atos) {
3944 do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
3945 __ verify_oop(R0_tos);
3946 } else if (state == ftos) {
3947 #ifdef AARCH64
3948 __ ldr_s(S0_tos, Address(Robj, Roffset));
3949 #else
3950 #ifdef __SOFTFP__
3951 __ ldr(R0_tos, Address(Robj, Roffset));
3952 #else
3953 __ add(Roffset, Robj, Roffset);
3954 __ flds(S0_tos, Address(Roffset));
3955 #endif // __SOFTFP__
3956 #endif // AARCH64
3957 } else {
3958 ShouldNotReachHere();
3959 }
3960
3961 #ifndef AARCH64
3962 if (gen_volatile_check) {
3963 // Check for volatile load
3964 Label notVolatile;