7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "interpreter/interp_masm.hpp"
28 #include "interpreter/interpreter.hpp"
29 #include "interpreter/interpreterRuntime.hpp"
30 #include "interpreter/templateTable.hpp"
31 #include "memory/universe.hpp"
32 #include "oops/cpCache.hpp"
33 #include "oops/methodData.hpp"
34 #include "oops/objArrayKlass.hpp"
35 #include "oops/oop.inline.hpp"
36 #include "prims/methodHandles.hpp"
37 #include "runtime/frame.inline.hpp"
38 #include "runtime/sharedRuntime.hpp"
39 #include "runtime/stubRoutines.hpp"
40 #include "runtime/synchronizer.hpp"
41
42 #define __ _masm->
43
44 //----------------------------------------------------------------------------------------------------
45 // Platform-dependent initialization
46
170 case TemplateTable::less_equal : return gt;
171 case TemplateTable::greater : return le;
172 case TemplateTable::greater_equal: return lt;
173 }
174 ShouldNotReachHere();
175 return nv;
176 }
177
178 //----------------------------------------------------------------------------------------------------
// Miscellaneous helper routines
180
// Store an oop (or NULL) at the address described by obj.
// Blows all volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64, Rtemp, LR).
// Also destroys new_val and obj.base().
//
// Emits the store sequence required by the selected GC barrier:
//  - G1BarrierSet: pre-write barrier, oop store, post-write barrier
//    (the post-barrier is skipped for a NULL store, which cannot create a
//    cross-region reference).
//  - CardTableBarrierSet: oop store bracketed by the two-part store check
//    (card mark); a NULL store needs no card mark.
// 'precise' means the card for the exact element address must be marked, so
// callers are expected to pass an already-flattened address (asserted below).
// 'is_null' selects the NULL-store path.
static void do_oop_store(InterpreterMacroAssembler* _masm,
                         Address obj,
                         Register new_val,
                         Register tmp1,
                         Register tmp2,
                         Register tmp3,
                         BarrierSet::Name barrier,
                         bool precise,
                         bool is_null) {

  assert_different_registers(obj.base(), new_val, tmp1, tmp2, tmp3, noreg);
  switch (barrier) {
#if INCLUDE_ALL_GCS
    case BarrierSet::G1BarrierSet:
      {
        // flatten object address if needed
        // (G1 barrier helpers want the destination in a single register;
        //  obj.base() is clobbered to hold the flat address)
        assert (obj.mode() == basic_offset, "pre- or post-indexing is not supported here");

        const Register store_addr = obj.base();
        if (obj.index() != noreg) {
          assert (obj.disp() == 0, "index or displacement, not both");
#ifdef AARCH64
          __ add(store_addr, obj.base(), obj.index(), obj.extend(), obj.shift_imm());
#else
          assert(obj.offset_op() == add_offset, "addition is expected");
          __ add(store_addr, obj.base(), AsmOperand(obj.index(), obj.shift(), obj.shift_imm()));
#endif // AARCH64
        } else if (obj.disp() != 0) {
          __ add(store_addr, obj.base(), obj.disp());
        }

        // Pre-write barrier: records the value being overwritten.
        __ g1_write_barrier_pre(store_addr, new_val, tmp1, tmp2, tmp3);
        if (is_null) {
          // Storing NULL cannot create a cross-region reference, so no
          // post-write barrier is needed.
          __ store_heap_oop_null(new_val, Address(store_addr));
        } else {
          // G1 barrier needs uncompressed oop for region cross check.
          // Keep new_val intact for the post-barrier; compress a copy instead.
          Register val_to_store = new_val;
          if (UseCompressedOops) {
            val_to_store = tmp1;
            __ mov(val_to_store, new_val);
          }
          __ store_heap_oop(val_to_store, Address(store_addr)); // blows val_to_store:
          val_to_store = noreg;
          __ g1_write_barrier_post(store_addr, new_val, tmp1, tmp2, tmp3);
        }
      }
      break;
#endif // INCLUDE_ALL_GCS
    case BarrierSet::CardTableBarrierSet:
      {
        if (is_null) {
          // NULL stores never require a card mark.
          __ store_heap_oop_null(new_val, obj);
        } else {
          // For precise card marking the caller must have flattened the
          // address already: the card is derived from obj.base() below.
          assert (!precise || (obj.index() == noreg && obj.disp() == 0),
                  "store check address should be calculated beforehand");

          __ store_check_part1(tmp1);
          __ store_heap_oop(new_val, obj); // blows new_val:
          new_val = noreg;
          __ store_check_part2(obj.base(), tmp1, tmp2);
        }
      }
      break;
    case BarrierSet::ModRef:
      // ModRef is an abstract barrier-set kind; concrete kinds are handled above.
      ShouldNotReachHere();
      break;
    default:
      ShouldNotReachHere();
      break;
  }
}
255
256 Address TemplateTable::at_bcp(int offset) {
257 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
258 return Address(Rbcp, offset);
259 }
260
261
262 // Blows volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR.
263 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
264 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
265 int byte_no) {
266 assert_different_registers(bc_reg, temp_reg);
267 if (!RewriteBytecodes) return;
268 Label L_patch_done;
269
270 switch (bc) {
271 case Bytecodes::_fast_aputfield:
272 case Bytecodes::_fast_bputfield:
273 case Bytecodes::_fast_zputfield:
846 const Register Rindex = R0_tos;
847
848 index_check(Rarray, Rindex);
849
850 #ifdef __SOFTFP__
851 __ add(Rtemp, Rarray, AsmOperand(Rindex, lsl, LogBytesPerLong));
852 __ add(Rtemp, Rtemp, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
853 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
854 #else
855 __ ldr_double(D0_tos, get_array_elem_addr(T_DOUBLE, Rarray, Rindex, Rtemp));
856 #endif // __SOFTFP__
857 }
858
859
860 void TemplateTable::aaload() {
861 transition(itos, atos);
862 const Register Rarray = R1_tmp;
863 const Register Rindex = R0_tos;
864
865 index_check(Rarray, Rindex);
866 __ load_heap_oop(R0_tos, get_array_elem_addr(T_OBJECT, Rarray, Rindex, Rtemp));
867 }
868
869
870 void TemplateTable::baload() {
871 transition(itos, itos);
872 const Register Rarray = R1_tmp;
873 const Register Rindex = R0_tos;
874
875 index_check(Rarray, Rindex);
876 __ ldrsb(R0_tos, get_array_elem_addr(T_BYTE, Rarray, Rindex, Rtemp));
877 }
878
879
880 void TemplateTable::caload() {
881 transition(itos, itos);
882 const Register Rarray = R1_tmp;
883 const Register Rindex = R0_tos;
884
885 index_check(Rarray, Rindex);
886 __ ldrh(R0_tos, get_array_elem_addr(T_CHAR, Rarray, Rindex, Rtemp));
1231
1232 // Compute the array base
1233 __ add(Raddr_1, Rarray_3, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1234
1235 // do array store check - check for NULL value first
1236 __ cbz(Rvalue_2, is_null);
1237
1238 // Load subklass
1239 __ load_klass(Rsub_5, Rvalue_2);
1240 // Load superklass
1241 __ load_klass(Rtemp, Rarray_3);
1242 __ ldr(Rsuper_LR, Address(Rtemp, ObjArrayKlass::element_klass_offset()));
1243
1244 __ gen_subtype_check(Rsub_5, Rsuper_LR, throw_array_store, R0_tmp, R3_tmp);
1245 // Come here on success
1246
1247 // Store value
1248 __ add(Raddr_1, Raddr_1, AsmOperand(Rindex_4, lsl, LogBytesPerHeapOop));
1249
1250 // Now store using the appropriate barrier
1251 do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, _bs->kind(), true, false);
1252 __ b(done);
1253
1254 __ bind(throw_array_store);
1255
1256 // Come here on failure of subtype check
1257 __ profile_typecheck_failed(R0_tmp);
1258
1259 // object is at TOS
1260 __ b(Interpreter::_throw_ArrayStoreException_entry);
1261
1262 // Have a NULL in Rvalue_2, store NULL at array[index].
1263 __ bind(is_null);
1264 __ profile_null_seen(R0_tmp);
1265
1266 // Store a NULL
1267 do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, _bs->kind(), true, true);
1268
1269 // Pop stack arguments
1270 __ bind(done);
1271 __ add(Rstack_top, Rstack_top, 3 * Interpreter::stackElementSize);
1272 }
1273
1274
1275 void TemplateTable::bastore() {
1276 transition(itos, vtos);
1277 const Register Rindex = R4_tmp; // index_check prefers index in R4
1278 const Register Rarray = R3_tmp;
1279 // R0_tos: value
1280
1281 __ pop_i(Rindex);
1282 index_check(Rarray, Rindex);
1283
1284 // Need to check whether array is boolean or byte
1285 // since both types share the bastore bytecode.
1286 __ load_klass(Rtemp, Rarray);
1287 __ ldr_u32(Rtemp, Address(Rtemp, Klass::layout_helper_offset()));
3269 #ifdef AARCH64
3270 __ ldr(R0_tos, Address(Robj, Roffset));
3271 #else
3272 __ add(Rtemp, Robj, Roffset);
3273 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3274 #endif // AARCH64
3275 __ push(ltos);
3276 if (!is_static && rc == may_rewrite) {
3277 patch_bytecode(Bytecodes::_fast_dgetfield, R0_tmp, Rtemp);
3278 }
3279 __ b(Done);
3280 }
3281
3282 // atos
3283 {
3284 assert(atos == seq++, "atos has unexpected value");
3285
3286 // atos case for AArch64 and slow version on 32-bit ARM
3287 if(!atos_merged_with_itos) {
3288 __ bind(Latos);
3289 __ load_heap_oop(R0_tos, Address(Robj, Roffset));
3290 __ push(atos);
3291 // Rewrite bytecode to be faster
3292 if (!is_static && rc == may_rewrite) {
3293 patch_bytecode(Bytecodes::_fast_agetfield, R0_tmp, Rtemp);
3294 }
3295 __ b(Done);
3296 }
3297 }
3298
3299 assert(vtos == seq++, "vtos has unexpected value");
3300
3301 __ bind(shouldNotReachHere);
3302 __ should_not_reach_here();
3303
3304 // itos and atos cases are frequent so it makes sense to move them out of table switch
3305 // atos case can be merged with itos case (and thus moved out of table switch) on 32-bit ARM, fast version only
3306
3307 __ bind(Lint);
3308 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3309 __ push(itos);
3621 if (!is_static) pop_and_check_object(Robj);
3622 #ifdef AARCH64
3623 __ str(R0_tos, Address(Robj, Roffset));
3624 #else
3625 __ add(Rtemp, Robj, Roffset);
3626 __ stmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3627 #endif // AARCH64
3628 if (!is_static && rc == may_rewrite) {
3629 patch_bytecode(Bytecodes::_fast_dputfield, R0_tmp, Rtemp, true, byte_no);
3630 }
3631 __ b(Done);
3632 }
3633
3634 // atos
3635 {
3636 assert(atos == seq++, "dtos has unexpected value");
3637 __ bind(Latos);
3638 __ pop(atos);
3639 if (!is_static) pop_and_check_object(Robj);
3640 // Store into the field
3641 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R5_tmp, _bs->kind(), false, false);
3642 if (!is_static && rc == may_rewrite) {
3643 patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3644 }
3645 __ b(Done);
3646 }
3647
3648 __ bind(shouldNotReachHere);
3649 __ should_not_reach_here();
3650
3651 // itos case is frequent and is moved outside table switch
3652 __ bind(Lint);
3653 __ pop(itos);
3654 if (!is_static) pop_and_check_object(Robj);
3655 __ str_32(R0_tos, Address(Robj, Roffset));
3656 if (!is_static && rc == may_rewrite) {
3657 patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3658 }
3659
3660 __ bind(Done);
3661
3799 case Bytecodes::_fast_lputfield: __ str (R0_tos, Address(Robj, Roffset)); break;
3800 case Bytecodes::_fast_fputfield: __ str_s(S0_tos, Address(Robj, Roffset)); break;
3801 case Bytecodes::_fast_dputfield: __ str_d(D0_tos, Address(Robj, Roffset)); break;
3802 #else
3803 case Bytecodes::_fast_lputfield: __ add(Robj, Robj, Roffset);
3804 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3805
3806 #ifdef __SOFTFP__
3807 case Bytecodes::_fast_fputfield: __ str(R0_tos, Address(Robj, Roffset)); break;
3808 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3809 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3810 #else
3811 case Bytecodes::_fast_fputfield: __ add(Robj, Robj, Roffset);
3812 __ fsts(S0_tos, Address(Robj)); break;
3813 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3814 __ fstd(D0_tos, Address(Robj)); break;
3815 #endif // __SOFTFP__
3816 #endif // AARCH64
3817
3818 case Bytecodes::_fast_aputfield:
3819 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R2_tmp, _bs->kind(), false, false);
3820 break;
3821
3822 default:
3823 ShouldNotReachHere();
3824 }
3825
3826 if (gen_volatile_check) {
3827 Label notVolatile;
3828 Label skipMembar;
3829 __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3830 1 << ConstantPoolCacheEntry::is_final_shift);
3831 __ b(skipMembar, eq);
3832
3833 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3834
3835 // StoreLoad barrier after volatile field write
3836 volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3837 __ b(skipMembar);
3838
3839 // StoreStore barrier after final field write
3895 case Bytecodes::_fast_bgetfield: __ ldrsb(R0_tos, Address(Robj, Roffset)); break;
3896 case Bytecodes::_fast_sgetfield: __ ldrsh(R0_tos, Address(Robj, Roffset)); break;
3897 case Bytecodes::_fast_cgetfield: __ ldrh (R0_tos, Address(Robj, Roffset)); break;
3898 case Bytecodes::_fast_igetfield: __ ldr_s32(R0_tos, Address(Robj, Roffset)); break;
3899 #ifdef AARCH64
3900 case Bytecodes::_fast_lgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3901 case Bytecodes::_fast_fgetfield: __ ldr_s(S0_tos, Address(Robj, Roffset)); break;
3902 case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, Address(Robj, Roffset)); break;
3903 #else
3904 case Bytecodes::_fast_lgetfield: __ add(Roffset, Robj, Roffset);
3905 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3906 #ifdef __SOFTFP__
3907 case Bytecodes::_fast_fgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3908 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset);
3909 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3910 #else
3911 case Bytecodes::_fast_fgetfield: __ add(Roffset, Robj, Roffset); __ flds(S0_tos, Address(Roffset)); break;
3912 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset); __ fldd(D0_tos, Address(Roffset)); break;
3913 #endif // __SOFTFP__
3914 #endif // AARCH64
3915 case Bytecodes::_fast_agetfield: __ load_heap_oop(R0_tos, Address(Robj, Roffset)); __ verify_oop(R0_tos); break;
3916 default:
3917 ShouldNotReachHere();
3918 }
3919
3920 if (gen_volatile_check) {
3921 // Check for volatile load
3922 Label notVolatile;
3923 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3924
// TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
3926 volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3927
3928 __ bind(notVolatile);
3929 }
3930 }
3931
3932
3933 void TemplateTable::fast_xaccess(TosState state) {
3934 transition(vtos, state);
3935
3975 __ decode_heap_oop(R0_tos);
3976 } else {
3977 __ ldar(R0_tos, Rtemp);
3978 }
3979 __ verify_oop(R0_tos);
3980 } else if (state == ftos) {
3981 __ ldar_w(R0_tos, Rtemp);
3982 __ fmov_sw(S0_tos, R0_tos);
3983 } else {
3984 ShouldNotReachHere();
3985 }
3986 __ b(done);
3987
3988 __ bind(notVolatile);
3989 }
3990 #endif // AARCH64
3991
3992 if (state == itos) {
3993 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3994 } else if (state == atos) {
3995 __ load_heap_oop(R0_tos, Address(Robj, Roffset));
3996 __ verify_oop(R0_tos);
3997 } else if (state == ftos) {
3998 #ifdef AARCH64
3999 __ ldr_s(S0_tos, Address(Robj, Roffset));
4000 #else
4001 #ifdef __SOFTFP__
4002 __ ldr(R0_tos, Address(Robj, Roffset));
4003 #else
4004 __ add(Roffset, Robj, Roffset);
4005 __ flds(S0_tos, Address(Roffset));
4006 #endif // __SOFTFP__
4007 #endif // AARCH64
4008 } else {
4009 ShouldNotReachHere();
4010 }
4011
4012 #ifndef AARCH64
4013 if (gen_volatile_check) {
4014 // Check for volatile load
4015 Label notVolatile;
|
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "gc/shared/barrierSetAssembler.hpp"
28 #include "interpreter/interp_masm.hpp"
29 #include "interpreter/interpreter.hpp"
30 #include "interpreter/interpreterRuntime.hpp"
31 #include "interpreter/templateTable.hpp"
32 #include "memory/universe.hpp"
33 #include "oops/cpCache.hpp"
34 #include "oops/methodData.hpp"
35 #include "oops/objArrayKlass.hpp"
36 #include "oops/oop.inline.hpp"
37 #include "prims/methodHandles.hpp"
38 #include "runtime/frame.inline.hpp"
39 #include "runtime/sharedRuntime.hpp"
40 #include "runtime/stubRoutines.hpp"
41 #include "runtime/synchronizer.hpp"
42
43 #define __ _masm->
44
45 //----------------------------------------------------------------------------------------------------
46 // Platform-dependent initialization
47
171 case TemplateTable::less_equal : return gt;
172 case TemplateTable::greater : return le;
173 case TemplateTable::greater_equal: return lt;
174 }
175 ShouldNotReachHere();
176 return nv;
177 }
178
179 //----------------------------------------------------------------------------------------------------
// Miscellaneous helper routines
181
182 // Store an oop (or NULL) at the address described by obj.
183 // Blows all volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64, Rtemp, LR).
184 // Also destroys new_val and obj.base().
185 static void do_oop_store(InterpreterMacroAssembler* _masm,
186 Address obj,
187 Register new_val,
188 Register tmp1,
189 Register tmp2,
190 Register tmp3,
191 bool is_null,
192 DecoratorSet decorators = 0) {
193
194 assert_different_registers(obj.base(), new_val, tmp1, tmp2, tmp3, noreg);
195 if (is_null) {
196 __ store_heap_oop_null(obj, new_val, tmp1, tmp2, tmp3, decorators);
197 } else {
198 __ store_heap_oop(obj, new_val, tmp1, tmp2, tmp3, decorators);
199 }
200 }
201
// Load an oop from the address described by obj into dst, going through
// the barrier-aware heap-oop load helper; 'decorators' is forwarded
// unchanged (no temporary registers are supplied).
static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Register dst,
                        Address obj,
                        DecoratorSet decorators = 0) {
  __ load_heap_oop(dst, obj, noreg, noreg, noreg, decorators);
}
208
209 Address TemplateTable::at_bcp(int offset) {
210 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
211 return Address(Rbcp, offset);
212 }
213
214
215 // Blows volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR.
216 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
217 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
218 int byte_no) {
219 assert_different_registers(bc_reg, temp_reg);
220 if (!RewriteBytecodes) return;
221 Label L_patch_done;
222
223 switch (bc) {
224 case Bytecodes::_fast_aputfield:
225 case Bytecodes::_fast_bputfield:
226 case Bytecodes::_fast_zputfield:
799 const Register Rindex = R0_tos;
800
801 index_check(Rarray, Rindex);
802
803 #ifdef __SOFTFP__
804 __ add(Rtemp, Rarray, AsmOperand(Rindex, lsl, LogBytesPerLong));
805 __ add(Rtemp, Rtemp, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
806 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
807 #else
808 __ ldr_double(D0_tos, get_array_elem_addr(T_DOUBLE, Rarray, Rindex, Rtemp));
809 #endif // __SOFTFP__
810 }
811
812
813 void TemplateTable::aaload() {
814 transition(itos, atos);
815 const Register Rarray = R1_tmp;
816 const Register Rindex = R0_tos;
817
818 index_check(Rarray, Rindex);
819 do_oop_load(_masm, R0_tos, get_array_elem_addr(T_OBJECT, Rarray, Rindex, Rtemp), IN_HEAP_ARRAY);
820 }
821
822
823 void TemplateTable::baload() {
824 transition(itos, itos);
825 const Register Rarray = R1_tmp;
826 const Register Rindex = R0_tos;
827
828 index_check(Rarray, Rindex);
829 __ ldrsb(R0_tos, get_array_elem_addr(T_BYTE, Rarray, Rindex, Rtemp));
830 }
831
832
833 void TemplateTable::caload() {
834 transition(itos, itos);
835 const Register Rarray = R1_tmp;
836 const Register Rindex = R0_tos;
837
838 index_check(Rarray, Rindex);
839 __ ldrh(R0_tos, get_array_elem_addr(T_CHAR, Rarray, Rindex, Rtemp));
1184
1185 // Compute the array base
1186 __ add(Raddr_1, Rarray_3, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1187
1188 // do array store check - check for NULL value first
1189 __ cbz(Rvalue_2, is_null);
1190
1191 // Load subklass
1192 __ load_klass(Rsub_5, Rvalue_2);
1193 // Load superklass
1194 __ load_klass(Rtemp, Rarray_3);
1195 __ ldr(Rsuper_LR, Address(Rtemp, ObjArrayKlass::element_klass_offset()));
1196
1197 __ gen_subtype_check(Rsub_5, Rsuper_LR, throw_array_store, R0_tmp, R3_tmp);
1198 // Come here on success
1199
1200 // Store value
1201 __ add(Raddr_1, Raddr_1, AsmOperand(Rindex_4, lsl, LogBytesPerHeapOop));
1202
1203 // Now store using the appropriate barrier
1204 do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, false, IN_HEAP_ARRAY);
1205 __ b(done);
1206
1207 __ bind(throw_array_store);
1208
1209 // Come here on failure of subtype check
1210 __ profile_typecheck_failed(R0_tmp);
1211
1212 // object is at TOS
1213 __ b(Interpreter::_throw_ArrayStoreException_entry);
1214
1215 // Have a NULL in Rvalue_2, store NULL at array[index].
1216 __ bind(is_null);
1217 __ profile_null_seen(R0_tmp);
1218
1219 // Store a NULL
1220 do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, true, IN_HEAP_ARRAY);
1221
1222 // Pop stack arguments
1223 __ bind(done);
1224 __ add(Rstack_top, Rstack_top, 3 * Interpreter::stackElementSize);
1225 }
1226
1227
1228 void TemplateTable::bastore() {
1229 transition(itos, vtos);
1230 const Register Rindex = R4_tmp; // index_check prefers index in R4
1231 const Register Rarray = R3_tmp;
1232 // R0_tos: value
1233
1234 __ pop_i(Rindex);
1235 index_check(Rarray, Rindex);
1236
1237 // Need to check whether array is boolean or byte
1238 // since both types share the bastore bytecode.
1239 __ load_klass(Rtemp, Rarray);
1240 __ ldr_u32(Rtemp, Address(Rtemp, Klass::layout_helper_offset()));
3222 #ifdef AARCH64
3223 __ ldr(R0_tos, Address(Robj, Roffset));
3224 #else
3225 __ add(Rtemp, Robj, Roffset);
3226 __ ldmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3227 #endif // AARCH64
3228 __ push(ltos);
3229 if (!is_static && rc == may_rewrite) {
3230 patch_bytecode(Bytecodes::_fast_dgetfield, R0_tmp, Rtemp);
3231 }
3232 __ b(Done);
3233 }
3234
3235 // atos
3236 {
3237 assert(atos == seq++, "atos has unexpected value");
3238
3239 // atos case for AArch64 and slow version on 32-bit ARM
3240 if(!atos_merged_with_itos) {
3241 __ bind(Latos);
3242 do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
3243 __ push(atos);
3244 // Rewrite bytecode to be faster
3245 if (!is_static && rc == may_rewrite) {
3246 patch_bytecode(Bytecodes::_fast_agetfield, R0_tmp, Rtemp);
3247 }
3248 __ b(Done);
3249 }
3250 }
3251
3252 assert(vtos == seq++, "vtos has unexpected value");
3253
3254 __ bind(shouldNotReachHere);
3255 __ should_not_reach_here();
3256
3257 // itos and atos cases are frequent so it makes sense to move them out of table switch
3258 // atos case can be merged with itos case (and thus moved out of table switch) on 32-bit ARM, fast version only
3259
3260 __ bind(Lint);
3261 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3262 __ push(itos);
3574 if (!is_static) pop_and_check_object(Robj);
3575 #ifdef AARCH64
3576 __ str(R0_tos, Address(Robj, Roffset));
3577 #else
3578 __ add(Rtemp, Robj, Roffset);
3579 __ stmia(Rtemp, RegisterSet(R0_tos_lo, R1_tos_hi));
3580 #endif // AARCH64
3581 if (!is_static && rc == may_rewrite) {
3582 patch_bytecode(Bytecodes::_fast_dputfield, R0_tmp, Rtemp, true, byte_no);
3583 }
3584 __ b(Done);
3585 }
3586
3587 // atos
3588 {
3589 assert(atos == seq++, "dtos has unexpected value");
3590 __ bind(Latos);
3591 __ pop(atos);
3592 if (!is_static) pop_and_check_object(Robj);
3593 // Store into the field
3594 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R5_tmp, false);
3595 if (!is_static && rc == may_rewrite) {
3596 patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3597 }
3598 __ b(Done);
3599 }
3600
3601 __ bind(shouldNotReachHere);
3602 __ should_not_reach_here();
3603
3604 // itos case is frequent and is moved outside table switch
3605 __ bind(Lint);
3606 __ pop(itos);
3607 if (!is_static) pop_and_check_object(Robj);
3608 __ str_32(R0_tos, Address(Robj, Roffset));
3609 if (!is_static && rc == may_rewrite) {
3610 patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3611 }
3612
3613 __ bind(Done);
3614
3752 case Bytecodes::_fast_lputfield: __ str (R0_tos, Address(Robj, Roffset)); break;
3753 case Bytecodes::_fast_fputfield: __ str_s(S0_tos, Address(Robj, Roffset)); break;
3754 case Bytecodes::_fast_dputfield: __ str_d(D0_tos, Address(Robj, Roffset)); break;
3755 #else
3756 case Bytecodes::_fast_lputfield: __ add(Robj, Robj, Roffset);
3757 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3758
3759 #ifdef __SOFTFP__
3760 case Bytecodes::_fast_fputfield: __ str(R0_tos, Address(Robj, Roffset)); break;
3761 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3762 __ stmia(Robj, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3763 #else
3764 case Bytecodes::_fast_fputfield: __ add(Robj, Robj, Roffset);
3765 __ fsts(S0_tos, Address(Robj)); break;
3766 case Bytecodes::_fast_dputfield: __ add(Robj, Robj, Roffset);
3767 __ fstd(D0_tos, Address(Robj)); break;
3768 #endif // __SOFTFP__
3769 #endif // AARCH64
3770
3771 case Bytecodes::_fast_aputfield:
3772 do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R2_tmp, false);
3773 break;
3774
3775 default:
3776 ShouldNotReachHere();
3777 }
3778
3779 if (gen_volatile_check) {
3780 Label notVolatile;
3781 Label skipMembar;
3782 __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3783 1 << ConstantPoolCacheEntry::is_final_shift);
3784 __ b(skipMembar, eq);
3785
3786 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3787
3788 // StoreLoad barrier after volatile field write
3789 volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3790 __ b(skipMembar);
3791
3792 // StoreStore barrier after final field write
3848 case Bytecodes::_fast_bgetfield: __ ldrsb(R0_tos, Address(Robj, Roffset)); break;
3849 case Bytecodes::_fast_sgetfield: __ ldrsh(R0_tos, Address(Robj, Roffset)); break;
3850 case Bytecodes::_fast_cgetfield: __ ldrh (R0_tos, Address(Robj, Roffset)); break;
3851 case Bytecodes::_fast_igetfield: __ ldr_s32(R0_tos, Address(Robj, Roffset)); break;
3852 #ifdef AARCH64
3853 case Bytecodes::_fast_lgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3854 case Bytecodes::_fast_fgetfield: __ ldr_s(S0_tos, Address(Robj, Roffset)); break;
3855 case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, Address(Robj, Roffset)); break;
3856 #else
3857 case Bytecodes::_fast_lgetfield: __ add(Roffset, Robj, Roffset);
3858 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3859 #ifdef __SOFTFP__
3860 case Bytecodes::_fast_fgetfield: __ ldr (R0_tos, Address(Robj, Roffset)); break;
3861 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset);
3862 __ ldmia(Roffset, RegisterSet(R0_tos_lo, R1_tos_hi)); break;
3863 #else
3864 case Bytecodes::_fast_fgetfield: __ add(Roffset, Robj, Roffset); __ flds(S0_tos, Address(Roffset)); break;
3865 case Bytecodes::_fast_dgetfield: __ add(Roffset, Robj, Roffset); __ fldd(D0_tos, Address(Roffset)); break;
3866 #endif // __SOFTFP__
3867 #endif // AARCH64
3868 case Bytecodes::_fast_agetfield: do_oop_load(_masm, R0_tos, Address(Robj, Roffset)); __ verify_oop(R0_tos); break;
3869 default:
3870 ShouldNotReachHere();
3871 }
3872
3873 if (gen_volatile_check) {
3874 // Check for volatile load
3875 Label notVolatile;
3876 __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3877
// TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
3879 volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3880
3881 __ bind(notVolatile);
3882 }
3883 }
3884
3885
3886 void TemplateTable::fast_xaccess(TosState state) {
3887 transition(vtos, state);
3888
3928 __ decode_heap_oop(R0_tos);
3929 } else {
3930 __ ldar(R0_tos, Rtemp);
3931 }
3932 __ verify_oop(R0_tos);
3933 } else if (state == ftos) {
3934 __ ldar_w(R0_tos, Rtemp);
3935 __ fmov_sw(S0_tos, R0_tos);
3936 } else {
3937 ShouldNotReachHere();
3938 }
3939 __ b(done);
3940
3941 __ bind(notVolatile);
3942 }
3943 #endif // AARCH64
3944
3945 if (state == itos) {
3946 __ ldr_s32(R0_tos, Address(Robj, Roffset));
3947 } else if (state == atos) {
3948 do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
3949 __ verify_oop(R0_tos);
3950 } else if (state == ftos) {
3951 #ifdef AARCH64
3952 __ ldr_s(S0_tos, Address(Robj, Roffset));
3953 #else
3954 #ifdef __SOFTFP__
3955 __ ldr(R0_tos, Address(Robj, Roffset));
3956 #else
3957 __ add(Roffset, Robj, Roffset);
3958 __ flds(S0_tos, Address(Roffset));
3959 #endif // __SOFTFP__
3960 #endif // AARCH64
3961 } else {
3962 ShouldNotReachHere();
3963 }
3964
3965 #ifndef AARCH64
3966 if (gen_volatile_check) {
3967 // Check for volatile load
3968 Label notVolatile;
|