--- old/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp 2018-05-14 16:13:11.128168518 -0400 +++ new/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp 2018-05-14 16:13:10.838190900 -0400 @@ -56,6 +56,15 @@ } break; } + case T_BOOLEAN: __ load_unsigned_byte (dst, src); break; + case T_BYTE: __ load_signed_byte (dst, src); break; + case T_CHAR: __ load_unsigned_short(dst, src); break; + case T_SHORT: __ load_signed_short (dst, src); break; + case T_INT: __ ldrw (dst, src); break; + case T_LONG: __ ldr (dst, src); break; + case T_ADDRESS: __ ldr (dst, src); break; + case T_FLOAT: __ ldrs (v0, src); break; + case T_DOUBLE: __ ldrd (v0, src); break; default: Unimplemented(); } } @@ -84,6 +93,18 @@ } break; } + case T_BOOLEAN: + __ andw(val, val, 0x1); // boolean is true if LSB is 1 + __ strb(val, dst); + break; + case T_BYTE: __ strb(val, dst); break; + case T_CHAR: __ strh(val, dst); break; + case T_SHORT: __ strh(val, dst); break; + case T_INT: __ strw(val, dst); break; + case T_LONG: __ str (val, dst); break; + case T_ADDRESS: __ str (val, dst); break; + case T_FLOAT: __ strs(v0, dst); break; + case T_DOUBLE: __ strd(v0, dst); break; default: Unimplemented(); } } --- old/src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp 2018-05-14 16:13:12.148089794 -0400 +++ new/src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp 2018-05-14 16:13:11.868111404 -0400 @@ -267,9 +267,6 @@ void InterpreterMacroAssembler::load_resolved_reference_at_index( Register result, Register index, Register tmp) { assert_different_registers(result, index); - // convert from field index to resolved_references() index and from - // word index to byte offset. 
Since this is a java object, it can be compressed - lslw(index, index, LogBytesPerHeapOop); get_constant_pool(result); // load pointer for resolved_references[] objArray @@ -277,8 +274,8 @@ ldr(result, Address(result, ConstantPoolCache::resolved_references_offset_in_bytes())); resolve_oop_handle(result, tmp); // Add in the index - add(result, result, index); - load_heap_oop(result, Address(result, arrayOopDesc::base_offset_in_bytes(T_OBJECT))); + add(index, index, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop); + load_heap_oop(result, Address(result, index, Address::uxtw(LogBytesPerHeapOop))); } void InterpreterMacroAssembler::load_resolved_klass_at_offset( --- old/src/hotspot/cpu/aarch64/jniFastGetField_aarch64.cpp 2018-05-14 16:13:13.168011070 -0400 +++ new/src/hotspot/cpu/aarch64/jniFastGetField_aarch64.cpp 2018-05-14 16:13:12.888032681 -0400 @@ -91,30 +91,23 @@ assert(count < LIST_CAPACITY, "LIST_CAPACITY too small"); speculative_load_pclist[count] = __ pc(); // Used by the segfault handler + __ access_load_at(type, IN_HEAP, noreg /* tos: r0/v0 */, Address(robj, roffset), noreg, noreg); + + // Ensure that field has finished loading before loading the safepoint_counter. switch (type) { - case T_BOOLEAN: __ ldrb (result, Address(robj, roffset)); break; - case T_BYTE: __ ldrsb (result, Address(robj, roffset)); break; - case T_CHAR: __ ldrh (result, Address(robj, roffset)); break; - case T_SHORT: __ ldrsh (result, Address(robj, roffset)); break; - case T_FLOAT: __ ldrw (result, Address(robj, roffset)); break; - case T_INT: __ ldrsw (result, Address(robj, roffset)); break; + case T_FLOAT: case T_DOUBLE: - case T_LONG: __ ldr (result, Address(robj, roffset)); break; - default: ShouldNotReachHere(); + __ membar(MacroAssembler::LoadLoad); + break; + default: + // counter_addr is address dependent on result. + __ eor(rcounter_addr, rcounter_addr, r0); + __ eor(rcounter_addr, rcounter_addr, r0); } - // counter_addr is address dependent on result. 
- __ eor(rcounter_addr, rcounter_addr, result); - __ eor(rcounter_addr, rcounter_addr, result); __ ldrw(rscratch1, safepoint_counter_addr); __ cmpw(rcounter, rscratch1); __ br (Assembler::NE, slow); - - switch (type) { - case T_FLOAT: __ fmovs(v0, result); break; - case T_DOUBLE: __ fmovd(v0, result); break; - default: __ mov(r0, result); break; - } __ ret(lr); slowcase_entry_pclist[count++] = __ pc(); --- old/src/hotspot/cpu/aarch64/methodHandles_aarch64.cpp 2018-05-14 16:13:14.167933890 -0400 +++ new/src/hotspot/cpu/aarch64/methodHandles_aarch64.cpp 2018-05-14 16:13:13.887955500 -0400 @@ -141,7 +141,7 @@ __ verify_oop(method_temp); __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_MemberName::method_offset_in_bytes())), temp2); __ verify_oop(method_temp); - __ ldr(method_temp, Address(method_temp, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset_in_bytes()))); + __ access_load_at(T_ADDRESS, IN_HEAP, method_temp, Address(method_temp, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset_in_bytes())), noreg, noreg); if (VerifyMethodHandles && !for_compiler_entry) { // make sure recv is already on stack @@ -340,7 +340,7 @@ verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3); } __ load_heap_oop(rmethod, member_vmtarget); - __ ldr(rmethod, vmtarget_method); + __ access_load_at(T_ADDRESS, IN_HEAP, rmethod, vmtarget_method, noreg, noreg); break; case vmIntrinsics::_linkToStatic: @@ -348,7 +348,7 @@ verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3); } __ load_heap_oop(rmethod, member_vmtarget); - __ ldr(rmethod, vmtarget_method); + __ access_load_at(T_ADDRESS, IN_HEAP, rmethod, vmtarget_method, noreg, noreg); break; case vmIntrinsics::_linkToVirtual: @@ -362,7 +362,7 @@ // pick out the vtable index from the MemberName, and then we can discard it: Register temp2_index = temp2; - __ ldr(temp2_index, member_vmindex); + __ access_load_at(T_ADDRESS, IN_HEAP, temp2_index, member_vmindex, noreg, noreg); if 
(VerifyMethodHandles) { Label L_index_ok; @@ -394,7 +394,7 @@ __ verify_klass_ptr(temp3_intf); Register rindex = rmethod; - __ ldr(rindex, member_vmindex); + __ access_load_at(T_ADDRESS, IN_HEAP, rindex, member_vmindex, noreg, noreg); if (VerifyMethodHandles) { Label L; __ cmpw(rindex, 0U); --- old/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp 2018-05-14 16:13:15.167856710 -0400 +++ new/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp 2018-05-14 16:13:14.877879092 -0400 @@ -760,8 +760,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(2))); - __ ldrw(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_INT))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2); + __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg); } void TemplateTable::laload() @@ -772,8 +772,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(3))); - __ ldr(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_LONG))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3); + __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg); } void TemplateTable::faload() @@ -784,8 +784,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(2))); - __ ldrs(v0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2); + __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg); } void TemplateTable::daload() @@ -796,8 +796,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(3))); - __ ldrd(v0, Address(r1, 
arrayOopDesc::base_offset_in_bytes(T_DOUBLE))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3); + __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg); } void TemplateTable::aaload() @@ -808,10 +808,9 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - int s = (UseCompressedOops ? 2 : 3); - __ lea(r1, Address(r0, r1, Address::uxtw(s))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop); do_oop_load(_masm, - Address(r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT)), + Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IN_HEAP | IN_HEAP_ARRAY); } @@ -824,8 +823,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(0))); - __ load_signed_byte(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_BYTE))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0); + __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg); } void TemplateTable::caload() @@ -836,8 +835,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(1))); - __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); + __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); } // iload followed by caload frequent pair @@ -853,8 +852,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(1))); - __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); + __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, 
Address::uxtw(1)), noreg, noreg); } void TemplateTable::saload() @@ -865,8 +864,8 @@ // r0: array // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 - __ lea(r1, Address(r0, r1, Address::uxtw(1))); - __ load_signed_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_SHORT))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_SHORT) >> 1); + __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); } void TemplateTable::iload(int n) @@ -1059,9 +1058,8 @@ // r1: index // r3: array index_check(r3, r1); // prefer index in r1 - __ lea(rscratch1, Address(r3, r1, Address::uxtw(2))); - __ strw(r0, Address(rscratch1, - arrayOopDesc::base_offset_in_bytes(T_INT))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2); + __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), r0, noreg, noreg); } void TemplateTable::lastore() { @@ -1072,9 +1070,8 @@ // r1: index // r3: array index_check(r3, r1); // prefer index in r1 - __ lea(rscratch1, Address(r3, r1, Address::uxtw(3))); - __ str(r0, Address(rscratch1, - arrayOopDesc::base_offset_in_bytes(T_LONG))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3); + __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), r0, noreg, noreg); } void TemplateTable::fastore() { @@ -1085,9 +1082,8 @@ // r1: index // r3: array index_check(r3, r1); // prefer index in r1 - __ lea(rscratch1, Address(r3, r1, Address::uxtw(2))); - __ strs(v0, Address(rscratch1, - arrayOopDesc::base_offset_in_bytes(T_FLOAT))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2); + __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg); } void TemplateTable::dastore() { @@ -1098,9 +1094,8 @@ // r1: index // r3: array index_check(r3, r1); // prefer index in r1 - __ lea(rscratch1, Address(r3, r1, Address::uxtw(3))); - __ 
strd(v0, Address(rscratch1, - arrayOopDesc::base_offset_in_bytes(T_DOUBLE))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3); + __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg); } void TemplateTable::aastore() { @@ -1111,10 +1106,10 @@ __ ldr(r2, at_tos_p1()); // index __ ldr(r3, at_tos_p2()); // array - Address element_address(r4, arrayOopDesc::base_offset_in_bytes(T_OBJECT)); + Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop)); index_check(r3, r2); // kills r1 - __ lea(r4, Address(r3, r2, Address::uxtw(UseCompressedOops? 2 : 3))); + __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop); // do array store check - check for NULL value first __ cbz(r0, is_null); @@ -1176,9 +1171,8 @@ __ andw(r0, r0, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1 __ bind(L_skip); - __ lea(rscratch1, Address(r3, r1, Address::uxtw(0))); - __ strb(r0, Address(rscratch1, - arrayOopDesc::base_offset_in_bytes(T_BYTE))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0); + __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(0)), r0, noreg, noreg); } void TemplateTable::castore() @@ -1190,9 +1184,8 @@ // r1: index // r3: array index_check(r3, r1); // prefer index in r1 - __ lea(rscratch1, Address(r3, r1, Address::uxtw(1))); - __ strh(r0, Address(rscratch1, - arrayOopDesc::base_offset_in_bytes(T_CHAR))); + __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); + __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(1)), r0, noreg, noreg); } void TemplateTable::sastore() @@ -2513,7 +2506,7 @@ if (is_static) rc = may_not_rewrite; // btos - __ load_signed_byte(r0, field); + __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg); __ push(btos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2526,7 +2519,7 @@ __ br(Assembler::NE, notBool); // 
ztos (same code as btos) - __ ldrsb(r0, field); + __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg); __ push(ztos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2550,7 +2543,7 @@ __ cmp(flags, itos); __ br(Assembler::NE, notInt); // itos - __ ldrw(r0, field); + __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg); __ push(itos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2562,7 +2555,7 @@ __ cmp(flags, ctos); __ br(Assembler::NE, notChar); // ctos - __ load_unsigned_short(r0, field); + __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg); __ push(ctos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2574,7 +2567,7 @@ __ cmp(flags, stos); __ br(Assembler::NE, notShort); // stos - __ load_signed_short(r0, field); + __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg); __ push(stos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2586,7 +2579,7 @@ __ cmp(flags, ltos); __ br(Assembler::NE, notLong); // ltos - __ ldr(r0, field); + __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg); __ push(ltos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2598,7 +2591,7 @@ __ cmp(flags, ftos); __ br(Assembler::NE, notFloat); // ftos - __ ldrs(v0, field); + __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg); __ push(ftos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2612,7 +2605,7 @@ __ br(Assembler::NE, notDouble); #endif // dtos - __ ldrd(v0, field); + __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg); __ push(dtos); // Rewrite bytecode to be faster if (rc == may_rewrite) { @@ -2750,7 +2743,7 @@ { __ pop(btos); if (!is_static) pop_and_check_object(obj); - __ strb(r0, field); + __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no); } @@ -2765,8 +2758,7 @@ { __ pop(ztos); if (!is_static) 
pop_and_check_object(obj); - __ andw(r0, r0, 0x1); - __ strb(r0, field); + __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no); } @@ -2797,7 +2789,7 @@ { __ pop(itos); if (!is_static) pop_and_check_object(obj); - __ strw(r0, field); + __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no); } @@ -2812,7 +2804,7 @@ { __ pop(ctos); if (!is_static) pop_and_check_object(obj); - __ strh(r0, field); + __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no); } @@ -2827,7 +2819,7 @@ { __ pop(stos); if (!is_static) pop_and_check_object(obj); - __ strh(r0, field); + __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no); } @@ -2842,7 +2834,7 @@ { __ pop(ltos); if (!is_static) pop_and_check_object(obj); - __ str(r0, field); + __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no); } @@ -2857,7 +2849,7 @@ { __ pop(ftos); if (!is_static) pop_and_check_object(obj); - __ strs(v0, field); + __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no); } @@ -2874,7 +2866,7 @@ { __ pop(dtos); if (!is_static) pop_and_check_object(obj); - __ strd(v0, field); + __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg); if (rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no); } @@ -3005,27 +2997,28 @@ do_oop_store(_masm, field, r0, IN_HEAP); break; case Bytecodes::_fast_lputfield: - __ str(r0, field); + __ access_store_at(T_LONG, IN_HEAP, 
field, r0, noreg, noreg); break; case Bytecodes::_fast_iputfield: - __ strw(r0, field); + __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg); break; case Bytecodes::_fast_zputfield: - __ andw(r0, r0, 0x1); // boolean is true if LSB is 1 - // fall through to bputfield + __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg); + break; case Bytecodes::_fast_bputfield: - __ strb(r0, field); + __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg); break; case Bytecodes::_fast_sputfield: - // fall through + __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg); + break; case Bytecodes::_fast_cputfield: - __ strh(r0, field); + __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg); break; case Bytecodes::_fast_fputfield: - __ strs(v0, field); + __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg); break; case Bytecodes::_fast_dputfield: - __ strd(v0, field); + __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg); break; default: ShouldNotReachHere(); @@ -3098,25 +3091,25 @@ __ verify_oop(r0); break; case Bytecodes::_fast_lgetfield: - __ ldr(r0, field); + __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg); break; case Bytecodes::_fast_igetfield: - __ ldrw(r0, field); + __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg); break; case Bytecodes::_fast_bgetfield: - __ load_signed_byte(r0, field); + __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg); break; case Bytecodes::_fast_sgetfield: - __ load_signed_short(r0, field); + __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg); break; case Bytecodes::_fast_cgetfield: - __ load_unsigned_short(r0, field); + __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg); break; case Bytecodes::_fast_fgetfield: - __ ldrs(v0, field); + __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg); break; case Bytecodes::_fast_dgetfield: - __ ldrd(v0, field); + __ access_load_at(T_DOUBLE, IN_HEAP, noreg 
/* dtos */, field, noreg, noreg); break; default: ShouldNotReachHere(); @@ -3161,14 +3154,14 @@ __ null_check(r0); switch (state) { case itos: - __ ldrw(r0, Address(r0, r1, Address::lsl(0))); + __ access_load_at(T_INT, IN_HEAP, r0, Address(r0, r1, Address::lsl(0)), noreg, noreg); break; case atos: do_oop_load(_masm, Address(r0, r1, Address::lsl(0)), r0, IN_HEAP); __ verify_oop(r0); break; case ftos: - __ ldrs(v0, Address(r0, r1, Address::lsl(0))); + __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, Address(r0, r1, Address::lsl(0)), noreg, noreg); break; default: ShouldNotReachHere();