< prev index next >
src/hotspot/cpu/aarch64/templateTable_aarch64.cpp
Print this page
rev 50376 : [mq]: JDK-8203172.patch
@@ -758,62 +758,61 @@
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(2)));
- __ ldrw(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_INT)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
+ __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
}
void TemplateTable::laload()
{
transition(itos, ltos);
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(3)));
- __ ldr(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_LONG)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3);
+ __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
}
void TemplateTable::faload()
{
transition(itos, ftos);
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(2)));
- __ ldrs(v0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
+ __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
}
void TemplateTable::daload()
{
transition(itos, dtos);
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(3)));
- __ ldrd(v0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
+ __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
}
void TemplateTable::aaload()
{
transition(itos, atos);
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- int s = (UseCompressedOops ? 2 : 3);
- __ lea(r1, Address(r0, r1, Address::uxtw(s)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
do_oop_load(_masm,
- Address(r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
+ Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
r0,
IN_HEAP | IN_HEAP_ARRAY);
}
void TemplateTable::baload()
@@ -822,24 +821,24 @@
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(0)));
- __ load_signed_byte(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
+ __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
}
void TemplateTable::caload()
{
transition(itos, itos);
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(1)));
- __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1);
+ __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg);
}
// iload followed by caload frequent pair
void TemplateTable::fast_icaload()
{
@@ -851,24 +850,24 @@
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(1)));
- __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1);
+ __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg);
}
void TemplateTable::saload()
{
transition(itos, itos);
__ mov(r1, r0);
__ pop_ptr(r0);
// r0: array
// r1: index
index_check(r0, r1); // leaves index in r1, kills rscratch1
- __ lea(r1, Address(r0, r1, Address::uxtw(1)));
- __ load_signed_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_SHORT) >> 1);
+ __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg);
}
void TemplateTable::iload(int n)
{
transition(vtos, itos);
@@ -1057,66 +1056,62 @@
__ pop_ptr(r3);
// r0: value
// r1: index
// r3: array
index_check(r3, r1); // prefer index in r1
- __ lea(rscratch1, Address(r3, r1, Address::uxtw(2)));
- __ strw(r0, Address(rscratch1,
- arrayOopDesc::base_offset_in_bytes(T_INT)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
+ __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), r0, noreg, noreg);
}
void TemplateTable::lastore() {
transition(ltos, vtos);
__ pop_i(r1);
__ pop_ptr(r3);
// r0: value
// r1: index
// r3: array
index_check(r3, r1); // prefer index in r1
- __ lea(rscratch1, Address(r3, r1, Address::uxtw(3)));
- __ str(r0, Address(rscratch1,
- arrayOopDesc::base_offset_in_bytes(T_LONG)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3);
+ __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), r0, noreg, noreg);
}
void TemplateTable::fastore() {
transition(ftos, vtos);
__ pop_i(r1);
__ pop_ptr(r3);
// v0: value
// r1: index
// r3: array
index_check(r3, r1); // prefer index in r1
- __ lea(rscratch1, Address(r3, r1, Address::uxtw(2)));
- __ strs(v0, Address(rscratch1,
- arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
+ __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg);
}
void TemplateTable::dastore() {
transition(dtos, vtos);
__ pop_i(r1);
__ pop_ptr(r3);
// v0: value
// r1: index
// r3: array
index_check(r3, r1); // prefer index in r1
- __ lea(rscratch1, Address(r3, r1, Address::uxtw(3)));
- __ strd(v0, Address(rscratch1,
- arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
+ __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
}
void TemplateTable::aastore() {
Label is_null, ok_is_subtype, done;
transition(vtos, vtos);
// stack: ..., array, index, value
__ ldr(r0, at_tos()); // value
__ ldr(r2, at_tos_p1()); // index
__ ldr(r3, at_tos_p2()); // array
- Address element_address(r4, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
+ Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
index_check(r3, r2); // kills r1
- __ lea(r4, Address(r3, r2, Address::uxtw(UseCompressedOops? 2 : 3)));
+ __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
// do array store check - check for NULL value first
__ cbz(r0, is_null);
// Move subklass into r1
@@ -1174,13 +1169,12 @@
Label L_skip;
__ tbz(r2, diffbit_index, L_skip);
__ andw(r0, r0, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1
__ bind(L_skip);
- __ lea(rscratch1, Address(r3, r1, Address::uxtw(0)));
- __ strb(r0, Address(rscratch1,
- arrayOopDesc::base_offset_in_bytes(T_BYTE)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
+ __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(0)), r0, noreg, noreg);
}
void TemplateTable::castore()
{
transition(itos, vtos);
@@ -1188,13 +1182,12 @@
__ pop_ptr(r3);
// r0: value
// r1: index
// r3: array
index_check(r3, r1); // prefer index in r1
- __ lea(rscratch1, Address(r3, r1, Address::uxtw(1)));
- __ strh(r0, Address(rscratch1,
- arrayOopDesc::base_offset_in_bytes(T_CHAR)));
+ __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1);
+ __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(1)), r0, noreg, noreg);
}
void TemplateTable::sastore()
{
castore();
@@ -2511,11 +2504,11 @@
// Don't rewrite getstatic, only getfield
if (is_static) rc = may_not_rewrite;
// btos
- __ load_signed_byte(r0, field);
+ __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
__ push(btos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
}
@@ -2524,11 +2517,11 @@
__ bind(notByte);
__ cmp(flags, ztos);
__ br(Assembler::NE, notBool);
// ztos (same code as btos)
- __ ldrsb(r0, field);
+ __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
__ push(ztos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
// use btos rewriting, no truncating to t/f bit is needed for getfield.
patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
@@ -2548,11 +2541,11 @@
__ bind(notObj);
__ cmp(flags, itos);
__ br(Assembler::NE, notInt);
// itos
- __ ldrw(r0, field);
+ __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
__ push(itos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
}
@@ -2560,11 +2553,11 @@
__ bind(notInt);
__ cmp(flags, ctos);
__ br(Assembler::NE, notChar);
// ctos
- __ load_unsigned_short(r0, field);
+ __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
__ push(ctos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_cgetfield, bc, r1);
}
@@ -2572,11 +2565,11 @@
__ bind(notChar);
__ cmp(flags, stos);
__ br(Assembler::NE, notShort);
// stos
- __ load_signed_short(r0, field);
+ __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
__ push(stos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_sgetfield, bc, r1);
}
@@ -2584,11 +2577,11 @@
__ bind(notShort);
__ cmp(flags, ltos);
__ br(Assembler::NE, notLong);
// ltos
- __ ldr(r0, field);
+ __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
__ push(ltos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_lgetfield, bc, r1);
}
@@ -2596,11 +2589,11 @@
__ bind(notLong);
__ cmp(flags, ftos);
__ br(Assembler::NE, notFloat);
// ftos
- __ ldrs(v0, field);
+ __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
__ push(ftos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_fgetfield, bc, r1);
}
@@ -2610,11 +2603,11 @@
#ifdef ASSERT
__ cmp(flags, dtos);
__ br(Assembler::NE, notDouble);
#endif
// dtos
- __ ldrd(v0, field);
+ __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
__ push(dtos);
// Rewrite bytecode to be faster
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dgetfield, bc, r1);
}
@@ -2748,11 +2741,11 @@
// btos
{
__ pop(btos);
if (!is_static) pop_and_check_object(obj);
- __ strb(r0, field);
+ __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2763,12 +2756,11 @@
// ztos
{
__ pop(ztos);
if (!is_static) pop_and_check_object(obj);
- __ andw(r0, r0, 0x1);
- __ strb(r0, field);
+ __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2795,11 +2787,11 @@
// itos
{
__ pop(itos);
if (!is_static) pop_and_check_object(obj);
- __ strw(r0, field);
+ __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2810,11 +2802,11 @@
// ctos
{
__ pop(ctos);
if (!is_static) pop_and_check_object(obj);
- __ strh(r0, field);
+ __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2825,11 +2817,11 @@
// stos
{
__ pop(stos);
if (!is_static) pop_and_check_object(obj);
- __ strh(r0, field);
+ __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2840,11 +2832,11 @@
// ltos
{
__ pop(ltos);
if (!is_static) pop_and_check_object(obj);
- __ str(r0, field);
+ __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2855,11 +2847,11 @@
// ftos
{
__ pop(ftos);
if (!is_static) pop_and_check_object(obj);
- __ strs(v0, field);
+ __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
}
__ b(Done);
}
@@ -2872,11 +2864,11 @@
// dtos
{
__ pop(dtos);
if (!is_static) pop_and_check_object(obj);
- __ strd(v0, field);
+ __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
if (rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
}
}
@@ -3003,31 +2995,32 @@
switch (bytecode()) {
case Bytecodes::_fast_aputfield:
do_oop_store(_masm, field, r0, IN_HEAP);
break;
case Bytecodes::_fast_lputfield:
- __ str(r0, field);
+ __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
break;
case Bytecodes::_fast_iputfield:
- __ strw(r0, field);
+ __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
break;
case Bytecodes::_fast_zputfield:
- __ andw(r0, r0, 0x1); // boolean is true if LSB is 1
- // fall through to bputfield
+ __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
+ break;
case Bytecodes::_fast_bputfield:
- __ strb(r0, field);
+ __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
break;
case Bytecodes::_fast_sputfield:
- // fall through
+ __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
+ break;
case Bytecodes::_fast_cputfield:
- __ strh(r0, field);
+ __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
break;
case Bytecodes::_fast_fputfield:
- __ strs(v0, field);
+ __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
break;
case Bytecodes::_fast_dputfield:
- __ strd(v0, field);
+ __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
break;
default:
ShouldNotReachHere();
}
@@ -3096,29 +3089,29 @@
case Bytecodes::_fast_agetfield:
do_oop_load(_masm, field, r0, IN_HEAP);
__ verify_oop(r0);
break;
case Bytecodes::_fast_lgetfield:
- __ ldr(r0, field);
+ __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
break;
case Bytecodes::_fast_igetfield:
- __ ldrw(r0, field);
+ __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
break;
case Bytecodes::_fast_bgetfield:
- __ load_signed_byte(r0, field);
+ __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
break;
case Bytecodes::_fast_sgetfield:
- __ load_signed_short(r0, field);
+ __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
break;
case Bytecodes::_fast_cgetfield:
- __ load_unsigned_short(r0, field);
+ __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
break;
case Bytecodes::_fast_fgetfield:
- __ ldrs(v0, field);
+ __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
break;
case Bytecodes::_fast_dgetfield:
- __ ldrd(v0, field);
+ __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
break;
default:
ShouldNotReachHere();
}
{
@@ -3159,18 +3152,18 @@
// next instruction)
__ increment(rbcp);
__ null_check(r0);
switch (state) {
case itos:
- __ ldrw(r0, Address(r0, r1, Address::lsl(0)));
+ __ access_load_at(T_INT, IN_HEAP, r0, Address(r0, r1, Address::lsl(0)), noreg, noreg);
break;
case atos:
do_oop_load(_masm, Address(r0, r1, Address::lsl(0)), r0, IN_HEAP);
__ verify_oop(r0);
break;
case ftos:
- __ ldrs(v0, Address(r0, r1, Address::lsl(0)));
+ __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, Address(r0, r1, Address::lsl(0)), noreg, noreg);
break;
default:
ShouldNotReachHere();
}
< prev index next >