src/hotspot/cpu/arm/templateTable_arm.cpp
@@ -3143,19 +3143,15 @@
const Register Rcache = R4_tmp;
const Register Rflagsav = Rtmp_save0; // R4/R19
const Register Rindex = R5_tmp;
const Register Rflags = R5_tmp;
- const bool gen_volatile_check = os::is_MP();
-
resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
jvmti_post_field_access(Rcache, Rindex, is_static, false);
load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
- if (gen_volatile_check) {
__ mov(Rflagsav, Rflags);
- }
if (!is_static) pop_and_check_object(Robj);
Label Done, Lint, Ltable, shouldNotReachHere;
Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
@@ -3388,20 +3384,17 @@
patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
}
__ bind(Done);
- if (gen_volatile_check) {
// Check for volatile field
Label notVolatile;
__ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
- }
-
}
void TemplateTable::getfield(int byte_no) {
getfield_or_static(byte_no, false);
}
@@ -3489,26 +3482,22 @@
const Register Rcache = R4_tmp;
const Register Rflagsav = Rtmp_save0; // R4/R19
const Register Rindex = R5_tmp;
const Register Rflags = R5_tmp;
- const bool gen_volatile_check = os::is_MP();
-
resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
jvmti_post_field_mod(Rcache, Rindex, is_static);
load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
- if (gen_volatile_check) {
// Check for volatile field
Label notVolatile;
__ mov(Rflagsav, Rflags);
__ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
- }
Label Done, Lint, shouldNotReachHere;
Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
// compute type
@@ -3730,40 +3719,37 @@
patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
}
__ bind(Done);
- if (gen_volatile_check) {
- Label notVolatile;
+ Label notVolatile2;
if (is_static) {
// Just check for volatile. Memory barrier for static final field
// is handled by class initialization.
- __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
+ __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
- __ bind(notVolatile);
+ __ bind(notVolatile2);
} else {
// Check for volatile field and final field
Label skipMembar;
__ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
1 << ConstantPoolCacheEntry::is_final_shift);
__ b(skipMembar, eq);
- __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
+ __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
// StoreLoad barrier after volatile field write
volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
__ b(skipMembar);
// StoreStore barrier after final field write
- __ bind(notVolatile);
+ __ bind(notVolatile2);
volatile_barrier(MacroAssembler::StoreStore, Rtemp);
__ bind(skipMembar);
}
- }
-
}
void TemplateTable::putfield(int byte_no) {
putfield_or_static(byte_no, false);
}
@@ -3829,35 +3815,29 @@
const Register Rindex = R3_tmp;
const Register Roffset = R3_tmp;
const Register Rflags = Rtmp_save0; // R4/R19
const Register Robj = R5_tmp;
- const bool gen_volatile_check = os::is_MP();
-
// access constant pool cache
__ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
__ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
- if (gen_volatile_check) {
// load flags to test volatile
__ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
- }
// replace index with field offset from cache entry
__ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
- if (gen_volatile_check) {
// Check for volatile store
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
// TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explicit barrier
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
- }
// Get object from stack
pop_and_check_object(Robj);
Address addr = Address(Robj, Roffset);
@@ -3900,32 +3880,29 @@
default:
ShouldNotReachHere();
}
- if (gen_volatile_check) {
- Label notVolatile;
+ Label notVolatile2;
Label skipMembar;
__ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
1 << ConstantPoolCacheEntry::is_final_shift);
__ b(skipMembar, eq);
- __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
+ __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
// StoreLoad barrier after volatile field write
volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
__ b(skipMembar);
// StoreStore barrier after final field write
- __ bind(notVolatile);
+ __ bind(notVolatile2);
volatile_barrier(MacroAssembler::StoreStore, Rtemp);
__ bind(skipMembar);
- }
}
-
void TemplateTable::fast_accessfield(TosState state) {
transition(atos, state);
// do the JVMTI work here to avoid disturbing the register state below
if (__ can_post_field_access()) {
@@ -3951,22 +3928,18 @@
const Register Rcache = R2_tmp;
const Register Rflags = R2_tmp;
const Register Rindex = R3_tmp;
const Register Roffset = R3_tmp;
- const bool gen_volatile_check = os::is_MP();
-
// access constant pool cache
__ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
// replace index with field offset from cache entry
__ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
- if (gen_volatile_check) {
// load flags to test volatile
__ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
- }
__ verify_oop(Robj);
__ null_check(Robj, Rtemp);
Address addr = Address(Robj, Roffset);
@@ -4005,20 +3978,18 @@
break;
default:
ShouldNotReachHere();
}
- if (gen_volatile_check) {
// Check for volatile load
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
// TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
- }
}
void TemplateTable::fast_xaccess(TosState state) {
transition(vtos, state);
@@ -4036,24 +4007,19 @@
// access constant pool cache
__ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
__ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
- const bool gen_volatile_check = os::is_MP();
-
- if (gen_volatile_check) {
// load flags to test volatile
__ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
- }
// make sure exception is reported in correct bcp range (getfield is next instruction)
__ add(Rbcp, Rbcp, 1);
__ null_check(Robj, Rtemp);
__ sub(Rbcp, Rbcp, 1);
#ifdef AARCH64
- if (gen_volatile_check) {
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
__ add(Rtemp, Robj, Roffset);
@@ -4074,11 +4040,10 @@
ShouldNotReachHere();
}
__ b(done);
__ bind(notVolatile);
- }
#endif // AARCH64
if (state == itos) {
__ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
} else if (state == atos) {
@@ -4097,19 +4062,17 @@
} else {
ShouldNotReachHere();
}
#ifndef AARCH64
- if (gen_volatile_check) {
// Check for volatile load
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
- }
#endif // !AARCH64
__ bind(done);
}