< prev index next >

src/hotspot/cpu/arm/templateTable_arm.cpp

Print this page

        

@@ -3143,19 +3143,15 @@
   const Register Rcache   = R4_tmp;
   const Register Rflagsav = Rtmp_save0;  // R4/R19
   const Register Rindex   = R5_tmp;
   const Register Rflags   = R5_tmp;
 
-  const bool gen_volatile_check = os::is_MP();
-
   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
   jvmti_post_field_access(Rcache, Rindex, is_static, false);
   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
 
-  if (gen_volatile_check) {
     __ mov(Rflagsav, Rflags);
-  }
 
   if (!is_static) pop_and_check_object(Robj);
 
   Label Done, Lint, Ltable, shouldNotReachHere;
   Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;

@@ -3388,11 +3384,11 @@
     patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
   }
 
   __ bind(Done);
 
-  if (gen_volatile_check) {
+  {
     // Check for volatile field
     Label notVolatile;
     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
 
     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);

@@ -3489,17 +3485,15 @@
   const Register Rcache   = R4_tmp;
   const Register Rflagsav = Rtmp_save0;  // R4/R19
   const Register Rindex   = R5_tmp;
   const Register Rflags   = R5_tmp;
 
-  const bool gen_volatile_check = os::is_MP();
-
   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
   jvmti_post_field_mod(Rcache, Rindex, is_static);
   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
 
-  if (gen_volatile_check) {
+  {
     // Check for volatile field
     Label notVolatile;
     __ mov(Rflagsav, Rflags);
     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
 

@@ -3730,11 +3724,11 @@
     patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
   }
 
   __ bind(Done);
 
-  if (gen_volatile_check) {
+  {
     Label notVolatile;
     if (is_static) {
       // Just check for volatile. Memory barrier for static final field
       // is handled by class initialization.
       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);

@@ -3829,26 +3823,22 @@
   const Register Rindex  = R3_tmp;
   const Register Roffset = R3_tmp;
   const Register Rflags  = Rtmp_save0; // R4/R19
   const Register Robj    = R5_tmp;
 
-  const bool gen_volatile_check = os::is_MP();
-
   // access constant pool cache
   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
 
   __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
 
-  if (gen_volatile_check) {
     // load flags to test volatile
     __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
-  }
 
   // replace index with field offset from cache entry
   __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
 
-  if (gen_volatile_check) {
+  {
     // Check for volatile store
     Label notVolatile;
     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
 
    // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explicit barrier

@@ -3900,11 +3890,11 @@
 
     default:
       ShouldNotReachHere();
   }
 
-  if (gen_volatile_check) {
+  {
     Label notVolatile;
     Label skipMembar;
     __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
                    1 << ConstantPoolCacheEntry::is_final_shift);
     __ b(skipMembar, eq);

@@ -3951,22 +3941,18 @@
   const Register Rcache  = R2_tmp;
   const Register Rflags  = R2_tmp;
   const Register Rindex  = R3_tmp;
   const Register Roffset = R3_tmp;
 
-  const bool gen_volatile_check = os::is_MP();
-
   // access constant pool cache
   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
   // replace index with field offset from cache entry
   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
 
-  if (gen_volatile_check) {
     // load flags to test volatile
     __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
-  }
 
   __ verify_oop(Robj);
   __ null_check(Robj, Rtemp);
 
   Address addr = Address(Robj, Roffset);

@@ -4005,11 +3991,11 @@
       break;
     default:
       ShouldNotReachHere();
   }
 
-  if (gen_volatile_check) {
+  {
     // Check for volatile load
     Label notVolatile;
     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
 
    // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier

@@ -4036,16 +4022,12 @@
   // access constant pool cache
   __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
 
-  const bool gen_volatile_check = os::is_MP();
-
-  if (gen_volatile_check) {
     // load flags to test volatile
     __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
-  }
 
   // make sure exception is reported in correct bcp range (getfield is next instruction)
   __ add(Rbcp, Rbcp, 1);
   __ null_check(Robj, Rtemp);
   __ sub(Rbcp, Rbcp, 1);

@@ -4097,11 +4079,11 @@
   } else {
     ShouldNotReachHere();
   }
 
 #ifndef AARCH64
-  if (gen_volatile_check) {
+  {
     // Check for volatile load
     Label notVolatile;
     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
 
     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
< prev index next >