
src/hotspot/cpu/arm/templateTable_arm.cpp

*** 3143,3161 ****
    const Register Rcache = R4_tmp;
    const Register Rflagsav = Rtmp_save0;  // R4/R19
    const Register Rindex = R5_tmp;
    const Register Rflags = R5_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
    jvmti_post_field_access(Rcache, Rindex, is_static, false);
    load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
  
-   if (gen_volatile_check) {
    __ mov(Rflagsav, Rflags);
-   }
  
    if (!is_static) pop_and_check_object(Robj);
  
    Label Done, Lint, Ltable, shouldNotReachHere;
    Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
--- 3143,3157 ----
*** 3388,3407 ****
      patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
    }
  
    __ bind(Done);
  
-   if (gen_volatile_check) {
    // Check for volatile field
    Label notVolatile;
    __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
    volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore),
                     Rtemp);
  
    __ bind(notVolatile);
-   }
- 
  }
  
  void TemplateTable::getfield(int byte_no) {
    getfield_or_static(byte_no, false);
  }
--- 3384,3400 ----
*** 3489,3514 ****
    const Register Rcache = R4_tmp;
    const Register Rflagsav = Rtmp_save0;  // R4/R19
    const Register Rindex = R5_tmp;
    const Register Rflags = R5_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
    jvmti_post_field_mod(Rcache, Rindex, is_static);
    load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
  
-   if (gen_volatile_check) {
    // Check for volatile field
    Label notVolatile;
    __ mov(Rflagsav, Rflags);
    __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
    volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore),
                     Rtemp);
  
    __ bind(notVolatile);
-   }
  
    Label Done, Lint, shouldNotReachHere;
    Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
  
    // compute type
--- 3482,3503 ----
*** 3730,3769 ****
      patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
    }
  
    __ bind(Done);
  
!   if (gen_volatile_check) {
!     Label notVolatile;
    if (is_static) {
      // Just check for volatile. Memory barrier for static final field
      // is handled by class initialization.
!     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
      volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
!     __ bind(notVolatile);
    } else {
      // Check for volatile field and final field
      Label skipMembar;
  
      __ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
             1 << ConstantPoolCacheEntry::is_final_shift);
      __ b(skipMembar, eq);
  
!     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      // StoreLoad barrier after volatile field write
      volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
      __ b(skipMembar);
  
      // StoreStore barrier after final field write
!     __ bind(notVolatile);
      volatile_barrier(MacroAssembler::StoreStore, Rtemp);
  
      __ bind(skipMembar);
    }
-   }
- 
  }
  
  void TemplateTable::putfield(int byte_no) {
    putfield_or_static(byte_no, false);
  }
--- 3719,3755 ----
      patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
    }
  
    __ bind(Done);
  
!   Label notVolatile2;
    if (is_static) {
      // Just check for volatile. Memory barrier for static final field
      // is handled by class initialization.
!     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
      volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
!     __ bind(notVolatile2);
    } else {
      // Check for volatile field and final field
      Label skipMembar;
  
      __ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
             1 << ConstantPoolCacheEntry::is_final_shift);
      __ b(skipMembar, eq);
  
!     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
  
      // StoreLoad barrier after volatile field write
      volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
      __ b(skipMembar);
  
      // StoreStore barrier after final field write
!     __ bind(notVolatile2);
      volatile_barrier(MacroAssembler::StoreStore, Rtemp);
  
      __ bind(skipMembar);
    }
  }
  
  void TemplateTable::putfield(int byte_no) {
    putfield_or_static(byte_no, false);
  }
*** 3829,3863 ****
    const Register Rindex = R3_tmp;
    const Register Roffset = R3_tmp;
    const Register Rflags = Rtmp_save0;  // R4/R19
    const Register Robj = R5_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
    __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
  
-   if (gen_volatile_check) {
    // load flags to test volatile
    __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
-   }
  
    // replace index with field offset from cache entry
    __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
  
-   if (gen_volatile_check) {
    // Check for volatile store
    Label notVolatile;
    __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
    // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explict barrier
    volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore),
                     Rtemp);
  
    __ bind(notVolatile);
-   }
  
    // Get object from stack
    pop_and_check_object(Robj);
  
    Address addr = Address(Robj, Roffset);
--- 3815,3843 ----
*** 3900,3931 ****
    default:
      ShouldNotReachHere();
    }
  
!   if (gen_volatile_check) {
!     Label notVolatile;
    Label skipMembar;
  
    __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
           1 << ConstantPoolCacheEntry::is_final_shift);
    __ b(skipMembar, eq);
  
!   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
    // StoreLoad barrier after volatile field write
    volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
    __ b(skipMembar);
  
    // StoreStore barrier after final field write
!   __ bind(notVolatile);
    volatile_barrier(MacroAssembler::StoreStore, Rtemp);
  
    __ bind(skipMembar);
-   }
  }
- 
  
  void TemplateTable::fast_accessfield(TosState state) {
    transition(atos, state);
  
    // do the JVMTI work here to avoid disturbing the register state below
    if (__ can_post_field_access()) {
--- 3880,3908 ----
    default:
      ShouldNotReachHere();
    }
  
!   Label notVolatile2;
    Label skipMembar;
  
    __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
           1 << ConstantPoolCacheEntry::is_final_shift);
    __ b(skipMembar, eq);
  
!   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
  
    // StoreLoad barrier after volatile field write
    volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
    __ b(skipMembar);
  
    // StoreStore barrier after final field write
!   __ bind(notVolatile2);
    volatile_barrier(MacroAssembler::StoreStore, Rtemp);
  
    __ bind(skipMembar);
  }
  
  void TemplateTable::fast_accessfield(TosState state) {
    transition(atos, state);
  
    // do the JVMTI work here to avoid disturbing the register state below
    if (__ can_post_field_access()) {
*** 3951,3972 ****
    const Register Rcache = R2_tmp;
    const Register Rflags = R2_tmp;
    const Register Rindex = R3_tmp;
    const Register Roffset = R3_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
    // replace index with field offset from cache entry
    __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
    __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
  
-   if (gen_volatile_check) {
    // load flags to test volatile
    __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
-   }
  
    __ verify_oop(Robj);
    __ null_check(Robj, Rtemp);
  
    Address addr = Address(Robj, Roffset);
--- 3928,3945 ----
*** 4005,4024 ****
      break;
    default:
      ShouldNotReachHere();
    }
  
-   if (gen_volatile_check) {
    // Check for volatile load
    Label notVolatile;
    __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
    // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explict barrier
    volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore),
                     Rtemp);
  
    __ bind(notVolatile);
-   }
  }
  
  void TemplateTable::fast_xaccess(TosState state) {
    transition(vtos, state);
--- 3978,3995 ----
*** 4036,4059 ****
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
    __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
    __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
  
-   const bool gen_volatile_check = os::is_MP();
- 
-   if (gen_volatile_check) {
    // load flags to test volatile
    __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
-   }
  
    // make sure exception is reported in correct bcp range (getfield is next instruction)
    __ add(Rbcp, Rbcp, 1);
    __ null_check(Robj, Rtemp);
    __ sub(Rbcp, Rbcp, 1);
  
  #ifdef AARCH64
  
-   if (gen_volatile_check) {
    Label notVolatile;
    __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
    __ add(Rtemp, Robj, Roffset);
--- 4007,4025 ----
*** 4074,4084 ****
      ShouldNotReachHere();
    }
    __ b(done);
  
    __ bind(notVolatile);
-   }
  #endif // AARCH64
  
    if (state == itos) {
      __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
    } else if (state == atos) {
--- 4040,4049 ----
*** 4097,4115 ****
    } else {
      ShouldNotReachHere();
    }
  
  #ifndef AARCH64
-   if (gen_volatile_check) {
    // Check for volatile load
    Label notVolatile;
    __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
    volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore),
                     Rtemp);
  
    __ bind(notVolatile);
-   }
  #endif // !AARCH64
  
    __ bind(done);
  }
--- 4062,4078 ----
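
Taken together, these hunks drop the gen_volatile_check / os::is_MP() guard, so the interpreter now always emits the volatile/final barrier checks around field accesses. Below is a minimal standalone sketch of the barrier-selection rule the putfield paths above implement; the enum, shift values, and helper name are hypothetical stand-ins for MacroAssembler::Membar_mask_bits and ConstantPoolCacheEntry, and only the decision logic is taken from the diff:

#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for MacroAssembler::Membar_mask_bits values.
enum MembarMask : uint32_t {
  NoBarrier  = 0,
  StoreLoad  = 1u << 0,
  StoreStore = 1u << 1
};

// Illustrative bit positions; HotSpot's ConstantPoolCacheEntry defines the
// real is_volatile_shift / is_final_shift values.
constexpr int is_volatile_shift = 21;
constexpr int is_final_shift    = 22;

// Mirrors the non-static putfield path above: a volatile write is followed
// by a StoreLoad barrier, a final-field write by a StoreStore barrier, and
// an ordinary write needs no trailing barrier.
MembarMask barrier_after_field_write(uint32_t flags) {
  if (flags & (1u << is_volatile_shift)) return StoreLoad;
  if (flags & (1u << is_final_shift))    return StoreStore;
  return NoBarrier;
}

int main() {
  std::printf("volatile: %u\n", barrier_after_field_write(1u << is_volatile_shift)); // 1 (StoreLoad)
  std::printf("final:    %u\n", barrier_after_field_write(1u << is_final_shift));    // 2 (StoreStore)
  std::printf("plain:    %u\n", barrier_after_field_write(0));                       // 0 (NoBarrier)
  return 0;
}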