src/hotspot/cpu/arm/templateTable_arm.cpp

*** 3143,3161 ****
    const Register Rcache = R4_tmp;
    const Register Rflagsav = Rtmp_save0; // R4/R19
    const Register Rindex = R5_tmp;
    const Register Rflags = R5_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
    jvmti_post_field_access(Rcache, Rindex, is_static, false);
    load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
  
-   if (gen_volatile_check) {
      __ mov(Rflagsav, Rflags);
-   }
  
    if (!is_static) pop_and_check_object(Robj);
  
    Label Done, Lint, Ltable, shouldNotReachHere;
    Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
--- 3143,3157 ----
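Every hunk in this change has the same shape: the gen_volatile_check flag, initialized from os::is_MP(), disappears, and the barrier-emitting code it guarded becomes unconditional (each guarded if turning into a bare scope block, or vanishing entirely as above), presumably because the runtime now always assumes a multiprocessor system. The sketch below is a toy model of that pattern, not HotSpot code; is_mp, emit, and the instruction strings are invented for illustration:

#include <cstdio>

static bool is_mp() { return true; }   // the runtime now always assumes MP

static void emit(const char* insn) { std::puts(insn); }

// Pattern before the patch: barrier emission decided at code-gen time.
static void gen_volatile_load_old() {
  const bool gen_volatile_check = is_mp();
  emit("ldr  r0, [r1]");
  if (gen_volatile_check) {
    emit("dmb  ish");                  // barrier emitted only on MP systems
  }
}

// Pattern after the patch: the barrier is always emitted.
static void gen_volatile_load_new() {
  emit("ldr  r0, [r1]");
  emit("dmb  ish");
}

int main() {
  gen_volatile_load_old();
  gen_volatile_load_new();
  return 0;
}

Where the guarded block declared a label, the bare { ... } block survives in the patched code so that each Label notVolatile; stays locally scoped.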
*** 3388,3398 ****
      patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
    }
  
    __ bind(Done);
  
!   if (gen_volatile_check) {
      // Check for volatile field
      Label notVolatile;
      __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
--- 3384,3394 ----
      patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
    }
  
    __ bind(Done);
  
!   {
      // Check for volatile field
      Label notVolatile;
      __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
*** 3489,3505 ****
    const Register Rcache = R4_tmp;
    const Register Rflagsav = Rtmp_save0; // R4/R19
    const Register Rindex = R5_tmp;
    const Register Rflags = R5_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
    jvmti_post_field_mod(Rcache, Rindex, is_static);
    load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
  
!   if (gen_volatile_check) {
      // Check for volatile field
      Label notVolatile;
      __ mov(Rflagsav, Rflags);
  
      __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
--- 3485,3499 ----
    const Register Rcache = R4_tmp;
    const Register Rflagsav = Rtmp_save0; // R4/R19
    const Register Rindex = R5_tmp;
    const Register Rflags = R5_tmp;
  
    resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
    jvmti_post_field_mod(Rcache, Rindex, is_static);
    load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
  
!   {
      // Check for volatile field
      Label notVolatile;
      __ mov(Rflagsav, Rflags);
  
      __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
*** 3730,3740 ****
      patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
    }
  
    __ bind(Done);
  
!   if (gen_volatile_check) {
      Label notVolatile;
      if (is_static) {
        // Just check for volatile. Memory barrier for static final field
        // is handled by class initialization.
        __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
--- 3724,3734 ----
      patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
    }
  
    __ bind(Done);
  
!   {
      Label notVolatile;
      if (is_static) {
        // Just check for volatile. Memory barrier for static final field
        // is handled by class initialization.
        __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
*** 3829,3854 ****
    const Register Rindex = R3_tmp;
    const Register Roffset = R3_tmp;
    const Register Rflags = Rtmp_save0; // R4/R19
    const Register Robj = R5_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
  
    __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
  
-   if (gen_volatile_check) {
      // load flags to test volatile
      __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
-   }
  
    // replace index with field offset from cache entry
    __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
  
!   if (gen_volatile_check) {
      // Check for volatile store
      Label notVolatile;
      __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explict barrier
--- 3823,3844 ----
    const Register Rindex = R3_tmp;
    const Register Roffset = R3_tmp;
    const Register Rflags = Rtmp_save0; // R4/R19
    const Register Robj = R5_tmp;
  
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
  
    __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
  
    // load flags to test volatile
    __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
  
    // replace index with field offset from cache entry
    __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
  
!   {
      // Check for volatile store
      Label notVolatile;
      __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explict barrier
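For readers unfamiliar with the interpreter's constant pool cache: the two loads in this hunk pull the flags word and the resolved field offset out of a cache entry, and tbz then branches on a single bit of the flags. A self-contained sketch of that bit test follows; the struct layout and the shift value are hypothetical, the real ConstantPoolCacheEntry differs:

#include <cstdint>

// Hypothetical layout for illustration only.
struct CacheEntry {
  uintptr_t f2;       // resolved field offset within the object
  uint32_t  flags;    // resolution flags, including the volatile bit
};

// Assumed bit position; the actual value of
// ConstantPoolCacheEntry::is_volatile_shift depends on the JDK version.
const int is_volatile_shift = 21;

// What the tbz in the hunk above tests: branch taken if the bit is zero.
bool is_volatile(const CacheEntry& e) {
  return ((e.flags >> is_volatile_shift) & 1u) != 0;
}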
*** 3900,3910 ****
      default:
        ShouldNotReachHere();
    }
  
!   if (gen_volatile_check) {
      Label notVolatile;
      Label skipMembar;
  
      __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
             1 << ConstantPoolCacheEntry::is_final_shift);
      __ b(skipMembar, eq);
--- 3890,3900 ----
      default:
        ShouldNotReachHere();
    }
  
!   {
      Label notVolatile;
      Label skipMembar;
  
      __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
             1 << ConstantPoolCacheEntry::is_final_shift);
      __ b(skipMembar, eq);
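The tst here checks the volatile and final bits together because both field kinds need a trailing barrier after a put, just different ones: a StoreLoad barrier after a volatile store, a StoreStore barrier after a final-field store before the object is published. As a portable analogy in std::atomic terms (an assumption about equivalent ordering, not the emitted ARM code):

#include <atomic>

std::atomic<int> volatile_field{0};

// Volatile store: needs a trailing StoreLoad fence, i.e. full
// sequential consistency, so no later load can pass the store.
void put_volatile(int v) {
  volatile_field.store(v, std::memory_order_seq_cst);
}

// Final-field publication: earlier initializing stores must not be
// reordered past the publishing store (a StoreStore constraint),
// which release ordering provides.
void publish(std::atomic<int*>& slot, int* obj) {
  slot.store(obj, std::memory_order_release);
}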
*** 3951,3972 ****
    const Register Rcache = R2_tmp;
    const Register Rflags = R2_tmp;
    const Register Rindex = R3_tmp;
    const Register Roffset = R3_tmp;
  
-   const bool gen_volatile_check = os::is_MP();
- 
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
  
    // replace index with field offset from cache entry
    __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
    __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
  
-   if (gen_volatile_check) {
      // load flags to test volatile
      __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
-   }
  
    __ verify_oop(Robj);
    __ null_check(Robj, Rtemp);
    Address addr = Address(Robj, Roffset);
--- 3941,3958 ----
*** 4005,4015 ****
        break;
      default:
        ShouldNotReachHere();
    }
  
!   if (gen_volatile_check) {
      // Check for volatile load
      Label notVolatile;
      __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explict barrier
--- 3991,4001 ----
        break;
      default:
        ShouldNotReachHere();
    }
  
!   {
      // Check for volatile load
      Label notVolatile;
      __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explict barrier
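The load-side counterpart: the LoadLoad | LoadStore barrier planted after the volatile load in this hunk is acquire ordering, which is why the TODO suggests a load-acquire instruction on AArch64. In portable terms (an analogy, not the generated code):

#include <atomic>

std::atomic<int> field{0};

// Acquire load: no subsequent load or store may be hoisted above it,
// matching the LoadLoad | LoadStore barrier in the hunk above.
int get_volatile() {
  return field.load(std::memory_order_acquire);
}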
*** 4036,4051 ****
    // access constant pool cache
    __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
    __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
    __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
  
-   const bool gen_volatile_check = os::is_MP();
- 
-   if (gen_volatile_check) {
      // load flags to test volatile
      __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
-   }
  
    // make sure exception is reported in correct bcp range (getfield is next instruction)
    __ add(Rbcp, Rbcp, 1);
    __ null_check(Robj, Rtemp);
    __ sub(Rbcp, Rbcp, 1);
--- 4022,4033 ----
*** 4097,4107 ****
    } else {
      ShouldNotReachHere();
    }
  
  #ifndef AARCH64
!   if (gen_volatile_check) {
      // Check for volatile load
      Label notVolatile;
      __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
--- 4079,4089 ----
    } else {
      ShouldNotReachHere();
    }
  
  #ifndef AARCH64
!   {
      // Check for volatile load
      Label notVolatile;
      __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
  
      volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);