// NOTE(review): excerpt tail — the enclosing generator and the start of this
// switch are above the visible view. It restores the to-be-stored TOS value
// that was saved around a JVMTI fast-field-modification callback; L2 is the
// "no notification needed" skip target (its definition is not visible here).
2905 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
2906 case Bytecodes::_fast_dputfield: __ pop_d(); break;
2907 case Bytecodes::_fast_fputfield: __ pop_f(); break;
2908 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
// int/long pops target r0; pop_d/pop_f take no GPR operand — presumably a
// floating-point TOS register, not confirmable from this excerpt.
2909 }
2910 __ bind(L2);
2911 }
2912 }
2913
2914 void TemplateTable::fast_storefield(TosState state)
2915 {
2916 transition(state, vtos);
2917
2918 ByteSize base = ConstantPoolCache::base_offset();
2919
2920 jvmti_post_fast_field_mod();
2921
2922 // access constant pool cache
2923 __ get_cache_and_index_at_bcp(r2, r1, 1);
2924
2925 // Must prevent reordering of the following cp cache loads with bytecode load
2926 __ membar(MacroAssembler::LoadLoad);
2927
2928 // test for volatile with r3
2929 __ ldrw(r3, Address(r2, in_bytes(base +
2930 ConstantPoolCacheEntry::flags_offset())));
2931
2932 // replace index with field offset from cache entry
2933 __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2934
2935 {
2936 Label notVolatile;
2937 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2938 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2939 __ bind(notVolatile);
2940 }
2941
2942 Label notVolatile;
2943
2944 // Get object from stack
2945 pop_and_check_object(r2);
2946
2947 // field address
// NOTE(review): excerpt fragment — the `if` / Label L1 that opens this JVMTI
// field-access-notification guard is above the visible view. Fast path: load
// the global field-access count and skip the runtime call (branch to L1)
// while it is zero, i.e. while no JVMTI agent has requested access events.
2996 __ lea(rscratch1, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
2997 __ ldrw(r2, Address(rscratch1));
2998 __ cbzw(r2, L1);
2999 // access constant pool cache entry
3000 __ get_cache_entry_pointer_at_bcp(c_rarg2, rscratch2, 1);
3001 __ verify_oop(r0);
3002 __ push_ptr(r0); // save object pointer before call_VM() clobbers it
3003 __ mov(c_rarg1, r0);
3004 // c_rarg1: object pointer copied above
3005 // c_rarg2: cache entry pointer
3006 __ call_VM(noreg,
3007 CAST_FROM_FN_PTR(address,
3008 InterpreterRuntime::post_field_access),
3009 c_rarg1, c_rarg2);
3010 __ pop_ptr(r0); // restore object pointer
3011 __ bind(L1);
3012 }
3016
3017 // access constant pool cache
3018 __ get_cache_and_index_at_bcp(r2, r1, 1);
3019
3020 // Must prevent reordering of the following cp cache loads with bytecode load
3021 __ membar(MacroAssembler::LoadLoad);
3022
3023 __ ldr(r1, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3024 ConstantPoolCacheEntry::f2_offset())));
3025 __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3026 ConstantPoolCacheEntry::flags_offset())));
3027
3028 // r0: object
3029 __ verify_oop(r0);
3030 __ null_check(r0);
3031 const Address field(r0, r1);
3032
3033 // 8179954: We need to make sure that the code generated for
3034 // volatile accesses forms a sequentially-consistent set of
3035 // operations when combined with STLR and LDAR. Without a leading
3036 // membar it's possible for a simple Dekker test to fail if loads
3037 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3038 // the stores in one method and we interpret the loads in another.
3039 if (! UseBarriersForVolatile) {
3040 Label notVolatile;
3041 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3042 __ membar(MacroAssembler::AnyAny);
|
// NOTE(review): excerpt tail — the enclosing generator and the start of this
// switch are above the visible view. It restores the to-be-stored TOS value
// that was saved around a JVMTI fast-field-modification callback; L2 is the
// "no notification needed" skip target (its definition is not visible here).
2905 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
2906 case Bytecodes::_fast_dputfield: __ pop_d(); break;
2907 case Bytecodes::_fast_fputfield: __ pop_f(); break;
2908 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
// int/long pops target r0; pop_d/pop_f take no GPR operand — presumably a
// floating-point TOS register, not confirmable from this excerpt.
2909 }
2910 __ bind(L2);
2911 }
2912 }
2913
// Generates the interpreter stub for the _fast_*putfield bytecodes (fast
// field store). The body continues past the edge of this excerpt — only the
// cp-cache lookup and the pre-store volatile barrier are visible here.
2914 void TemplateTable::fast_storefield(TosState state)
2915 {
2916 transition(state, vtos);
2917
2918 ByteSize base = ConstantPoolCache::base_offset();
2919
2920 jvmti_post_fast_field_mod();
2921
2922 // access constant pool cache
2923 __ get_cache_and_index_at_bcp(r2, r1, 1);
2924
2925 // Must prevent reordering of the following cp cache loads with bytecode load
2926 __ membar(MacroAssembler::LoadLoad);
2927
2928 // test for volatile with r3
2929 __ ldrw(r3, Address(r2, in_bytes(base +
2930 ConstantPoolCacheEntry::flags_offset())));
2931
2932 // replace index with field offset from cache entry
2933 __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2934
2935 {
// For a volatile field, emit a leading StoreStore|LoadStore barrier so prior
// accesses cannot be reordered past the upcoming field store.
2936 Label notVolatile;
2937 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2938 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2939 __ bind(notVolatile);
2940 }
2941
// NOTE(review): a second notVolatile label — presumably bound after the
// store, below this excerpt; confirm against the full function.
2942 Label notVolatile;
2943
2944 // Get object from stack
2945 pop_and_check_object(r2);
2946
2947 // field address
// NOTE(review): excerpt fragment — the `if` / Label L1 that opens this JVMTI
// field-access-notification guard is above the visible view. Fast path: load
// the global field-access count and skip the runtime call (branch to L1)
// while it is zero, i.e. while no JVMTI agent has requested access events.
2999 __ lea(rscratch1, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3000 __ ldrw(r2, Address(rscratch1));
3001 __ cbzw(r2, L1);
3002 // access constant pool cache entry
3003 __ get_cache_entry_pointer_at_bcp(c_rarg2, rscratch2, 1);
3004 __ verify_oop(r0);
3005 __ push_ptr(r0); // save object pointer before call_VM() clobbers it
3006 __ mov(c_rarg1, r0);
3007 // c_rarg1: object pointer copied above
3008 // c_rarg2: cache entry pointer
3009 __ call_VM(noreg,
3010 CAST_FROM_FN_PTR(address,
3011 InterpreterRuntime::post_field_access),
3012 c_rarg1, c_rarg2);
3013 __ pop_ptr(r0); // restore object pointer
3014 __ bind(L1);
3015 }
// NOTE(review): mid-function excerpt (presumably a fast field-access
// generator — the entry point and the code past the AnyAny barrier lie
// outside this view). r1 receives the field offset (f2), r3 the entry flags.
3016
3017 // access constant pool cache
3018 __ get_cache_and_index_at_bcp(r2, r1, 1);
3019
3020 // Must prevent reordering of the following cp cache loads with bytecode load
3021 __ membar(MacroAssembler::LoadLoad);
3022
3023 __ ldr(r1, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3024 ConstantPoolCacheEntry::f2_offset())));
3025 __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3026 ConstantPoolCacheEntry::flags_offset())));
3027
3028 // r0: object
3029 __ verify_oop(r0);
3030 __ null_check(r0);
3031 const Address field(r0, r1);
3032
3033 // 8179954: We need to make sure that the code generated for
3034 // volatile accesses forms a sequentially-consistent set of
3035 // operations when combined with STLR and LDAR. Without a leading
3036 // membar it's possible for a simple Dekker test to fail if loads
3037 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3038 // the stores in one method and we interpret the loads in another.
// Only needed when not using explicit DMB barriers for volatiles; the
// full barrier is skipped entirely for non-volatile fields (tbz on flags).
3039 if (! UseBarriersForVolatile) {
3040 Label notVolatile;
3041 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3042 __ membar(MacroAssembler::AnyAny);
|