src/hotspot/cpu/arm/templateTable_arm.cpp


--- old/src/hotspot/cpu/arm/templateTable_arm.cpp

3128 }
3129 
3130 
3131 void TemplateTable::pop_and_check_object(Register r) {
3132   __ pop_ptr(r);
3133   __ null_check(r, Rtemp);  // for field access, obj must be non-null.
3134   __ verify_oop(r);
3135 }
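
A rough C++ picture of what pop_and_check_object does (illustrative only; Stack, throw_null_pointer_exception and assert_is_valid_oop are hypothetical stand-ins for the interpreter's expression stack and runtime checks):

    struct Object;
    struct Stack { Object* pop_ptr(); };
    void throw_null_pointer_exception();    // hypothetical: raises NPE
    void assert_is_valid_oop(Object* obj);  // hypothetical: debug-only oop check

    Object* pop_and_check_object_sketch(Stack& s) {
      Object* obj = s.pop_ptr();                           // pop_ptr(r)
      if (obj == nullptr) throw_null_pointer_exception();  // null_check(r, Rtemp)
      assert_is_valid_oop(obj);                            // verify_oop(r)
      return obj;
    }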
3136 
3137 
3138 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3139   transition(vtos, vtos);
3140 
3141   const Register Roffset  = R2_tmp;
3142   const Register Robj     = R3_tmp;
3143   const Register Rcache   = R4_tmp;
3144   const Register Rflagsav = Rtmp_save0;  // R4/R19
3145   const Register Rindex   = R5_tmp;
3146   const Register Rflags   = R5_tmp;
3147 
3148   const bool gen_volatile_check = os::is_MP();
3149 
3150   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3151   jvmti_post_field_access(Rcache, Rindex, is_static, false);
3152   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3153 
3154   if (gen_volatile_check) {
3155     __ mov(Rflagsav, Rflags);
3156   }
3157 
3158   if (!is_static) pop_and_check_object(Robj);
3159 
3160   Label Done, Lint, Ltable, shouldNotReachHere;
3161   Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3162 
3163   // compute type
3164   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3165   // Make sure we don't need to mask flags after the above shift
3166   ConstantPoolCacheEntry::verify_tos_state_shift();
3167 
3168   // There are actually two implementations of getfield/getstatic:
3169   //
3170   // 32-bit ARM:
3171   // 1) Table switch using add(PC,...) instruction (fast_version)
3172   // 2) Table switch using ldr(PC,...) instruction
3173   //
3174   // AArch64:
3175   // 1) Table switch using adr/add/br instructions (fast_version)
3176   // 2) Table switch using adr/ldr/br instructions
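
For readers unfamiliar with the pattern, the second (ldr-based) variant is conceptually an indirect jump through a table of code addresses; a minimal C++ analogue with hypothetical handler names:

    #include <cstdio>

    using Handler = void (*)();
    static void load_btos() { std::puts("btos"); }
    static void load_itos() { std::puts("itos"); }
    // ... one entry per TosState, in tos_state order ...
    static Handler table[] = { load_btos, load_itos };

    void dispatch(unsigned tos_state) {
      table[tos_state]();  // ~ ldr(PC, [table_base, tos_state, lsl #log2(wordSize)])
    }

The fast_version instead pads every case to the same size so the target can be computed by adding a scaled tos_state directly to PC, saving the table load at the cost of code density.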


3373   }
3374 
3375   assert(vtos == seq++, "vtos has unexpected value");
3376 
3377   __ bind(shouldNotReachHere);
3378   __ should_not_reach_here();
3379 
3380   // itos and atos cases are frequent so it makes sense to move them out of table switch
3381   // atos case can be merged with itos case (and thus moved out of table switch) on 32-bit ARM, fast version only
3382 
3383   __ bind(Lint);
3384   __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
3385   __ push(itos);
3386   // Rewrite bytecode to be faster
3387   if (!is_static && rc == may_rewrite) {
3388     patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
3389   }
3390 
3391   __ bind(Done);
3392 
3393   if (gen_volatile_check) {
3394     // Check for volatile field
3395     Label notVolatile;
3396     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3397 
3398     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3399 
3400     __ bind(notVolatile);
3401   }
3402 
3403 }
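
The LoadLoad|LoadStore barrier emitted above after a volatile read is what gives the load acquire semantics. An analogue in standard C++ (this expresses the memory-model intent, not the instructions actually emitted):

    #include <atomic>

    int read_volatile_field(const std::atomic<int>& field) {
      int v = field.load(std::memory_order_relaxed);        // the field load
      std::atomic_thread_fence(std::memory_order_acquire);  // ~ dmb (LoadLoad|LoadStore)
      return v;
    }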
3404 
3405 void TemplateTable::getfield(int byte_no) {
3406   getfield_or_static(byte_no, false);
3407 }
3408 
3409 void TemplateTable::nofast_getfield(int byte_no) {
3410   getfield_or_static(byte_no, false, may_not_rewrite);
3411 }
3412 
3413 void TemplateTable::getstatic(int byte_no) {


3474     // R3: value object on the stack
3475     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
3476                R1, R2, R3);
3477     __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3478 
3479     __ bind(Lcontinue);
3480   }
3481 }
3482 
3483 
3484 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3485   transition(vtos, vtos);
3486 
3487   const Register Roffset  = R2_tmp;
3488   const Register Robj     = R3_tmp;
3489   const Register Rcache   = R4_tmp;
3490   const Register Rflagsav = Rtmp_save0;  // R4/R19
3491   const Register Rindex   = R5_tmp;
3492   const Register Rflags   = R5_tmp;
3493 
3494   const bool gen_volatile_check = os::is_MP();
3495 
3496   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3497   jvmti_post_field_mod(Rcache, Rindex, is_static);
3498   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3499 
3500   if (gen_volatile_check) {
3501     // Check for volatile field
3502     Label notVolatile;
3503     __ mov(Rflagsav, Rflags);
3504     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3505 
3506     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3507 
3508     __ bind(notVolatile);
3509   }
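
Symmetrically, the StoreStore|LoadStore barrier emitted here, before the value is written, gives the store release semantics; in standard C++ terms (illustrative only):

    #include <atomic>

    void write_volatile_field(std::atomic<int>& field, int v) {
      std::atomic_thread_fence(std::memory_order_release);  // ~ dmb (StoreStore|LoadStore)
      field.store(v, std::memory_order_relaxed);            // the field store
    }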
3510 
3511   Label Done, Lint, shouldNotReachHere;
3512   Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3513 
3514   // compute type
3515   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3516   // Make sure we don't need to mask flags after the above shift
3517   ConstantPoolCacheEntry::verify_tos_state_shift();
3518 
3519   // There are actually two implementations of putfield/putstatic:
3520   //


3715     if (!is_static && rc == may_rewrite) {
3716       patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3717     }
3718     __ b(Done);
3719   }
3720 
3721   __ bind(shouldNotReachHere);
3722   __ should_not_reach_here();
3723 
3724   // itos case is frequent and is moved outside table switch
3725   __ bind(Lint);
3726   __ pop(itos);
3727   if (!is_static) pop_and_check_object(Robj);
3728   __ access_store_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
3729   if (!is_static && rc == may_rewrite) {
3730     patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3731   }
3732 
3733   __ bind(Done);
3734 
3735   if (gen_volatile_check) {
3736     Label notVolatile;
3737     if (is_static) {
3738       // Just check for volatile. Memory barrier for static final field
3739       // is handled by class initialization.
3740       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3741       volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3742       __ bind(notVolatile);
3743     } else {
3744       // Check for volatile field and final field
3745       Label skipMembar;
3746 
3747       __ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3748                        1 << ConstantPoolCacheEntry::is_final_shift);
3749       __ b(skipMembar, eq);
3750 
3751       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3752 
3753       // StoreLoad barrier after volatile field write
3754       volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3755       __ b(skipMembar);


3814     __ pop(state);                // restore value
3815 
3816     __ bind(done);
3817   }
3818 }
3819 
3820 
3821 void TemplateTable::fast_storefield(TosState state) {
3822   transition(state, vtos);
3823 
3824   ByteSize base = ConstantPoolCache::base_offset();
3825 
3826   jvmti_post_fast_field_mod(state);
3827 
3828   const Register Rcache  = R2_tmp;
3829   const Register Rindex  = R3_tmp;
3830   const Register Roffset = R3_tmp;
3831   const Register Rflags  = Rtmp_save0; // R4/R19
3832   const Register Robj    = R5_tmp;
3833 
3834   const bool gen_volatile_check = os::is_MP();
3835 
3836   // access constant pool cache
3837   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3838 
3839   __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3840 
3841   if (gen_volatile_check) {
3842     // load flags to test volatile
3843     __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
3844   }
3845 
3846   // replace index with field offset from cache entry
3847   __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
3848 
3849   if (gen_volatile_check) {
3850     // Check for volatile store
3851     Label notVolatile;
3852     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3853 
3854     // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explicit barrier
3855     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3856 
3857     __ bind(notVolatile);
3858   }
3859 
3860   // Get object from stack
3861   pop_and_check_object(Robj);
3862 
3863   Address addr = Address(Robj, Roffset);
3864   // access field
3865   switch (bytecode()) {
3866     case Bytecodes::_fast_zputfield:
3867       __ access_store_at(T_BOOLEAN, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3868       break;
3869     case Bytecodes::_fast_bputfield:


3885 #else
3886     case Bytecodes::_fast_lputfield:
3887       __ access_store_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3888       break;
3889     case Bytecodes::_fast_fputfield:
3890       __ access_store_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3891       break;
3892     case Bytecodes::_fast_dputfield:
3893       __ access_store_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3894       break;
3895 #endif // AARCH64
3896 
3897     case Bytecodes::_fast_aputfield:
3898       do_oop_store(_masm, addr, R0_tos, Rtemp, R1_tmp, R2_tmp, false);
3899       break;
3900 
3901     default:
3902       ShouldNotReachHere();
3903   }
3904 
3905   if (gen_volatile_check) {
3906     Label notVolatile;
3907     Label skipMembar;
3908     __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3909                    1 << ConstantPoolCacheEntry::is_final_shift);
3910     __ b(skipMembar, eq);
3911 
3912     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3913 
3914     // StoreLoad barrier after volatile field write
3915     volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3916     __ b(skipMembar);
3917 
3918     // StoreStore barrier after final field write
3919     __ bind(notVolatile);
3920     volatile_barrier(MacroAssembler::StoreStore, Rtemp);
3921 
3922     __ bind(skipMembar);
3923   }
3924 }
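
The trailing-barrier logic above selects at most one membar per store. A compact restatement of that decision (helper names hypothetical; the real shift constants live in cpCache.hpp):

    #include <cstdint>

    void storeload_barrier();   // hypothetical: full StoreLoad membar
    void storestore_barrier();  // hypothetical: StoreStore membar

    void post_store_barrier(uint32_t flags, int volatile_shift, int final_shift) {
      if ((flags & ((1u << volatile_shift) | (1u << final_shift))) == 0)
        return;                  // tst ...; b(skipMembar, eq)
      if (flags & (1u << volatile_shift))
        storeload_barrier();     // volatile write must not pass later reads
      else
        storestore_barrier();    // final-field writes must be visible before publication
    }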
3925 


3936     __ cbz(R2, done);
3937     // access constant pool cache entry
3938     __ get_cache_entry_pointer_at_bcp(R2, R1, 1);
3939     __ push_ptr(R0_tos);  // save object pointer before call_VM() clobbers it
3940     __ verify_oop(R0_tos);
3941     __ mov(R1, R0_tos);
3942     // R1: object pointer copied above
3943     // R2: cache entry pointer
3944     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), R1, R2);
3945     __ pop_ptr(R0_tos);   // restore object pointer
3946 
3947     __ bind(done);
3948   }
3949 
3950   const Register Robj    = R0_tos;
3951   const Register Rcache  = R2_tmp;
3952   const Register Rflags  = R2_tmp;
3953   const Register Rindex  = R3_tmp;
3954   const Register Roffset = R3_tmp;
3955 
3956   const bool gen_volatile_check = os::is_MP();
3957 
3958   // access constant pool cache
3959   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3960   // replace index with field offset from cache entry
3961   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3962   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
3963 
3964   if (gen_volatile_check) {
3965     // load flags to test volatile
3966     __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
3967   }
3968 
3969   __ verify_oop(Robj);
3970   __ null_check(Robj, Rtemp);
3971 
3972   Address addr = Address(Robj, Roffset);
3973   // access field
3974   switch (bytecode()) {
3975     case Bytecodes::_fast_bgetfield:
3976       __ access_load_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3977       break;
3978     case Bytecodes::_fast_sgetfield:
3979       __ access_load_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3980       break;
3981     case Bytecodes::_fast_cgetfield:
3982       __ access_load_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3983       break;
3984     case Bytecodes::_fast_igetfield:
3985       __ access_load_at(T_INT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3986       break;
3987 #ifdef AARCH64


3990     case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, addr); break;
3991 #else
3992     case Bytecodes::_fast_lgetfield:
3993       __ access_load_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3994       break;
3995     case Bytecodes::_fast_fgetfield:
3996       __ access_load_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3997       break;
3998     case Bytecodes::_fast_dgetfield:
3999       __ access_load_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg);
4000       break;
4001 #endif // AARCH64
4002     case Bytecodes::_fast_agetfield:
4003       do_oop_load(_masm, R0_tos, addr);
4004       __ verify_oop(R0_tos);
4005       break;
4006     default:
4007       ShouldNotReachHere();
4008   }
4009 
4010   if (gen_volatile_check) {
4011     // Check for volatile load
4012     Label notVolatile;
4013     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4014 
4015     // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
4016     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4017 
4018     __ bind(notVolatile);
4019   }
4020 }
4021 
4022 
4023 void TemplateTable::fast_xaccess(TosState state) {
4024   transition(vtos, state);
4025 
4026   const Register Robj = R1_tmp;
4027   const Register Rcache = R2_tmp;
4028   const Register Rindex = R3_tmp;
4029   const Register Roffset = R3_tmp;
4030   const Register Rflags = R4_tmp;
4031   Label done;
4032 
4033   // get receiver
4034   __ ldr(Robj, aaddress(0));
4035 
4036   // access constant pool cache
4037   __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
4038   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
4039   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
4040 
4041   const bool gen_volatile_check = os::is_MP();
4042 
4043   if (gen_volatile_check) {
4044     // load flags to test volatile
4045     __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
4046   }
4047 
4048   // make sure exception is reported in correct bcp range (getfield is next instruction)
4049   __ add(Rbcp, Rbcp, 1);
4050   __ null_check(Robj, Rtemp);
4051   __ sub(Rbcp, Rbcp, 1);
4052 
4053 #ifdef AARCH64
4054   if (gen_volatile_check) {
4055     Label notVolatile;
4056     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4057 
4058     __ add(Rtemp, Robj, Roffset);
4059 
4060     if (state == itos) {
4061       __ ldar_w(R0_tos, Rtemp);
4062     } else if (state == atos) {
4063       if (UseCompressedOops) {
4064         __ ldar_w(R0_tos, Rtemp);
4065         __ decode_heap_oop(R0_tos);
4066       } else {


4082   if (state == itos) {
4083     __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
4084   } else if (state == atos) {
4085     do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
4086     __ verify_oop(R0_tos);
4087   } else if (state == ftos) {
4088 #ifdef AARCH64
4089     __ ldr_s(S0_tos, Address(Robj, Roffset));
4090 #else
4091 #ifdef __SOFTFP__
4092     __ ldr(R0_tos, Address(Robj, Roffset));
4093 #else
4094     __ access_load_at(T_FLOAT, IN_HEAP, Address(Robj, Roffset), noreg /* ftos */, noreg, noreg, noreg);
4095 #endif // __SOFTFP__
4096 #endif // AARCH64
4097   } else {
4098     ShouldNotReachHere();
4099   }
4100 
4101 #ifndef AARCH64
4102   if (gen_volatile_check) {
4103     // Check for volatile load
4104     Label notVolatile;
4105     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4106 
4107     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4108 
4109     __ bind(notVolatile);
4110   }
4111 #endif // !AARCH64
4112 
4113   __ bind(done);
4114 }
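
The add/sub around null_check above deserves a note: _fast_xaccess bytecodes replace an aload_0/getfield pair, so a NullPointerException must be attributed to the bcp of the getfield, one byte further on. A sketch of the intent (types hypothetical):

    #include <cstdint>

    struct Object;
    void null_check(Object* obj);  // hypothetical: may raise NPE at the current bcp

    void checked_receiver(const uint8_t*& bcp, Object* obj) {
      bcp += 1;         // pretend we already advanced to the fused getfield
      null_check(obj);  // an exception here reports the getfield's bcp
      bcp -= 1;         // restore for normal execution of the fused bytecode
    }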
4115 
4116 
4117 
4118 //----------------------------------------------------------------------------------------------------
4119 // Calls
4120 
4121 void TemplateTable::count_calls(Register method, Register temp) {
4122   // implemented elsewhere


+++ new/src/hotspot/cpu/arm/templateTable_arm.cpp

3128 }
3129 
3130 
3131 void TemplateTable::pop_and_check_object(Register r) {
3132   __ pop_ptr(r);
3133   __ null_check(r, Rtemp);  // for field access, obj must be non-null.
3134   __ verify_oop(r);
3135 }
3136 
3137 
3138 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3139   transition(vtos, vtos);
3140 
3141   const Register Roffset  = R2_tmp;
3142   const Register Robj     = R3_tmp;
3143   const Register Rcache   = R4_tmp;
3144   const Register Rflagsav = Rtmp_save0;  // R4/R19
3145   const Register Rindex   = R5_tmp;
3146   const Register Rflags   = R5_tmp;
3147 


3148   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3149   jvmti_post_field_access(Rcache, Rindex, is_static, false);
3150   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3151 

3152   __ mov(Rflagsav, Rflags);

3153 
3154   if (!is_static) pop_and_check_object(Robj);
3155 
3156   Label Done, Lint, Ltable, shouldNotReachHere;
3157   Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3158 
3159   // compute type
3160   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3161   // Make sure we don't need to mask flags after the above shift
3162   ConstantPoolCacheEntry::verify_tos_state_shift();
3163 
3164   // There are actually two implementations of getfield/getstatic:
3165   //
3166   // 32-bit ARM:
3167   // 1) Table switch using add(PC,...) instruction (fast_version)
3168   // 2) Table switch using ldr(PC,...) instruction
3169   //
3170   // AArch64:
3171   // 1) Table switch using adr/add/br instructions (fast_version)
3172   // 2) Table switch using adr/ldr/br instructions
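
The tos_state extraction a few lines above works without a mask because the field occupies the most significant bits of the flags word. A minimal sketch, with the shift value assumed (the real constant is defined in cpCache.hpp):

    #include <cstdint>

    constexpr int tos_state_shift = 28;  // assumption: 4-bit TosState in bits 31..28

    uint32_t tos_state(uint32_t flags) {
      // verify_tos_state_shift() asserts the field extends to the MSB,
      // so a logical shift right isolates it with no further masking.
      return flags >> tos_state_shift;
    }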


3369   }
3370 
3371   assert(vtos == seq++, "vtos has unexpected value");
3372 
3373   __ bind(shouldNotReachHere);
3374   __ should_not_reach_here();
3375 
3376   // itos and atos cases are frequent so it makes sense to move them out of table switch
3377   // atos case can be merged with itos case (and thus moved out of table switch) on 32-bit ARM, fast version only
3378 
3379   __ bind(Lint);
3380   __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
3381   __ push(itos);
3382   // Rewrite bytecode to be faster
3383   if (!is_static && rc == may_rewrite) {
3384     patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
3385   }
3386 
3387   __ bind(Done);
3388 
3389   {
3390     // Check for volatile field
3391     Label notVolatile;
3392     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3393 
3394     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3395 
3396     __ bind(notVolatile);
3397   }
3398 
3399 }
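
The only change to this function is the removal of the gen_volatile_check / os::is_MP() gating: the barriers are now emitted unconditionally, consistent with os::is_MP() being phased out elsewhere in HotSpot. The shape of the refactoring, in miniature (emitter names hypothetical):

    void emit_membar() { /* would emit a dmb or equivalent */ }

    // before: gated on os::is_MP() at template-generation time
    void acquire_barrier_old(bool is_mp) { if (is_mp) emit_membar(); }

    // after: always emitted; a membar is harmless on a uniprocessor
    void acquire_barrier_new() { emit_membar(); }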
3400 
3401 void TemplateTable::getfield(int byte_no) {
3402   getfield_or_static(byte_no, false);
3403 }
3404 
3405 void TemplateTable::nofast_getfield(int byte_no) {
3406   getfield_or_static(byte_no, false, may_not_rewrite);
3407 }
3408 
3409 void TemplateTable::getstatic(int byte_no) {


3470     // R3: value object on the stack
3471     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
3472                R1, R2, R3);
3473     __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3474 
3475     __ bind(Lcontinue);
3476   }
3477 }
3478 
3479 
3480 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3481   transition(vtos, vtos);
3482 
3483   const Register Roffset  = R2_tmp;
3484   const Register Robj     = R3_tmp;
3485   const Register Rcache   = R4_tmp;
3486   const Register Rflagsav = Rtmp_save0;  // R4/R19
3487   const Register Rindex   = R5_tmp;
3488   const Register Rflags   = R5_tmp;
3489 


3490   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3491   jvmti_post_field_mod(Rcache, Rindex, is_static);
3492   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3493 
3494   {
3495     // Check for volatile field
3496     Label notVolatile;
3497     __ mov(Rflagsav, Rflags);
3498     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3499 
3500     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3501 
3502     __ bind(notVolatile);
3503   }
3504 
3505   Label Done, Lint, shouldNotReachHere;
3506   Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3507 
3508   // compute type
3509   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3510   // Make sure we don't need to mask flags after the above shift
3511   ConstantPoolCacheEntry::verify_tos_state_shift();
3512 
3513   // There are actually two implementations of putfield/putstatic:
3514   //


3709     if (!is_static && rc == may_rewrite) {
3710       patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3711     }
3712     __ b(Done);
3713   }
3714 
3715   __ bind(shouldNotReachHere);
3716   __ should_not_reach_here();
3717 
3718   // itos case is frequent and is moved outside table switch
3719   __ bind(Lint);
3720   __ pop(itos);
3721   if (!is_static) pop_and_check_object(Robj);
3722   __ access_store_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
3723   if (!is_static && rc == may_rewrite) {
3724     patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3725   }
3726 
3727   __ bind(Done);
3728 
3729   {
3730     Label notVolatile;
3731     if (is_static) {
3732       // Just check for volatile. Memory barrier for static final field
3733       // is handled by class initialization.
3734       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3735       volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3736       __ bind(notVolatile);
3737     } else {
3738       // Check for volatile field and final field
3739       Label skipMembar;
3740 
3741       __ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3742                        1 << ConstantPoolCacheEntry::is_final_shift);
3743       __ b(skipMembar, eq);
3744 
3745       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3746 
3747       // StoreLoad barrier after volatile field write
3748       volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3749       __ b(skipMembar);


3808     __ pop(state);                // restore value
3809 
3810     __ bind(done);
3811   }
3812 }
3813 
3814 
3815 void TemplateTable::fast_storefield(TosState state) {
3816   transition(state, vtos);
3817 
3818   ByteSize base = ConstantPoolCache::base_offset();
3819 
3820   jvmti_post_fast_field_mod(state);
3821 
3822   const Register Rcache  = R2_tmp;
3823   const Register Rindex  = R3_tmp;
3824   const Register Roffset = R3_tmp;
3825   const Register Rflags  = Rtmp_save0; // R4/R19
3826   const Register Robj    = R5_tmp;
3827 


3828   // access constant pool cache
3829   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3830 
3831   __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3832 

3833   // load flags to test volatile
3834   __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));

3835 
3836   // replace index with field offset from cache entry
3837   __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
3838 
3839   {
3840     // Check for volatile store
3841     Label notVolatile;
3842     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3843 
3844     // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explicit barrier
3845     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3846 
3847     __ bind(notVolatile);
3848   }
3849 
3850   // Get object from stack
3851   pop_and_check_object(Robj);
3852 
3853   Address addr = Address(Robj, Roffset);
3854   // access field
3855   switch (bytecode()) {
3856     case Bytecodes::_fast_zputfield:
3857       __ access_store_at(T_BOOLEAN, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3858       break;
3859     case Bytecodes::_fast_bputfield:


3875 #else
3876     case Bytecodes::_fast_lputfield:
3877       __ access_store_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3878       break;
3879     case Bytecodes::_fast_fputfield:
3880       __ access_store_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3881       break;
3882     case Bytecodes::_fast_dputfield:
3883       __ access_store_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3884       break;
3885 #endif // AARCH64
3886 
3887     case Bytecodes::_fast_aputfield:
3888       do_oop_store(_masm, addr, R0_tos, Rtemp, R1_tmp, R2_tmp, false);
3889       break;
3890 
3891     default:
3892       ShouldNotReachHere();
3893   }
3894 
3895   {
3896     Label notVolatile;
3897     Label skipMembar;
3898     __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3899                    1 << ConstantPoolCacheEntry::is_final_shift);
3900     __ b(skipMembar, eq);
3901 
3902     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3903 
3904     // StoreLoad barrier after volatile field write
3905     volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3906     __ b(skipMembar);
3907 
3908     // StoreStore barrier after final field write
3909     __ bind(notVolatile);
3910     volatile_barrier(MacroAssembler::StoreStore, Rtemp);
3911 
3912     __ bind(skipMembar);
3913   }
3914 }
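
Unlike the primitive cases, _fast_aputfield goes through do_oop_store, which layers the GC write barrier over the raw store (dispatched through the barrier-set code rather than open-coded here). Schematically, for a card-table collector, with hypothetical constants:

    #include <cstdint>

    constexpr int card_shift = 9;     // assumption: 512-byte cards
    extern uint8_t* card_table_base;  // hypothetical card-table mapping

    void oop_store_with_barrier(void** addr, void* value) {
      *addr = value;                                          // the raw oop store
      uintptr_t card = reinterpret_cast<uintptr_t>(addr) >> card_shift;
      card_table_base[card] = 0;                              // mark the card dirty
    }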
3915 


3926     __ cbz(R2, done);
3927     // access constant pool cache entry
3928     __ get_cache_entry_pointer_at_bcp(R2, R1, 1);
3929     __ push_ptr(R0_tos);  // save object pointer before call_VM() clobbers it
3930     __ verify_oop(R0_tos);
3931     __ mov(R1, R0_tos);
3932     // R1: object pointer copied above
3933     // R2: cache entry pointer
3934     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), R1, R2);
3935     __ pop_ptr(R0_tos);   // restore object pointer
3936 
3937     __ bind(done);
3938   }
3939 
3940   const Register Robj    = R0_tos;
3941   const Register Rcache  = R2_tmp;
3942   const Register Rflags  = R2_tmp;
3943   const Register Rindex  = R3_tmp;
3944   const Register Roffset = R3_tmp;
3945 


3946   // access constant pool cache
3947   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3948   // replace index with field offset from cache entry
3949   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3950   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
3951 

3952   // load flags to test volatile
3953   __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));

3954 
3955   __ verify_oop(Robj);
3956   __ null_check(Robj, Rtemp);
3957 
3958   Address addr = Address(Robj, Roffset);
3959   // access field
3960   switch (bytecode()) {
3961     case Bytecodes::_fast_bgetfield:
3962       __ access_load_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3963       break;
3964     case Bytecodes::_fast_sgetfield:
3965       __ access_load_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3966       break;
3967     case Bytecodes::_fast_cgetfield:
3968       __ access_load_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3969       break;
3970     case Bytecodes::_fast_igetfield:
3971       __ access_load_at(T_INT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3972       break;
3973 #ifdef AARCH64


3976     case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, addr); break;
3977 #else
3978     case Bytecodes::_fast_lgetfield:
3979       __ access_load_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3980       break;
3981     case Bytecodes::_fast_fgetfield:
3982       __ access_load_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3983       break;
3984     case Bytecodes::_fast_dgetfield:
3985       __ access_load_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3986       break;
3987 #endif // AARCH64
3988     case Bytecodes::_fast_agetfield:
3989       do_oop_load(_masm, R0_tos, addr);
3990       __ verify_oop(R0_tos);
3991       break;
3992     default:
3993       ShouldNotReachHere();
3994   }
3995 
3996   {
3997     // Check for volatile load
3998     Label notVolatile;
3999     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4000 
4001     // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
4002     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4003 
4004     __ bind(notVolatile);
4005   }
4006 }
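
The TODO above points out that on AArch64 the explicit trailing barrier could be folded into the load itself; in C++ terms this is simply a load-acquire:

    #include <atomic>

    int load_acquire(const std::atomic<int>& field) {
      return field.load(std::memory_order_acquire);  // lowers to ldar on AArch64
    }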
4007 
4008 
4009 void TemplateTable::fast_xaccess(TosState state) {
4010   transition(vtos, state);
4011 
4012   const Register Robj = R1_tmp;
4013   const Register Rcache = R2_tmp;
4014   const Register Rindex = R3_tmp;
4015   const Register Roffset = R3_tmp;
4016   const Register Rflags = R4_tmp;
4017   Label done;
4018 
4019   // get receiver
4020   __ ldr(Robj, aaddress(0));
4021 
4022   // access constant pool cache
4023   __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
4024   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
4025   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
4026 



4027   // load flags to test volatile
4028   __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));

4029 
4030   // make sure exception is reported in correct bcp range (getfield is next instruction)
4031   __ add(Rbcp, Rbcp, 1);
4032   __ null_check(Robj, Rtemp);
4033   __ sub(Rbcp, Rbcp, 1);
4034 
4035 #ifdef AARCH64
4036   {
4037     Label notVolatile;
4038     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4039 
4040     __ add(Rtemp, Robj, Roffset);
4041 
4042     if (state == itos) {
4043       __ ldar_w(R0_tos, Rtemp);
4044     } else if (state == atos) {
4045       if (UseCompressedOops) {
4046         __ ldar_w(R0_tos, Rtemp);
4047         __ decode_heap_oop(R0_tos);
4048       } else {


4064   if (state == itos) {
4065     __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
4066   } else if (state == atos) {
4067     do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
4068     __ verify_oop(R0_tos);
4069   } else if (state == ftos) {
4070 #ifdef AARCH64
4071     __ ldr_s(S0_tos, Address(Robj, Roffset));
4072 #else
4073 #ifdef __SOFTFP__
4074     __ ldr(R0_tos, Address(Robj, Roffset));
4075 #else
4076     __ access_load_at(T_FLOAT, IN_HEAP, Address(Robj, Roffset), noreg /* ftos */, noreg, noreg, noreg);
4077 #endif // __SOFTFP__
4078 #endif // AARCH64
4079   } else {
4080     ShouldNotReachHere();
4081   }
4082 
4083 #ifndef AARCH64
4084   {
4085     // Check for volatile load
4086     Label notVolatile;
4087     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4088 
4089     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4090 
4091     __ bind(notVolatile);
4092   }
4093 #endif // !AARCH64
4094 
4095   __ bind(done);
4096 }
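
A note on get_cache_and_index_at_bcp(..., 2) above: because _fast_xaccess bytecodes replace an aload_0/getfield pair, the constant-pool-cache operand sits two bytes past bcp, one byte for each original opcode. A sketch, assuming the index was rewritten to native byte order:

    #include <cstdint>
    #include <cstring>

    uint16_t fused_field_index(const uint8_t* bcp) {
      uint16_t index;                              // cp-cache index operand
      std::memcpy(&index, bcp + 2, sizeof index);  // bytes at bcp+2..3
      return index;
    }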
4097 
4098 
4099 
4100 //----------------------------------------------------------------------------------------------------
4101 // Calls
4102 
4103 void TemplateTable::count_calls(Register method, Register temp) {
4104   // implemented elsewhere
