
src/hotspot/cpu/arm/templateTable_arm.cpp





3128 }
3129 
3130 
3131 void TemplateTable::pop_and_check_object(Register r) {
3132   __ pop_ptr(r);
3133   __ null_check(r, Rtemp);  // for field access must check obj.
3134   __ verify_oop(r);
3135 }
3136 
3137 
3138 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3139   transition(vtos, vtos);
3140 
3141   const Register Roffset  = R2_tmp;
3142   const Register Robj     = R3_tmp;
3143   const Register Rcache   = R4_tmp;
3144   const Register Rflagsav = Rtmp_save0;  // R4/R19
3145   const Register Rindex   = R5_tmp;
3146   const Register Rflags   = R5_tmp;
3147 
3148   const bool gen_volatile_check = os::is_MP();
3149 
3150   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3151   jvmti_post_field_access(Rcache, Rindex, is_static, false);
3152   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3153 
3154   if (gen_volatile_check) {
3155     __ mov(Rflagsav, Rflags);
3156   }
3157 
3158   if (!is_static) pop_and_check_object(Robj);
3159 
3160   Label Done, Lint, Ltable, shouldNotReachHere;
3161   Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3162 
3163   // compute type
3164   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3165   // Make sure we don't need to mask flags after the above shift
3166   ConstantPoolCacheEntry::verify_tos_state_shift();
3167 
3168   // There are actually two implementations of getfield/getstatic:
3169   //
3170   // 32-bit ARM:
3171   // 1) Table switch using add(PC,...) instruction (fast_version)
3172   // 2) Table switch using ldr(PC,...) instruction
3173   //
3174   // AArch64:
3175   // 1) Table switch using adr/add/br instructions (fast_version)
3176   // 2) Table switch using adr/ldr/br instructions
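Both variants implement the same dense dispatch keyed by the tos state extracted above: the fast version jumps into equally sized code blocks with add(PC, ...), the slow one indirects through a table of addresses with ldr(PC, ...). A minimal C++ sketch of the dispatch idea, not HotSpot code (the enum merely mirrors the tos labels above; a dense switch is the portable analog of the jump table):

    // Dense cases typically compile to a jump table, much like ldr(PC, table[state]).
    enum TosState { btos, ztos, ctos, stos, itos, ltos, ftos, dtos, atos };

    static int field_width_bytes(TosState state) {
      switch (state) {
        case btos: case ztos: return 1;                    // boolean, byte
        case ctos: case stos: return 2;                    // char, short
        case itos: case ftos: return 4;                    // int, float
        case ltos: case dtos: return 8;                    // long, double
        case atos:            return (int) sizeof(void*);  // oop
        default:              return 0;                    // shouldNotReachHere
      }
    }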


3373   }
3374 
3375   assert(vtos == seq++, "vtos has unexpected value");
3376 
3377   __ bind(shouldNotReachHere);
3378   __ should_not_reach_here();
3379 
3380   // itos and atos cases are frequent, so it makes sense to move them out of the table switch
3381   // atos case can be merged with the itos case (and thus moved out of the table switch) on 32-bit ARM, fast version only
3382 
3383   __ bind(Lint);
3384   __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
3385   __ push(itos);
3386   // Rewrite bytecode to be faster
3387   if (!is_static && rc == may_rewrite) {
3388     patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
3389   }
3390 
3391   __ bind(Done);
3392 
3393   if (gen_volatile_check) {
3394     // Check for volatile field
3395     Label notVolatile;
3396     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3397 
3398     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3399 
3400     __ bind(notVolatile);
3401   }
3402 
3403 }
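The barrier emitted above after a volatile read, LoadLoad|LoadStore, is acquire ordering in C++ terms. A minimal sketch of the equivalent for a plain int field (not HotSpot code):

    #include <atomic>

    // A Java volatile read maps to load; LoadLoad|LoadStore: later memory
    // accesses cannot be reordered before the load.
    int read_volatile_field(std::atomic<int>& field) {
      return field.load(std::memory_order_acquire);
    }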
3404 
3405 void TemplateTable::getfield(int byte_no) {
3406   getfield_or_static(byte_no, false);
3407 }
3408 
3409 void TemplateTable::nofast_getfield(int byte_no) {
3410   getfield_or_static(byte_no, false, may_not_rewrite);
3411 }
3412 
3413 void TemplateTable::getstatic(int byte_no) {
3414   getfield_or_static(byte_no, true);
3415 }
3416 
3417 
3418 // The cache and index registers are expected to be set before the call, and should not be R1 or Rtemp.
3419 // Blows volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR,
3420 // except cache and index registers which are preserved.
3421 void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool is_static) {
3422   ByteSize cp_base_offset = ConstantPoolCache::base_offset();


3474     // R3: value object on the stack
3475     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
3476                R1, R2, R3);
3477     __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3478 
3479     __ bind(Lcontinue);
3480   }
3481 }
3482 
3483 
3484 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3485   transition(vtos, vtos);
3486 
3487   const Register Roffset  = R2_tmp;
3488   const Register Robj     = R3_tmp;
3489   const Register Rcache   = R4_tmp;
3490   const Register Rflagsav = Rtmp_save0;  // R4/R19
3491   const Register Rindex   = R5_tmp;
3492   const Register Rflags   = R5_tmp;
3493 
3494   const bool gen_volatile_check = os::is_MP();
3495 
3496   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3497   jvmti_post_field_mod(Rcache, Rindex, is_static);
3498   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3499 
3500   if (gen_volatile_check) {
3501     // Check for volatile field
3502     Label notVolatile;
3503     __ mov(Rflagsav, Rflags);
3504     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3505 
3506     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3507 
3508     __ bind(notVolatile);
3509   }
3510 
3511   Label Done, Lint, shouldNotReachHere;
3512   Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3513 
3514   // compute type
3515   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3516   // Make sure we don't need to mask flags after the above shift
3517   ConstantPoolCacheEntry::verify_tos_state_shift();
3518 
3519   // There are actually two implementations of putfield/putstatic:
3520   //
3521   // 32-bit ARM:
3522   // 1) Table switch using add(PC,...) instruction (fast_version)
3523   // 2) Table switch using ldr(PC,...) instruction
3524   //
3525   // AArch64:
3526   // 1) Table switch using adr/add/br instructions (fast_version)
3527   // 2) Table switch using adr/ldr/br instructions
3528   //
3529   // First version requires fixed size of code block for each case and


3715     if (!is_static && rc == may_rewrite) {
3716       patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3717     }
3718     __ b(Done);
3719   }
3720 
3721   __ bind(shouldNotReachHere);
3722   __ should_not_reach_here();
3723 
3724   // itos case is frequent and is moved outside the table switch
3725   __ bind(Lint);
3726   __ pop(itos);
3727   if (!is_static) pop_and_check_object(Robj);
3728   __ access_store_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
3729   if (!is_static && rc == may_rewrite) {
3730     patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3731   }
3732 
3733   __ bind(Done);
3734 
3735   if (gen_volatile_check) {
3736     Label notVolatile;
3737     if (is_static) {
3738       // Just check for volatile. Memory barrier for static final field
3739       // is handled by class initialization.
3740       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3741       volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3742       __ bind(notVolatile);
3743     } else {
3744       // Check for volatile field and final field
3745       Label skipMembar;
3746 
3747       __ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3748                        1 << ConstantPoolCacheEntry::is_final_shift);
3749       __ b(skipMembar, eq);
3750 
3751       __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3752 
3753       // StoreLoad barrier after volatile field write
3754       volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3755       __ b(skipMembar);
3756 
3757       // StoreStore barrier after final field write
3758       __ bind(notVolatile);
3759       volatile_barrier(MacroAssembler::StoreStore, Rtemp);
3760 
3761       __ bind(skipMembar);
3762     }
3763   }
3764 
3765 }
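putfield brackets a volatile store with two barriers: StoreStore|LoadStore before the store and StoreLoad after it. A minimal fence-level sketch of that ordering (not HotSpot code):

    #include <atomic>

    std::atomic<int> shared_field;  // stand-in for the Java volatile field

    void write_volatile_field(int v) {
      // StoreStore|LoadStore: earlier accesses complete before the store
      std::atomic_thread_fence(std::memory_order_release);
      shared_field.store(v, std::memory_order_relaxed);
      // StoreLoad: the store becomes visible before any later load
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }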
3766 
3767 void TemplateTable::putfield(int byte_no) {
3768   putfield_or_static(byte_no, false);
3769 }
3770 
3771 void TemplateTable::nofast_putfield(int byte_no) {
3772   putfield_or_static(byte_no, false, may_not_rewrite);
3773 }
3774 
3775 void TemplateTable::putstatic(int byte_no) {
3776   putfield_or_static(byte_no, true);
3777 }
3778 
3779 
3780 void TemplateTable::jvmti_post_fast_field_mod() {
3781   // This version of jvmti_post_fast_field_mod() is not used on ARM
3782   Unimplemented();
3783 }
3784 


3814     __ pop(state);                // restore value
3815 
3816     __ bind(done);
3817   }
3818 }
3819 
3820 
3821 void TemplateTable::fast_storefield(TosState state) {
3822   transition(state, vtos);
3823 
3824   ByteSize base = ConstantPoolCache::base_offset();
3825 
3826   jvmti_post_fast_field_mod(state);
3827 
3828   const Register Rcache  = R2_tmp;
3829   const Register Rindex  = R3_tmp;
3830   const Register Roffset = R3_tmp;
3831   const Register Rflags  = Rtmp_save0; // R4/R19
3832   const Register Robj    = R5_tmp;
3833 
3834   const bool gen_volatile_check = os::is_MP();
3835 
3836   // access constant pool cache
3837   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3838 
3839   __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3840 
3841   if (gen_volatile_check) {
3842     // load flags to test volatile
3843     __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
3844   }
3845 
3846   // replace index with field offset from cache entry
3847   __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
3848 
3849   if (gen_volatile_check) {
3850     // Check for volatile store
3851     Label notVolatile;
3852     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3853 
3854     // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explicit barrier
3855     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3856 
3857     __ bind(notVolatile);
3858   }
3859 
3860   // Get object from stack
3861   pop_and_check_object(Robj);
3862 
3863   Address addr = Address(Robj, Roffset);
3864   // access field
3865   switch (bytecode()) {
3866     case Bytecodes::_fast_zputfield:
3867       __ access_store_at(T_BOOLEAN, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3868       break;
3869     case Bytecodes::_fast_bputfield:
3870       __ access_store_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3871       break;
3872     case Bytecodes::_fast_sputfield:
3873       __ access_store_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3874       break;
3875     case Bytecodes::_fast_cputfield:
3876       __ access_store_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3877       break;
3878     case Bytecodes::_fast_iputfield:


3885 #else
3886     case Bytecodes::_fast_lputfield:
3887       __ access_store_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3888       break;
3889     case Bytecodes::_fast_fputfield:
3890       __ access_store_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3891       break;
3892     case Bytecodes::_fast_dputfield:
3893       __ access_store_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3894       break;
3895 #endif // AARCH64
3896 
3897     case Bytecodes::_fast_aputfield:
3898       do_oop_store(_masm, addr, R0_tos, Rtemp, R1_tmp, R2_tmp, false);
3899       break;
3900 
3901     default:
3902       ShouldNotReachHere();
3903   }
3904 
3905   if (gen_volatile_check) {
3906     Label notVolatile;
3907     Label skipMembar;
3908     __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3909                    1 << ConstantPoolCacheEntry::is_final_shift);
3910     __ b(skipMembar, eq);
3911 
3912     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3913 
3914     // StoreLoad barrier after volatile field write
3915     volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3916     __ b(skipMembar);
3917 
3918     // StoreStore barrier after final field write
3919     __ bind(notVolatile);
3920     volatile_barrier(MacroAssembler::StoreStore, Rtemp);
3921 
3922     __ bind(skipMembar);
3923   }
3924 }
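The StoreStore barrier on the final-field path above implements safe publication: stores into the object must become visible no later than the store that publishes its reference. A hedged sketch of the guarantee (not HotSpot code; Point and published are illustrative names):

    #include <atomic>

    struct Point { int x; };                 // x stands in for a final field
    std::atomic<Point*> published{nullptr};

    void construct_and_publish(int v) {
      Point* p = new Point{v};               // final field store
      // the interpreter's StoreStore barrier goes here; release ordering
      // on the publishing store supplies the same guarantee
      published.store(p, std::memory_order_release);
    }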
3925 
3926 
3927 void TemplateTable::fast_accessfield(TosState state) {
3928   transition(atos, state);
3929 
3930   // do the JVMTI work here to avoid disturbing the register state below
3931   if (__ can_post_field_access()) {
3932     // Check to see if a field access watch has been set before we take
3933     // the time to call into the VM.
3934     Label done;
3935     __ ldr_global_s32(R2, (address) JvmtiExport::get_field_access_count_addr());
3936     __ cbz(R2, done);
3937     // access constant pool cache entry
3938     __ get_cache_entry_pointer_at_bcp(R2, R1, 1);
3939     __ push_ptr(R0_tos);  // save object pointer before call_VM() clobbers it
3940     __ verify_oop(R0_tos);
3941     __ mov(R1, R0_tos);
3942     // R1: object pointer copied above
3943     // R2: cache entry pointer
3944     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), R1, R2);
3945     __ pop_ptr(R0_tos);   // restore object pointer
3946 
3947     __ bind(done);
3948   }
3949 
3950   const Register Robj    = R0_tos;
3951   const Register Rcache  = R2_tmp;
3952   const Register Rflags  = R2_tmp;
3953   const Register Rindex  = R3_tmp;
3954   const Register Roffset = R3_tmp;
3955 
3956   const bool gen_volatile_check = os::is_MP();
3957 
3958   // access constant pool cache
3959   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3960   // replace index with field offset from cache entry
3961   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3962   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
3963 
3964   if (gen_volatile_check) {
3965     // load flags to test volatile
3966     __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
3967   }
3968 
3969   __ verify_oop(Robj);
3970   __ null_check(Robj, Rtemp);
3971 
3972   Address addr = Address(Robj, Roffset);
3973   // access field
3974   switch (bytecode()) {
3975     case Bytecodes::_fast_bgetfield:
3976       __ access_load_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3977       break;
3978     case Bytecodes::_fast_sgetfield:
3979       __ access_load_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3980       break;
3981     case Bytecodes::_fast_cgetfield:
3982       __ access_load_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3983       break;
3984     case Bytecodes::_fast_igetfield:
3985       __ access_load_at(T_INT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3986       break;
3987 #ifdef AARCH64


3990     case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, addr); break;
3991 #else
3992     case Bytecodes::_fast_lgetfield:
3993       __ access_load_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3994       break;
3995     case Bytecodes::_fast_fgetfield:
3996       __ access_load_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3997       break;
3998     case Bytecodes::_fast_dgetfield:
3999       __ access_load_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg);
4000       break;
4001 #endif // AARCH64
4002     case Bytecodes::_fast_agetfield:
4003       do_oop_load(_masm, R0_tos, addr);
4004       __ verify_oop(R0_tos);
4005       break;
4006     default:
4007       ShouldNotReachHere();
4008   }
4009 
4010   if (gen_volatile_check) {
4011     // Check for volatile load
4012     Label notVolatile;
4013     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4014 
4015     // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
4016     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4017 
4018     __ bind(notVolatile);
4019   }
4020 }
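On the TODO above: AArch64 can fold the acquire barrier into the load itself. The two functions below enforce the same ordering; on AArch64 the second compiles to a single ldar where the first needs a separate dmb (a sketch, not HotSpot code):

    #include <atomic>

    int with_explicit_barrier(std::atomic<int>& f) {
      int v = f.load(std::memory_order_relaxed);
      std::atomic_thread_fence(std::memory_order_acquire);  // separate barrier
      return v;
    }

    int with_acquire_load(std::atomic<int>& f) {
      return f.load(std::memory_order_acquire);             // single ldar
    }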
4021 
4022 
4023 void TemplateTable::fast_xaccess(TosState state) {
4024   transition(vtos, state);
4025 
4026   const Register Robj = R1_tmp;
4027   const Register Rcache = R2_tmp;
4028   const Register Rindex = R3_tmp;
4029   const Register Roffset = R3_tmp;
4030   const Register Rflags = R4_tmp;
4031   Label done;
4032 
4033   // get receiver
4034   __ ldr(Robj, aaddress(0));
4035 
4036   // access constant pool cache
4037   __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
4038   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
4039   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
4040 
4041   const bool gen_volatile_check = os::is_MP();
4042 
4043   if (gen_volatile_check) {
4044     // load flags to test volatile
4045     __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
4046   }
4047 
4048   // make sure the exception is reported in the correct bcp range (getfield is the next instruction)
4049   __ add(Rbcp, Rbcp, 1);
4050   __ null_check(Robj, Rtemp);
4051   __ sub(Rbcp, Rbcp, 1);
4052 
4053 #ifdef AARCH64
4054   if (gen_volatile_check) {
4055     Label notVolatile;
4056     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4057 
4058     __ add(Rtemp, Robj, Roffset);
4059 
4060     if (state == itos) {
4061       __ ldar_w(R0_tos, Rtemp);
4062     } else if (state == atos) {
4063       if (UseCompressedOops) {
4064         __ ldar_w(R0_tos, Rtemp);
4065         __ decode_heap_oop(R0_tos);
4066       } else {
4067         __ ldar(R0_tos, Rtemp);
4068       }
4069       __ verify_oop(R0_tos);
4070     } else if (state == ftos) {
4071       __ ldar_w(R0_tos, Rtemp);
4072       __ fmov_sw(S0_tos, R0_tos);
4073     } else {
4074       ShouldNotReachHere();
4075     }
4076     __ b(done);
4077 
4078     __ bind(notVolatile);
4079   }
4080 #endif // AARCH64
4081 
4082   if (state == itos) {
4083     __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
4084   } else if (state == atos) {
4085     do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
4086     __ verify_oop(R0_tos);
4087   } else if (state == ftos) {
4088 #ifdef AARCH64
4089     __ ldr_s(S0_tos, Address(Robj, Roffset));
4090 #else
4091 #ifdef __SOFTFP__
4092     __ ldr(R0_tos, Address(Robj, Roffset));
4093 #else
4094     __ access_load_at(T_FLOAT, IN_HEAP, Address(Robj, Roffset), noreg /* ftos */, noreg, noreg, noreg);
4095 #endif // __SOFTFP__
4096 #endif // AARCH64
4097   } else {
4098     ShouldNotReachHere();
4099   }
4100 
4101 #ifndef AARCH64
4102   if (gen_volatile_check) {
4103     // Check for volatile load
4104     Label notVolatile;
4105     __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4106 
4107     volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4108 
4109     __ bind(notVolatile);
4110   }
4111 #endif // !AARCH64
4112 
4113   __ bind(done);
4114 }
4115 
4116 
4117 
4118 //----------------------------------------------------------------------------------------------------
4119 // Calls
4120 
4121 void TemplateTable::count_calls(Register method, Register temp) {
4122   // implemented elsewhere
4123   ShouldNotReachHere();
4124 }
4125 
4126 
4127 void TemplateTable::prepare_invoke(int byte_no,
4128                                    Register method,  // linked method (or i-klass)
4129                                    Register index,   // itable index, MethodType, etc.
4130                                    Register recv,    // if caller wants to see it




3128 }
3129 
3130 
3131 void TemplateTable::pop_and_check_object(Register r) {
3132   __ pop_ptr(r);
3133   __ null_check(r, Rtemp);  // for field access must check obj.
3134   __ verify_oop(r);
3135 }
3136 
3137 
3138 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3139   transition(vtos, vtos);
3140 
3141   const Register Roffset  = R2_tmp;
3142   const Register Robj     = R3_tmp;
3143   const Register Rcache   = R4_tmp;
3144   const Register Rflagsav = Rtmp_save0;  // R4/R19
3145   const Register Rindex   = R5_tmp;
3146   const Register Rflags   = R5_tmp;
3147 


3148   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3149   jvmti_post_field_access(Rcache, Rindex, is_static, false);
3150   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3151 

3152   __ mov(Rflagsav, Rflags);

3153 
3154   if (!is_static) pop_and_check_object(Robj);
3155 
3156   Label Done, Lint, Ltable, shouldNotReachHere;
3157   Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3158 
3159   // compute type
3160   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3161   // Make sure we don't need to mask flags after the above shift
3162   ConstantPoolCacheEntry::verify_tos_state_shift();
3163 
3164   // There are actually two implementations of getfield/getstatic:
3165   //
3166   // 32-bit ARM:
3167   // 1) Table switch using add(PC,...) instruction (fast_version)
3168   // 2) Table switch using ldr(PC,...) instruction
3169   //
3170   // AArch64:
3171   // 1) Table switch using adr/add/br instructions (fast_version)
3172   // 2) Table switch using adr/ldr/br instructions


3369   }
3370 
3371   assert(vtos == seq++, "vtos has unexpected value");
3372 
3373   __ bind(shouldNotReachHere);
3374   __ should_not_reach_here();
3375 
3376   // itos and atos cases are frequent, so it makes sense to move them out of the table switch
3377   // atos case can be merged with the itos case (and thus moved out of the table switch) on 32-bit ARM, fast version only
3378 
3379   __ bind(Lint);
3380   __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
3381   __ push(itos);
3382   // Rewrite bytecode to be faster
3383   if (!is_static && rc == may_rewrite) {
3384     patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
3385   }
3386 
3387   __ bind(Done);
3388 

3389   // Check for volatile field
3390   Label notVolatile;
3391   __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3392 
3393   volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3394 
3395   __ bind(notVolatile);


3396 }
3397 
3398 void TemplateTable::getfield(int byte_no) {
3399   getfield_or_static(byte_no, false);
3400 }
3401 
3402 void TemplateTable::nofast_getfield(int byte_no) {
3403   getfield_or_static(byte_no, false, may_not_rewrite);
3404 }
3405 
3406 void TemplateTable::getstatic(int byte_no) {
3407   getfield_or_static(byte_no, true);
3408 }
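patch_bytecode(), called from getfield_or_static() above, quickens the bytecode: it overwrites the opcode at the current bcp so later executions of this site dispatch straight to the fast template. A minimal sketch of the idea (not HotSpot code; the fast opcode value is hypothetical):

    #include <cstdint>

    enum : std::uint8_t {
      OP_GETFIELD       = 0xb4,  // standard JVM getfield opcode
      OP_FAST_IGETFIELD = 0xcc   // hypothetical value for the fast variant
    };

    void quicken_getfield(std::uint8_t* bcp) {
      if (*bcp == OP_GETFIELD) {
        *bcp = OP_FAST_IGETFIELD;  // rewritten in place, once resolved
      }
    }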
3409 
3410 
3411 // The cache and index registers are expected to be set before the call, and should not be R1 or Rtemp.
3412 // Blows volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64), Rtemp, LR,
3413 // except cache and index registers which are preserved.
3414 void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool is_static) {
3415   ByteSize cp_base_offset = ConstantPoolCache::base_offset();


3467     // R3: value object on the stack
3468     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
3469                R1, R2, R3);
3470     __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3471 
3472     __ bind(Lcontinue);
3473   }
3474 }
3475 
3476 
3477 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3478   transition(vtos, vtos);
3479 
3480   const Register Roffset  = R2_tmp;
3481   const Register Robj     = R3_tmp;
3482   const Register Rcache   = R4_tmp;
3483   const Register Rflagsav = Rtmp_save0;  // R4/R19
3484   const Register Rindex   = R5_tmp;
3485   const Register Rflags   = R5_tmp;
3486 


3487   resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
3488   jvmti_post_field_mod(Rcache, Rindex, is_static);
3489   load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
3490 

3491   // Check for volatile field
3492   Label notVolatile;
3493   __ mov(Rflagsav, Rflags);
3494   __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3495 
3496   volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3497 
3498   __ bind(notVolatile);

3499 
3500   Label Done, Lint, shouldNotReachHere;
3501   Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
3502 
3503   // compute type
3504   __ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
3505   // Make sure we don't need to mask flags after the above shift
3506   ConstantPoolCacheEntry::verify_tos_state_shift();
3507 
3508   // There are actually two implementations of putfield/putstatic:
3509   //
3510   // 32-bit ARM:
3511   // 1) Table switch using add(PC,...) instruction (fast_version)
3512   // 2) Table switch using ldr(PC,...) instruction
3513   //
3514   // AArch64:
3515   // 1) Table switch using adr/add/br instructions (fast_version)
3516   // 2) Table switch using adr/ldr/br instructions
3517   //
3518   // First version requires fixed size of code block for each case and


3704     if (!is_static && rc == may_rewrite) {
3705       patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
3706     }
3707     __ b(Done);
3708   }
3709 
3710   __ bind(shouldNotReachHere);
3711   __ should_not_reach_here();
3712 
3713   // itos case is frequent and is moved outside the table switch
3714   __ bind(Lint);
3715   __ pop(itos);
3716   if (!is_static) pop_and_check_object(Robj);
3717   __ access_store_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
3718   if (!is_static && rc == may_rewrite) {
3719     patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
3720   }
3721 
3722   __ bind(Done);
3723 
3724   Label notVolatile2;

3725   if (is_static) {
3726     // Just check for volatile. Memory barrier for static final field
3727     // is handled by class initialization.
3728     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
3729     volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3730     __ bind(notVolatile2);
3731   } else {
3732     // Check for volatile field and final field
3733     Label skipMembar;
3734 
3735     __ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3736            1 << ConstantPoolCacheEntry::is_final_shift);
3737     __ b(skipMembar, eq);
3738 
3739     __ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
3740 
3741     // StoreLoad barrier after volatile field write
3742     volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3743     __ b(skipMembar);
3744 
3745     // StoreStore barrier after final field write
3746     __ bind(notVolatile2);
3747     volatile_barrier(MacroAssembler::StoreStore, Rtemp);
3748 
3749     __ bind(skipMembar);
3750   }


3751 }
3752 
3753 void TemplateTable::putfield(int byte_no) {
3754   putfield_or_static(byte_no, false);
3755 }
3756 
3757 void TemplateTable::nofast_putfield(int byte_no) {
3758   putfield_or_static(byte_no, false, may_not_rewrite);
3759 }
3760 
3761 void TemplateTable::putstatic(int byte_no) {
3762   putfield_or_static(byte_no, true);
3763 }
3764 
3765 
3766 void TemplateTable::jvmti_post_fast_field_mod() {
3767   // This version of jvmti_post_fast_field_mod() is not used on ARM
3768   Unimplemented();
3769 }
3770 


3800     __ pop(state);                // restore value
3801 
3802     __ bind(done);
3803   }
3804 }
3805 
3806 
3807 void TemplateTable::fast_storefield(TosState state) {
3808   transition(state, vtos);
3809 
3810   ByteSize base = ConstantPoolCache::base_offset();
3811 
3812   jvmti_post_fast_field_mod(state);
3813 
3814   const Register Rcache  = R2_tmp;
3815   const Register Rindex  = R3_tmp;
3816   const Register Roffset = R3_tmp;
3817   const Register Rflags  = Rtmp_save0; // R4/R19
3818   const Register Robj    = R5_tmp;
3819 


3820   // access constant pool cache
3821   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3822 
3823   __ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3824 

3825   // load flags to test volatile
3826   __ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));

3827 
3828   // replace index with field offset from cache entry
3829   __ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
3830 

3831   // Check for volatile store
3832   Label notVolatile;
3833   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3834 
3835   // TODO-AARCH64 on AArch64, store-release instructions can be used to get rid of this explicit barrier
3836   volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3837 
3838   __ bind(notVolatile);

3839 
3840   // Get object from stack
3841   pop_and_check_object(Robj);
3842 
3843   Address addr = Address(Robj, Roffset);
3844   // access field
3845   switch (bytecode()) {
3846     case Bytecodes::_fast_zputfield:
3847       __ access_store_at(T_BOOLEAN, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3848       break;
3849     case Bytecodes::_fast_bputfield:
3850       __ access_store_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3851       break;
3852     case Bytecodes::_fast_sputfield:
3853       __ access_store_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3854       break;
3855     case Bytecodes::_fast_cputfield:
3856       __ access_store_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
3857       break;
3858     case Bytecodes::_fast_iputfield:


3865 #else
3866     case Bytecodes::_fast_lputfield:
3867       __ access_store_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3868       break;
3869     case Bytecodes::_fast_fputfield:
3870       __ access_store_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3871       break;
3872     case Bytecodes::_fast_dputfield:
3873       __ access_store_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
3874       break;
3875 #endif // AARCH64
3876 
3877     case Bytecodes::_fast_aputfield:
3878       do_oop_store(_masm, addr, R0_tos, Rtemp, R1_tmp, R2_tmp, false);
3879       break;
3880 
3881     default:
3882       ShouldNotReachHere();
3883   }
3884 
3885   Label notVolatile2;

3886   Label skipMembar;
3887   __ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
3888          1 << ConstantPoolCacheEntry::is_final_shift);
3889   __ b(skipMembar, eq);
3890 
3891   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
3892 
3893   // StoreLoad barrier after volatile field write
3894   volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
3895   __ b(skipMembar);
3896 
3897   // StoreStore barrier after final field write
3898   __ bind(notVolatile2);
3899   volatile_barrier(MacroAssembler::StoreStore, Rtemp);
3900 
3901   __ bind(skipMembar);

3902 }
3903 

3904 void TemplateTable::fast_accessfield(TosState state) {
3905   transition(atos, state);
3906 
3907   // do the JVMTI work here to avoid disturbing the register state below
3908   if (__ can_post_field_access()) {
3909     // Check to see if a field access watch has been set before we take
3910     // the time to call into the VM.
3911     Label done;
3912     __ ldr_global_s32(R2, (address) JvmtiExport::get_field_access_count_addr());
3913     __ cbz(R2, done);
3914     // access constant pool cache entry
3915     __ get_cache_entry_pointer_at_bcp(R2, R1, 1);
3916     __ push_ptr(R0_tos);  // save object pointer before call_VM() clobbers it
3917     __ verify_oop(R0_tos);
3918     __ mov(R1, R0_tos);
3919     // R1: object pointer copied above
3920     // R2: cache entry pointer
3921     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), R1, R2);
3922     __ pop_ptr(R0_tos);   // restore object pointer
3923 
3924     __ bind(done);
3925   }
3926 
3927   const Register Robj    = R0_tos;
3928   const Register Rcache  = R2_tmp;
3929   const Register Rflags  = R2_tmp;
3930   const Register Rindex  = R3_tmp;
3931   const Register Roffset = R3_tmp;
3932 


3933   // access constant pool cache
3934   __ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
3935   // replace index with field offset from cache entry
3936   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
3937   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
3938 

3939   // load flags to test volatile
3940   __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));

3941 
3942   __ verify_oop(Robj);
3943   __ null_check(Robj, Rtemp);
3944 
3945   Address addr = Address(Robj, Roffset);
3946   // access field
3947   switch (bytecode()) {
3948     case Bytecodes::_fast_bgetfield:
3949       __ access_load_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3950       break;
3951     case Bytecodes::_fast_sgetfield:
3952       __ access_load_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3953       break;
3954     case Bytecodes::_fast_cgetfield:
3955       __ access_load_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3956       break;
3957     case Bytecodes::_fast_igetfield:
3958       __ access_load_at(T_INT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
3959       break;
3960 #ifdef AARCH64


3963     case Bytecodes::_fast_dgetfield: __ ldr_d(D0_tos, addr); break;
3964 #else
3965     case Bytecodes::_fast_lgetfield:
3966       __ access_load_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3967       break;
3968     case Bytecodes::_fast_fgetfield:
3969       __ access_load_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3970       break;
3971     case Bytecodes::_fast_dgetfield:
3972       __ access_load_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg);
3973       break;
3974 #endif // AARCH64
3975     case Bytecodes::_fast_agetfield:
3976       do_oop_load(_masm, R0_tos, addr);
3977       __ verify_oop(R0_tos);
3978       break;
3979     default:
3980       ShouldNotReachHere();
3981   }
3982 

3983   // Check for volatile load
3984   Label notVolatile;
3985   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3986 
3987   // TODO-AARCH64 on AArch64, load-acquire instructions can be used to get rid of this explicit barrier
3988   volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
3989 
3990   __ bind(notVolatile);

3991 }
3992 
3993 
3994 void TemplateTable::fast_xaccess(TosState state) {
3995   transition(vtos, state);
3996 
3997   const Register Robj = R1_tmp;
3998   const Register Rcache = R2_tmp;
3999   const Register Rindex = R3_tmp;
4000   const Register Roffset = R3_tmp;
4001   const Register Rflags = R4_tmp;
4002   Label done;
4003 
4004   // get receiver
4005   __ ldr(Robj, aaddress(0));
4006 
4007   // access constant pool cache
4008   __ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
4009   __ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
4010   __ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
4011 



4012   // load flags to test volatile
4013   __ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));

4014 
4015   // make sure the exception is reported in the correct bcp range (getfield is the next instruction)
4016   __ add(Rbcp, Rbcp, 1);
4017   __ null_check(Robj, Rtemp);
4018   __ sub(Rbcp, Rbcp, 1);
4019 
4020 #ifdef AARCH64

4021   Label notVolatile;
4022   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4023 
4024   __ add(Rtemp, Robj, Roffset);
4025 
4026   if (state == itos) {
4027     __ ldar_w(R0_tos, Rtemp);
4028   } else if (state == atos) {
4029     if (UseCompressedOops) {
4030       __ ldar_w(R0_tos, Rtemp);
4031       __ decode_heap_oop(R0_tos);
4032     } else {
4033       __ ldar(R0_tos, Rtemp);
4034     }
4035     __ verify_oop(R0_tos);
4036   } else if (state == ftos) {
4037     __ ldar_w(R0_tos, Rtemp);
4038     __ fmov_sw(S0_tos, R0_tos);
4039   } else {
4040     ShouldNotReachHere();
4041   }
4042   __ b(done);
4043 
4044   __ bind(notVolatile);

4045 #endif // AARCH64
4046 
4047   if (state == itos) {
4048     __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
4049   } else if (state == atos) {
4050     do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
4051     __ verify_oop(R0_tos);
4052   } else if (state == ftos) {
4053 #ifdef AARCH64
4054     __ ldr_s(S0_tos, Address(Robj, Roffset));
4055 #else
4056 #ifdef __SOFTFP__
4057     __ ldr(R0_tos, Address(Robj, Roffset));
4058 #else
4059     __ access_load_at(T_FLOAT, IN_HEAP, Address(Robj, Roffset), noreg /* ftos */, noreg, noreg, noreg);
4060 #endif // __SOFTFP__
4061 #endif // AARCH64
4062   } else {
4063     ShouldNotReachHere();
4064   }
4065 
4066 #ifndef AARCH64

4067   // Check for volatile load
4068   Label notVolatile;
4069   __ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
4070 
4071   volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
4072 
4073   __ bind(notVolatile);

4074 #endif // !AARCH64
4075 
4076   __ bind(done);
4077 }
4078 
4079 
4080 
4081 //----------------------------------------------------------------------------------------------------
4082 // Calls
4083 
4084 void TemplateTable::count_calls(Register method, Register temp) {
4085   // implemented elsewhere
4086   ShouldNotReachHere();
4087 }
4088 
4089 
4090 void TemplateTable::prepare_invoke(int byte_no,
4091                                    Register method,  // linked method (or i-klass)
4092                                    Register index,   // itable index, MethodType, etc.
4093                                    Register recv,    // if caller wants to see it

