
src/hotspot/cpu/x86/templateTable_x86.cpp

rev 50390 : 8200623: Primitive heap access for interpreter BarrierSetAssembler/x86
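
Reviewer note: the change replaces raw moves in the interpreter's load/store templates with typed, decorated accesses (access_load_at / access_store_at, visible throughout the second listing) so that a collector's BarrierSetAssembler can interpose on primitive heap accesses. Below is a minimal sketch of the assumed dispatch path; the entry-point names and argument order are taken from the calls on this page, but the body is illustrative, not the committed implementation:

    // Sketch: how an interpreter load is routed to the GC-specific assembler.
    void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators,
                                        Register dst, Address src,
                                        Register tmp1, Register tmp_thread) {
      // Look up the assembler hooks of the active barrier set; a GC such as
      // G1 or Shenandoah can override load_at, while the default version
      // emits plain moves for primitive types.
      BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
      bs->load_at(this, decorators, type, dst, src, tmp1, tmp_thread);
    }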


 753   // check index
 754   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 755   if (index != rbx) {
 756     // ??? convention: move aberrant index into rbx for exception message
 757     assert(rbx != array, "different registers");
 758     __ movl(rbx, index);
 759   }
 760   Label skip;
 761   __ jccb(Assembler::below, skip);
 762   // Pass array to create more detailed exceptions.
 763   __ mov(NOT_LP64(rax) LP64_ONLY(c_rarg1), array);
 764   __ jump(ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 765   __ bind(skip);
 766 }
 767 
 768 void TemplateTable::iaload() {
 769   transition(itos, itos);
 770   // rax: index
 771   // rdx: array
 772   index_check(rdx, rax); // kills rbx
 773   __ movl(rax, Address(rdx, rax,
 774                        Address::times_4,
 775                        arrayOopDesc::base_offset_in_bytes(T_INT)));
 776 }
 777 
 778 void TemplateTable::laload() {
 779   transition(itos, ltos);
 780   // rax: index
 781   // rdx: array
 782   index_check(rdx, rax); // kills rbx
 783   NOT_LP64(__ mov(rbx, rax));
 784   // rbx: index
 785   __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
 786   NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
 787 }
 788 
 789 
 790 
 791 void TemplateTable::faload() {
 792   transition(itos, ftos);
 793   // rax: index
 794   // rdx: array
 795   index_check(rdx, rax); // kills rbx
 796   __ load_float(Address(rdx, rax,
 797                         Address::times_4,
 798                         arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
 799 }
 800 
 801 void TemplateTable::daload() {
 802   transition(itos, dtos);
 803   // rax: index
 804   // rdx: array
 805   index_check(rdx, rax); // kills rbx
 806   __ load_double(Address(rdx, rax,
 807                          Address::times_8,
 808                          arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
 809 }
 810 
 811 void TemplateTable::aaload() {
 812   transition(itos, atos);
 813   // rax: index
 814   // rdx: array
 815   index_check(rdx, rax); // kills rbx
 816   do_oop_load(_masm,
 817               Address(rdx, rax,
 818                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 819                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 820               rax,
 821               IN_HEAP_ARRAY);
 822 }
 823 
 824 void TemplateTable::baload() {
 825   transition(itos, itos);
 826   // rax: index
 827   // rdx: array
 828   index_check(rdx, rax); // kills rbx
 829   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 830 }
 831 
 832 void TemplateTable::caload() {
 833   transition(itos, itos);
 834   // rax: index
 835   // rdx: array
 836   index_check(rdx, rax); // kills rbx
 837   __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 838 }
 839 
 840 // iload followed by caload frequent pair
 841 void TemplateTable::fast_icaload() {
 842   transition(vtos, itos);
 843   // load index out of locals
 844   locals_index(rbx);
 845   __ movl(rax, iaddress(rbx));
 846 
 847   // rax: index
 848   // rdx: array
 849   index_check(rdx, rax); // kills rbx
 850   __ load_unsigned_short(rax,
 851                          Address(rdx, rax,
 852                                  Address::times_2,
 853                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 854 }
 855 
 856 
 857 void TemplateTable::saload() {
 858   transition(itos, itos);
 859   // rax: index
 860   // rdx: array
 861   index_check(rdx, rax); // kills rbx
 862   __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
 863 }
 864 
 865 void TemplateTable::iload(int n) {
 866   transition(vtos, itos);
 867   __ movl(rax, iaddress(n));
 868 }
 869 
 870 void TemplateTable::lload(int n) {
 871   transition(vtos, ltos);
 872   __ movptr(rax, laddress(n));
 873   NOT_LP64(__ movptr(rdx, haddress(n)));
 874 }
 875 
 876 void TemplateTable::fload(int n) {
 877   transition(vtos, ftos);
 878   __ load_float(faddress(n));
 879 }
 880 
 881 void TemplateTable::dload(int n) {
 882   transition(vtos, dtos);


1034   __ movdbl(daddress(rbx), xmm0);
1035 #else
1036   wide_lstore();
1037 #endif
1038 }
1039 
1040 void TemplateTable::wide_astore() {
1041   transition(vtos, vtos);
1042   __ pop_ptr(rax);
1043   locals_index_wide(rbx);
1044   __ movptr(aaddress(rbx), rax);
1045 }
1046 
1047 void TemplateTable::iastore() {
1048   transition(itos, vtos);
1049   __ pop_i(rbx);
1050   // rax: value
1051   // rbx: index
1052   // rdx: array
1053   index_check(rdx, rbx); // prefer index in rbx
1054   __ movl(Address(rdx, rbx,
1055                   Address::times_4,
1056                   arrayOopDesc::base_offset_in_bytes(T_INT)),
1057           rax);
1058 }
1059 
1060 void TemplateTable::lastore() {
1061   transition(ltos, vtos);
1062   __ pop_i(rbx);
1063   // rax: low(value)
1064   // rcx: array
1065   // rdx: high(value)
1066   index_check(rcx, rbx);  // prefer index in rbx
1067   // rbx: index
1068   __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
1069   NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
1070 }
1071 
1072 
1073 void TemplateTable::fastore() {
1074   transition(ftos, vtos);
1075   __ pop_i(rbx);
1076   // value is in UseSSE >= 1 ? xmm0 : ST(0)
1077   // rbx:  index
1078   // rdx:  array
1079   index_check(rdx, rbx); // prefer index in rbx
1080   __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
1081 }
1082 
1083 void TemplateTable::dastore() {
1084   transition(dtos, vtos);
1085   __ pop_i(rbx);
1086   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1087   // rbx:  index
1088   // rdx:  array
1089   index_check(rdx, rbx); // prefer index in rbx
1090   __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
1091 }
1092 
1093 void TemplateTable::aastore() {
1094   Label is_null, ok_is_subtype, done;
1095   transition(vtos, vtos);
1096   // stack: ..., array, index, value
1097   __ movptr(rax, at_tos());    // value
1098   __ movl(rcx, at_tos_p1()); // index
1099   __ movptr(rdx, at_tos_p2()); // array
1100 
1101   Address element_address(rdx, rcx,
1102                           UseCompressedOops ? Address::times_4 : Address::times_ptr,
1103                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1104 
1105   index_check_without_pop(rdx, rcx);     // kills rbx
1106   __ testptr(rax, rax);
1107   __ jcc(Assembler::zero, is_null);
1108 
1109   // Move subklass into rbx
1110   __ load_klass(rbx, rax);


1143   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1144 }
1145 
1146 void TemplateTable::bastore() {
1147   transition(itos, vtos);
1148   __ pop_i(rbx);
1149   // rax: value
1150   // rbx: index
1151   // rdx: array
1152   index_check(rdx, rbx); // prefer index in rbx
1153   // Need to check whether array is boolean or byte
1154   // since both types share the bastore bytecode.
1155   __ load_klass(rcx, rdx);
1156   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1157   int diffbit = Klass::layout_helper_boolean_diffbit();
1158   __ testl(rcx, diffbit);
1159   Label L_skip;
1160   __ jccb(Assembler::zero, L_skip);
1161   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1162   __ bind(L_skip);
1163   __ movb(Address(rdx, rbx,
1164                   Address::times_1,
1165                   arrayOopDesc::base_offset_in_bytes(T_BYTE)),
1166           rax);
1167 }
1168 
1169 void TemplateTable::castore() {
1170   transition(itos, vtos);
1171   __ pop_i(rbx);
1172   // rax: value
1173   // rbx: index
1174   // rdx: array
1175   index_check(rdx, rbx);  // prefer index in rbx
1176   __ movw(Address(rdx, rbx,
1177                   Address::times_2,
1178                   arrayOopDesc::base_offset_in_bytes(T_CHAR)),
1179           rax);
1180 }
1181 
1182 
1183 void TemplateTable::sastore() {
1184   castore();
1185 }
1186 
1187 void TemplateTable::istore(int n) {
1188   transition(itos, vtos);
1189   __ movl(iaddress(n), rax);
1190 }
1191 
1192 void TemplateTable::lstore(int n) {
1193   transition(ltos, vtos);
1194   __ movptr(laddress(n), rax);
1195   NOT_LP64(__ movptr(haddress(n), rdx));
1196 }
1197 
1198 void TemplateTable::fstore(int n) {
1199   transition(ftos, vtos);


2835   __ verify_oop(r);
2836 }
2837 
2838 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2839   transition(vtos, vtos);
2840 
2841   const Register cache = rcx;
2842   const Register index = rdx;
2843   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2844   const Register off   = rbx;
2845   const Register flags = rax;
2846   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2847 
2848   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2849   jvmti_post_field_access(cache, index, is_static, false);
2850   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2851 
2852   if (!is_static) pop_and_check_object(obj);
2853 
2854   const Address field(obj, off, Address::times_1, 0*wordSize);
2855   NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
2856 
2857   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2858 
2859   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2860   // Make sure we don't need to mask edx after the above shift
2861   assert(btos == 0, "change code, btos != 0");
2862 
2863   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2864 
2865   __ jcc(Assembler::notZero, notByte);
2866   // btos
2867   __ load_signed_byte(rax, field);
2868   __ push(btos);
2869   // Rewrite bytecode to be faster
2870   if (!is_static && rc == may_rewrite) {
2871     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2872   }
2873   __ jmp(Done);
2874 
2875   __ bind(notByte);
2876   __ cmpl(flags, ztos);
2877   __ jcc(Assembler::notEqual, notBool);
2878 
2879   // ztos (same code as btos)
2880   __ load_signed_byte(rax, field);
2881   __ push(ztos);
2882   // Rewrite bytecode to be faster
2883   if (!is_static && rc == may_rewrite) {
2884     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2885     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2886   }
2887   __ jmp(Done);
2888 
2889   __ bind(notBool);
2890   __ cmpl(flags, atos);
2891   __ jcc(Assembler::notEqual, notObj);
2892   // atos
2893   do_oop_load(_masm, field, rax);
2894   __ push(atos);
2895   if (!is_static && rc == may_rewrite) {
2896     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2897   }
2898   __ jmp(Done);
2899 
2900   __ bind(notObj);
2901   __ cmpl(flags, itos);
2902   __ jcc(Assembler::notEqual, notInt);
2903   // itos
2904   __ movl(rax, field);
2905   __ push(itos);
2906   // Rewrite bytecode to be faster
2907   if (!is_static && rc == may_rewrite) {
2908     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2909   }
2910   __ jmp(Done);
2911 
2912   __ bind(notInt);
2913   __ cmpl(flags, ctos);
2914   __ jcc(Assembler::notEqual, notChar);
2915   // ctos
2916   __ load_unsigned_short(rax, field);
2917   __ push(ctos);
2918   // Rewrite bytecode to be faster
2919   if (!is_static && rc == may_rewrite) {
2920     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2921   }
2922   __ jmp(Done);
2923 
2924   __ bind(notChar);
2925   __ cmpl(flags, stos);
2926   __ jcc(Assembler::notEqual, notShort);
2927   // stos
2928   __ load_signed_short(rax, field);
2929   __ push(stos);
2930   // Rewrite bytecode to be faster
2931   if (!is_static && rc == may_rewrite) {
2932     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2933   }
2934   __ jmp(Done);
2935 
2936   __ bind(notShort);
2937   __ cmpl(flags, ltos);
2938   __ jcc(Assembler::notEqual, notLong);
2939   // ltos
2940 
2941 #ifndef _LP64
2942   // Generate code as if volatile.  There just aren't enough registers to
2943   // save that information and this code is faster than the test.
2944   __ fild_d(field);                // Must load atomically
2945   __ subptr(rsp,2*wordSize);    // Make space for store
2946   __ fistp_d(Address(rsp,0));
2947   __ pop(rax);
2948   __ pop(rdx);
2949 #else
2950   __ movq(rax, field);
2951 #endif
2952 
2953   __ push(ltos);
2954   // Rewrite bytecode to be faster
2955   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2956   __ jmp(Done);
2957 
2958   __ bind(notLong);
2959   __ cmpl(flags, ftos);
2960   __ jcc(Assembler::notEqual, notFloat);
2961   // ftos
2962 
2963   __ load_float(field);
2964   __ push(ftos);
2965   // Rewrite bytecode to be faster
2966   if (!is_static && rc == may_rewrite) {
2967     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2968   }
2969   __ jmp(Done);
2970 
2971   __ bind(notFloat);
2972 #ifdef ASSERT
2973   __ cmpl(flags, dtos);
2974   __ jcc(Assembler::notEqual, notDouble);
2975 #endif
2976   // dtos
2977   __ load_double(field);
2978   __ push(dtos);
2979   // Rewrite bytecode to be faster
2980   if (!is_static && rc == may_rewrite) {
2981     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2982   }
2983 #ifdef ASSERT
2984   __ jmp(Done);
2985 
2986 
2987   __ bind(notDouble);
2988   __ stop("Bad state");
2989 #endif
2990 
2991   __ bind(Done);
2992   // [jk] not needed currently
2993   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2994   //                                              Assembler::LoadStore));
2995 }
2996 
2997 void TemplateTable::getfield(int byte_no) {


3116   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3117   __ andl(rdx, 0x1);
3118 
3119   // field addresses
3120   const Address field(obj, off, Address::times_1, 0*wordSize);
3121   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3122 
3123   Label notByte, notBool, notInt, notShort, notChar,
3124         notLong, notFloat, notObj, notDouble;
3125 
3126   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3127 
3128   assert(btos == 0, "change code, btos != 0");
3129   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3130   __ jcc(Assembler::notZero, notByte);
3131 
3132   // btos
3133   {
3134     __ pop(btos);
3135     if (!is_static) pop_and_check_object(obj);
3136     __ movb(field, rax);
3137     if (!is_static && rc == may_rewrite) {
3138       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3139     }
3140     __ jmp(Done);
3141   }
3142 
3143   __ bind(notByte);
3144   __ cmpl(flags, ztos);
3145   __ jcc(Assembler::notEqual, notBool);
3146 
3147   // ztos
3148   {
3149     __ pop(ztos);
3150     if (!is_static) pop_and_check_object(obj);
3151     __ andl(rax, 0x1);
3152     __ movb(field, rax);
3153     if (!is_static && rc == may_rewrite) {
3154       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3155     }
3156     __ jmp(Done);
3157   }
3158 
3159   __ bind(notBool);
3160   __ cmpl(flags, atos);
3161   __ jcc(Assembler::notEqual, notObj);
3162 
3163   // atos
3164   {
3165     __ pop(atos);
3166     if (!is_static) pop_and_check_object(obj);
3167     // Store into the field
3168     do_oop_store(_masm, field, rax);
3169     if (!is_static && rc == may_rewrite) {
3170       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3171     }
3172     __ jmp(Done);
3173   }
3174 
3175   __ bind(notObj);
3176   __ cmpl(flags, itos);
3177   __ jcc(Assembler::notEqual, notInt);
3178 
3179   // itos
3180   {
3181     __ pop(itos);
3182     if (!is_static) pop_and_check_object(obj);
3183     __ movl(field, rax);
3184     if (!is_static && rc == may_rewrite) {
3185       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3186     }
3187     __ jmp(Done);
3188   }
3189 
3190   __ bind(notInt);
3191   __ cmpl(flags, ctos);
3192   __ jcc(Assembler::notEqual, notChar);
3193 
3194   // ctos
3195   {
3196     __ pop(ctos);
3197     if (!is_static) pop_and_check_object(obj);
3198     __ movw(field, rax);
3199     if (!is_static && rc == may_rewrite) {
3200       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3201     }
3202     __ jmp(Done);
3203   }
3204 
3205   __ bind(notChar);
3206   __ cmpl(flags, stos);
3207   __ jcc(Assembler::notEqual, notShort);
3208 
3209   // stos
3210   {
3211     __ pop(stos);
3212     if (!is_static) pop_and_check_object(obj);
3213     __ movw(field, rax);
3214     if (!is_static && rc == may_rewrite) {
3215       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3216     }
3217     __ jmp(Done);
3218   }
3219 
3220   __ bind(notShort);
3221   __ cmpl(flags, ltos);
3222   __ jcc(Assembler::notEqual, notLong);
3223 
3224   // ltos
3225 #ifdef _LP64
3226   {
3227     __ pop(ltos);
3228     if (!is_static) pop_and_check_object(obj);
3229     __ movq(field, rax);
3230     if (!is_static && rc == may_rewrite) {
3231       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3232     }
3233     __ jmp(Done);
3234   }
3235 #else
3236   {
3237     Label notVolatileLong;
3238     __ testl(rdx, rdx);
3239     __ jcc(Assembler::zero, notVolatileLong);
3240 
3241     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
3242     if (!is_static) pop_and_check_object(obj);
3243 
3244     // Replace with real volatile test
3245     __ push(rdx);
3246     __ push(rax);                 // Must update atomically with FIST
3247     __ fild_d(Address(rsp,0));    // So load into FPU register
3248     __ fistp_d(field);            // and put into memory atomically
3249     __ addptr(rsp, 2*wordSize);
3250     // volatile_barrier();
3251     volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3252                                                  Assembler::StoreStore));
3253     // Don't rewrite volatile version
3254     __ jmp(notVolatile);
3255 
3256     __ bind(notVolatileLong);
3257 
3258     __ pop(ltos);  // overwrites rdx
3259     if (!is_static) pop_and_check_object(obj);
3260     __ movptr(hi, rdx);
3261     __ movptr(field, rax);
3262     // Don't rewrite to _fast_lputfield for potential volatile case.
3263     __ jmp(notVolatile);
3264   }
3265 #endif // _LP64
3266 
3267   __ bind(notLong);
3268   __ cmpl(flags, ftos);
3269   __ jcc(Assembler::notEqual, notFloat);
3270 
3271   // ftos
3272   {
3273     __ pop(ftos);
3274     if (!is_static) pop_and_check_object(obj);
3275     __ store_float(field);
3276     if (!is_static && rc == may_rewrite) {
3277       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3278     }
3279     __ jmp(Done);
3280   }
3281 
3282   __ bind(notFloat);
3283 #ifdef ASSERT
3284   __ cmpl(flags, dtos);
3285   __ jcc(Assembler::notEqual, notDouble);
3286 #endif
3287 
3288   // dtos
3289   {
3290     __ pop(dtos);
3291     if (!is_static) pop_and_check_object(obj);
3292     __ store_double(field);
3293     if (!is_static && rc == may_rewrite) {
3294       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3295     }
3296   }
3297 
3298 #ifdef ASSERT
3299   __ jmp(Done);
3300 
3301   __ bind(notDouble);
3302   __ stop("Bad state");
3303 #endif
3304 
3305   __ bind(Done);
3306 
3307   // Check for volatile store
3308   __ testl(rdx, rdx);
3309   __ jcc(Assembler::zero, notVolatile);
3310   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3311                                                Assembler::StoreStore));
3312   __ bind(notVolatile);


3405   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3406   //                                              Assembler::StoreStore));
3407 
3408   Label notVolatile;
3409   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3410   __ andl(rdx, 0x1);
3411 
3412   // Get object from stack
3413   pop_and_check_object(rcx);
3414 
3415   // field address
3416   const Address field(rcx, rbx, Address::times_1);
3417 
3418   // access field
3419   switch (bytecode()) {
3420   case Bytecodes::_fast_aputfield:
3421     do_oop_store(_masm, field, rax);
3422     break;
3423   case Bytecodes::_fast_lputfield:
3424 #ifdef _LP64
3425   __ movq(field, rax);
3426 #else
3427   __ stop("should not be rewritten");
3428 #endif
3429     break;
3430   case Bytecodes::_fast_iputfield:
3431     __ movl(field, rax);
3432     break;
3433   case Bytecodes::_fast_zputfield:
3434     __ andl(rax, 0x1);  // boolean is true if LSB is 1
3435     // fall through to bputfield
3436   case Bytecodes::_fast_bputfield:
3437     __ movb(field, rax);
3438     break;
3439   case Bytecodes::_fast_sputfield:
3440     // fall through

3441   case Bytecodes::_fast_cputfield:
3442     __ movw(field, rax);
3443     break;
3444   case Bytecodes::_fast_fputfield:
3445     __ store_float(field);
3446     break;
3447   case Bytecodes::_fast_dputfield:
3448     __ store_double(field);
3449     break;
3450   default:
3451     ShouldNotReachHere();
3452   }
3453 
3454   // Check for volatile store
3455   __ testl(rdx, rdx);
3456   __ jcc(Assembler::zero, notVolatile);
3457   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3458                                                Assembler::StoreStore));
3459   __ bind(notVolatile);
3460 }
3461 
3462 void TemplateTable::fast_accessfield(TosState state) {
3463   transition(atos, state);
3464 
3465   // Do the JVMTI work here to avoid disturbing the register state below
3466   if (JvmtiExport::can_post_field_access()) {
3467     // Check to see if a field access watch has been set before we
3468     // take the time to call into the VM.


3495   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3496   //   __ andl(rdx, 0x1);
3497   // }
3498   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3499                          in_bytes(ConstantPoolCache::base_offset() +
3500                                   ConstantPoolCacheEntry::f2_offset())));
3501 
3502   // rax: object
3503   __ verify_oop(rax);
3504   __ null_check(rax);
3505   Address field(rax, rbx, Address::times_1);
3506 
3507   // access field
3508   switch (bytecode()) {
3509   case Bytecodes::_fast_agetfield:
3510     do_oop_load(_masm, field, rax);
3511     __ verify_oop(rax);
3512     break;
3513   case Bytecodes::_fast_lgetfield:
3514 #ifdef _LP64
3515   __ movq(rax, field);
3516 #else
3517   __ stop("should not be rewritten");
3518 #endif
3519     break;
3520   case Bytecodes::_fast_igetfield:
3521     __ movl(rax, field);
3522     break;
3523   case Bytecodes::_fast_bgetfield:
3524     __ movsbl(rax, field);
3525     break;
3526   case Bytecodes::_fast_sgetfield:
3527     __ load_signed_short(rax, field);
3528     break;
3529   case Bytecodes::_fast_cgetfield:
3530     __ load_unsigned_short(rax, field);
3531     break;
3532   case Bytecodes::_fast_fgetfield:
3533     __ load_float(field);
3534     break;
3535   case Bytecodes::_fast_dgetfield:
3536     __ load_double(field);
3537     break;
3538   default:
3539     ShouldNotReachHere();
3540   }
3541   // [jk] not needed currently
3542   // if (os::is_MP()) {
3543   //   Label notVolatile;
3544   //   __ testl(rdx, rdx);
3545   //   __ jcc(Assembler::zero, notVolatile);
3546   //   __ membar(Assembler::LoadLoad);
3547   //   __ bind(notVolatile);
3548   //};
3549 }
3550 
3551 void TemplateTable::fast_xaccess(TosState state) {
3552   transition(vtos, state);
3553 
3554   // get receiver
3555   __ movptr(rax, aaddress(0));
3556   // access constant pool cache
3557   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3558   __ movptr(rbx,
3559             Address(rcx, rdx, Address::times_ptr,
3560                     in_bytes(ConstantPoolCache::base_offset() +
3561                              ConstantPoolCacheEntry::f2_offset())));
3562   // make sure exception is reported in correct bcp range (getfield is
3563   // next instruction)
3564   __ increment(rbcp);
3565   __ null_check(rax);
3566   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3567   switch (state) {
3568   case itos:
3569     __ movl(rax, field);
3570     break;
3571   case atos:
3572     do_oop_load(_masm, field, rax);
3573     __ verify_oop(rax);
3574     break;
3575   case ftos:
3576     __ load_float(field);
3577     break;
3578   default:
3579     ShouldNotReachHere();
3580   }
3581 
3582   // [jk] not needed currently
3583   // if (os::is_MP()) {
3584   //   Label notVolatile;
3585   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
3586   //                        in_bytes(ConstantPoolCache::base_offset() +
3587   //                                 ConstantPoolCacheEntry::flags_offset())));
3588   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3589   //   __ testl(rdx, 0x1);
3590   //   __ jcc(Assembler::zero, notVolatile);
3591   //   __ membar(Assembler::LoadLoad);
3592   //   __ bind(notVolatile);
3593   // }
3594 
3595   __ decrement(rbcp);
3596 }
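
The listing that follows is the patched version of the same line ranges. The mechanical pattern of the change, shown with iaload (both forms copied from the hunks on this page):

    // Before: a raw move that no BarrierSetAssembler can intercept.
    __ movl(rax, Address(rdx, rax,
                         Address::times_4,
                         arrayOopDesc::base_offset_in_bytes(T_INT)));

    // After: the same element access, now typed and decorated.
    __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, rax,
                      Address(rdx, rax, Address::times_4,
                              arrayOopDesc::base_offset_in_bytes(T_INT)),
                      noreg, noreg);

The two trailing noreg arguments are the temporary and thread registers a barrier implementation may need; the plain templates pass noreg.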




 753   // check index
 754   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 755   if (index != rbx) {
 756     // ??? convention: move aberrant index into rbx for exception message
 757     assert(rbx != array, "different registers");
 758     __ movl(rbx, index);
 759   }
 760   Label skip;
 761   __ jccb(Assembler::below, skip);
 762   // Pass array to create more detailed exceptions.
 763   __ mov(NOT_LP64(rax) LP64_ONLY(c_rarg1), array);
 764   __ jump(ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 765   __ bind(skip);
 766 }
 767 
 768 void TemplateTable::iaload() {
 769   transition(itos, itos);
 770   // rax: index
 771   // rdx: array
 772   index_check(rdx, rax); // kills rbx
 773   __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, rax,
 774                     Address(rdx, rax, Address::times_4,
 775                             arrayOopDesc::base_offset_in_bytes(T_INT)),
 776                     noreg, noreg);
 777 }
 778 
 779 void TemplateTable::laload() {
 780   transition(itos, ltos);
 781   // rax: index
 782   // rdx: array
 783   index_check(rdx, rax); // kills rbx
 784   NOT_LP64(__ mov(rbx, rax));
 785   // rbx: index
 786   __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, noreg /* ltos */,
 787                     Address(rdx, rbx, Address::times_8,
 788                             arrayOopDesc::base_offset_in_bytes(T_LONG)),
 789                     noreg, noreg);
 790 }
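
Note for laload: the old x86_32 code loaded the low word into rax and the high word into rdx with two explicit moves; the new call passes noreg /* ltos */ and a single element address. Presumably the two-word sequence now lives in the default BarrierSetAssembler::load_at; a sketch under that assumption (the real code must take care because the address's base register here, rdx, is also the ltos high-word register):

    // Sketch (assumption): default T_LONG load in BarrierSetAssembler::load_at.
    case T_LONG:
#ifdef _LP64
      __ movq(rax, src);                      // ltos value lives in rax
#else
      __ movl(rax, src);                      // low word first; rax is free here
      __ movl(rdx, src.plus_disp(wordSize));  // high word; clobbers the base last
#endif
      break;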
 791 
 792 
 793 
 794 void TemplateTable::faload() {
 795   transition(itos, ftos);
 796   // rax: index
 797   // rdx: array
 798   index_check(rdx, rax); // kills rbx
 799   __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, noreg /* ftos */,
 800                     Address(rdx, rax,
 801                             Address::times_4,
 802                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 803                     noreg, noreg);
 804 }
 805 
 806 void TemplateTable::daload() {
 807   transition(itos, dtos);
 808   // rax: index
 809   // rdx: array
 810   index_check(rdx, rax); // kills rbx
 811   __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, noreg /* dtos */,
 812                     Address(rdx, rax,
 813                             Address::times_8,
 814                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 815                     noreg, noreg);
 816 }
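
faload and daload pass noreg for the destination because the ftos/dtos value does not live in a general register: it materializes in xmm0 when UseSSE is high enough, otherwise on the x87 stack, exactly as the old load_float/load_double helpers arranged. A sketch of the assumed default handling for T_FLOAT:

    // Sketch (assumption): default T_FLOAT load; the result lands in the ftos
    // location, matching what MacroAssembler::load_float used to do here.
    case T_FLOAT:
      if (UseSSE >= 1) {
        __ movflt(xmm0, src);   // SSE scalar float load
      } else {
        __ fld_s(src);          // x87 load onto the FPU stack
      }
      break;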
 817 
 818 void TemplateTable::aaload() {
 819   transition(itos, atos);
 820   // rax: index
 821   // rdx: array
 822   index_check(rdx, rax); // kills rbx
 823   do_oop_load(_masm,
 824               Address(rdx, rax,
 825                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 826                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 827               rax,
 828               IN_HEAP_ARRAY);
 829 }
 830 
 831 void TemplateTable::baload() {
 832   transition(itos, itos);
 833   // rax: index
 834   // rdx: array
 835   index_check(rdx, rax); // kills rbx
 836   __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, rax,
 837                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 838                     noreg, noreg);
 839 }
 840 
 841 void TemplateTable::caload() {
 842   transition(itos, itos);
 843   // rax: index
 844   // rdx: array
 845   index_check(rdx, rax); // kills rbx
 846   __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, rax,
 847                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 848                     noreg, noreg);
 849 }
 850 
 851 // iload followed by caload frequent pair
 852 void TemplateTable::fast_icaload() {
 853   transition(vtos, itos);
 854   // load index out of locals
 855   locals_index(rbx);
 856   __ movl(rax, iaddress(rbx));
 857 
 858   // rax: index
 859   // rdx: array
 860   index_check(rdx, rax); // kills rbx
 861   __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, rax,
 862                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 863                     noreg, noreg);
 864 }
 865 
 866 
 867 void TemplateTable::saload() {
 868   transition(itos, itos);
 869   // rax: index
 870   // rdx: array
 871   index_check(rdx, rax); // kills rbx
 872   __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, rax,
 873                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)),
 874                     noreg, noreg);
 875 }
 876 
 877 void TemplateTable::iload(int n) {
 878   transition(vtos, itos);
 879   __ movl(rax, iaddress(n));
 880 }
 881 
 882 void TemplateTable::lload(int n) {
 883   transition(vtos, ltos);
 884   __ movptr(rax, laddress(n));
 885   NOT_LP64(__ movptr(rdx, haddress(n)));
 886 }
 887 
 888 void TemplateTable::fload(int n) {
 889   transition(vtos, ftos);
 890   __ load_float(faddress(n));
 891 }
 892 
 893 void TemplateTable::dload(int n) {
 894   transition(vtos, dtos);


1046   __ movdbl(daddress(rbx), xmm0);
1047 #else
1048   wide_lstore();
1049 #endif
1050 }
1051 
1052 void TemplateTable::wide_astore() {
1053   transition(vtos, vtos);
1054   __ pop_ptr(rax);
1055   locals_index_wide(rbx);
1056   __ movptr(aaddress(rbx), rax);
1057 }
1058 
1059 void TemplateTable::iastore() {
1060   transition(itos, vtos);
1061   __ pop_i(rbx);
1062   // rax: value
1063   // rbx: index
1064   // rdx: array
1065   index_check(rdx, rbx); // prefer index in rbx
1066   __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY,
1067                      Address(rdx, rbx, Address::times_4,
1068                              arrayOopDesc::base_offset_in_bytes(T_INT)),
1069                      rax, noreg, noreg);
1070 }
1071 
1072 void TemplateTable::lastore() {
1073   transition(ltos, vtos);
1074   __ pop_i(rbx);
1075   // rax: low(value)
1076   // rcx: array
1077   // rdx: high(value)
1078   index_check(rcx, rbx);  // prefer index in rbx
1079   // rbx: index
1080   __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY,
1081                      Address(rcx, rbx, Address::times_8,
1082                              arrayOopDesc::base_offset_in_bytes(T_LONG)),
1083                      noreg /* ltos */, noreg, noreg);
1084 }
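
lastore likewise passes noreg /* ltos */: on x86_32 the value sits in rax:rdx (see the register comments above), on x86_64 in rax. The two-word store that used to be inlined here presumably moved into the default BarrierSetAssembler::store_at; a sketch under that assumption, with dst naming the Address operand of the assumed store_at signature:

    // Sketch (assumption): default T_LONG store in BarrierSetAssembler::store_at.
    case T_LONG:
#ifdef _LP64
      __ movq(dst, rax);                      // ltos value is in rax
#else
      __ movptr(dst, rax);                    // low word
      __ movl(dst.plus_disp(wordSize), rdx);  // high word
#endif
      break;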
1085 
1086 
1087 void TemplateTable::fastore() {
1088   transition(ftos, vtos);
1089   __ pop_i(rbx);
1090   // value is in UseSSE >= 1 ? xmm0 : ST(0)
1091   // rbx:  index
1092   // rdx:  array
1093   index_check(rdx, rbx); // prefer index in rbx
1094   __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY,
1095                      Address(rdx, rbx, Address::times_4,
1096                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1097                      noreg /* ftos */, noreg, noreg);
1098 }
1099 
1100 void TemplateTable::dastore() {
1101   transition(dtos, vtos);
1102   __ pop_i(rbx);
1103   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1104   // rbx:  index
1105   // rdx:  array
1106   index_check(rdx, rbx); // prefer index in rbx
1107   __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY,
1108                      Address(rdx, rbx, Address::times_8,
1109                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1110                      noreg /* dtos */, noreg, noreg);
1111 }
1112 
1113 void TemplateTable::aastore() {
1114   Label is_null, ok_is_subtype, done;
1115   transition(vtos, vtos);
1116   // stack: ..., array, index, value
1117   __ movptr(rax, at_tos());    // value
1118   __ movl(rcx, at_tos_p1()); // index
1119   __ movptr(rdx, at_tos_p2()); // array
1120 
1121   Address element_address(rdx, rcx,
1122                           UseCompressedOops ? Address::times_4 : Address::times_ptr,
1123                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1124 
1125   index_check_without_pop(rdx, rcx);     // kills rbx
1126   __ testptr(rax, rax);
1127   __ jcc(Assembler::zero, is_null);
1128 
1129   // Move subklass into rbx
1130   __ load_klass(rbx, rax);


1163   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1164 }
1165 
1166 void TemplateTable::bastore() {
1167   transition(itos, vtos);
1168   __ pop_i(rbx);
1169   // rax: value
1170   // rbx: index
1171   // rdx: array
1172   index_check(rdx, rbx); // prefer index in rbx
1173   // Need to check whether array is boolean or byte
1174   // since both types share the bastore bytecode.
1175   __ load_klass(rcx, rdx);
1176   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1177   int diffbit = Klass::layout_helper_boolean_diffbit();
1178   __ testl(rcx, diffbit);
1179   Label L_skip;
1180   __ jccb(Assembler::zero, L_skip);
1181   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1182   __ bind(L_skip);
1183   __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY,
1184                      Address(rdx, rbx, Address::times_1,
1185                              arrayOopDesc::base_offset_in_bytes(T_BYTE)),
1186                      rax, noreg, noreg);
1187 }
1188 
1189 void TemplateTable::castore() {
1190   transition(itos, vtos);
1191   __ pop_i(rbx);
1192   // rax: value
1193   // rbx: index
1194   // rdx: array
1195   index_check(rdx, rbx);  // prefer index in rbx
1196   __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY,
1197                      Address(rdx, rbx, Address::times_2,
1198                              arrayOopDesc::base_offset_in_bytes(T_CHAR)),
1199                      rax, noreg, noreg);
1200 }
1201 
1202 
1203 void TemplateTable::sastore() {
1204   castore();
1205 }
1206 
1207 void TemplateTable::istore(int n) {
1208   transition(itos, vtos);
1209   __ movl(iaddress(n), rax);
1210 }
1211 
1212 void TemplateTable::lstore(int n) {
1213   transition(ltos, vtos);
1214   __ movptr(laddress(n), rax);
1215   NOT_LP64(__ movptr(haddress(n), rdx));
1216 }
1217 
1218 void TemplateTable::fstore(int n) {
1219   transition(ftos, vtos);


2855   __ verify_oop(r);
2856 }
2857 
2858 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2859   transition(vtos, vtos);
2860 
2861   const Register cache = rcx;
2862   const Register index = rdx;
2863   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2864   const Register off   = rbx;
2865   const Register flags = rax;
2866   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2867 
2868   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2869   jvmti_post_field_access(cache, index, is_static, false);
2870   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2871 
2872   if (!is_static) pop_and_check_object(obj);
2873 
2874   const Address field(obj, off, Address::times_1, 0*wordSize);
2875 
2876   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2877 
2878   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2879   // Make sure we don't need to mask edx after the above shift
2880   assert(btos == 0, "change code, btos != 0");
2881 
2882   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2883 
2884   __ jcc(Assembler::notZero, notByte);
2885   // btos
2886   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2887   __ push(btos);
2888   // Rewrite bytecode to be faster
2889   if (!is_static && rc == may_rewrite) {
2890     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2891   }
2892   __ jmp(Done);
2893 
2894   __ bind(notByte);
2895   __ cmpl(flags, ztos);
2896   __ jcc(Assembler::notEqual, notBool);
2897 
2898   // ztos (same code as btos)
2899   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2900   __ push(ztos);
2901   // Rewrite bytecode to be faster
2902   if (!is_static && rc == may_rewrite) {
2903     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2904     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2905   }
2906   __ jmp(Done);
2907 
2908   __ bind(notBool);
2909   __ cmpl(flags, atos);
2910   __ jcc(Assembler::notEqual, notObj);
2911   // atos
2912   do_oop_load(_masm, field, rax);
2913   __ push(atos);
2914   if (!is_static && rc == may_rewrite) {
2915     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2916   }
2917   __ jmp(Done);
2918 
2919   __ bind(notObj);
2920   __ cmpl(flags, itos);
2921   __ jcc(Assembler::notEqual, notInt);
2922   // itos
2923   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2924   __ push(itos);
2925   // Rewrite bytecode to be faster
2926   if (!is_static && rc == may_rewrite) {
2927     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2928   }
2929   __ jmp(Done);
2930 
2931   __ bind(notInt);
2932   __ cmpl(flags, ctos);
2933   __ jcc(Assembler::notEqual, notChar);
2934   // ctos
2935   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2936   __ push(ctos);
2937   // Rewrite bytecode to be faster
2938   if (!is_static && rc == may_rewrite) {
2939     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2940   }
2941   __ jmp(Done);
2942 
2943   __ bind(notChar);
2944   __ cmpl(flags, stos);
2945   __ jcc(Assembler::notEqual, notShort);
2946   // stos
2947   __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
2948   __ push(stos);
2949   // Rewrite bytecode to be faster
2950   if (!is_static && rc == may_rewrite) {
2951     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2952   }
2953   __ jmp(Done);
2954 
2955   __ bind(notShort);
2956   __ cmpl(flags, ltos);
2957   __ jcc(Assembler::notEqual, notLong);
2958   // ltos
2959   // Generate code as if volatile (x86_32).  There just aren't enough registers to
2960   // save that information and this code is faster than the test.
2961   __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);
2962   __ push(ltos);
2963   // Rewrite bytecode to be faster
2964   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2965   __ jmp(Done);
2966 
2967   __ bind(notLong);
2968   __ cmpl(flags, ftos);
2969   __ jcc(Assembler::notEqual, notFloat);
2970   // ftos
2971 
2972   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2973   __ push(ftos);
2974   // Rewrite bytecode to be faster
2975   if (!is_static && rc == may_rewrite) {
2976     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2977   }
2978   __ jmp(Done);
2979 
2980   __ bind(notFloat);
2981 #ifdef ASSERT
2982   __ cmpl(flags, dtos);
2983   __ jcc(Assembler::notEqual, notDouble);
2984 #endif
2985   // dtos
2986   __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
2987   __ push(dtos);
2988   // Rewrite bytecode to be faster
2989   if (!is_static && rc == may_rewrite) {
2990     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2991   }
2992 #ifdef ASSERT
2993   __ jmp(Done);
2994 
2995 
2996   __ bind(notDouble);
2997   __ stop("Bad state");
2998 #endif
2999 
3000   __ bind(Done);
3001   // [jk] not needed currently
3002   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3003   //                                              Assembler::LoadStore));
3004 }
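
The interesting case above is ltos: on x86_32 a volatile long field must be read atomically, and the old code inlined an FPU round trip unconditionally because there are not enough registers to test the volatile bit first. The new call adds MO_RELAXED, delegating the choice of an atomic sequence to the barrier assembler. A sketch of the assumed x86_32 atomic path, which is the same trick the old inline code on this page used:

    // Sketch (assumption): atomic 64-bit load on x86_32, as the old code did.
    __ fild_d(src);                 // 64-bit FPU load is atomic
    __ subptr(rsp, 2 * wordSize);   // make room on the stack
    __ fistp_d(Address(rsp, 0));    // 64-bit FPU store, also atomic
    __ pop(rax);                    // low word  -> rax
    __ pop(rdx);                    // high word -> rdx (ltos convention)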
3005 
3006 void TemplateTable::getfield(int byte_no) {


3125   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3126   __ andl(rdx, 0x1);
3127 
3128   // field addresses
3129   const Address field(obj, off, Address::times_1, 0*wordSize);
3130   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3131 
3132   Label notByte, notBool, notInt, notShort, notChar,
3133         notLong, notFloat, notObj, notDouble;
3134 
3135   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3136 
3137   assert(btos == 0, "change code, btos != 0");
3138   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3139   __ jcc(Assembler::notZero, notByte);
3140 
3141   // btos
3142   {
3143     __ pop(btos);
3144     if (!is_static) pop_and_check_object(obj);
3145     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3146     if (!is_static && rc == may_rewrite) {
3147       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3148     }
3149     __ jmp(Done);
3150   }
3151 
3152   __ bind(notByte);
3153   __ cmpl(flags, ztos);
3154   __ jcc(Assembler::notEqual, notBool);
3155 
3156   // ztos
3157   {
3158     __ pop(ztos);
3159     if (!is_static) pop_and_check_object(obj);
3160     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3161     if (!is_static && rc == may_rewrite) {
3162       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3163     }
3164     __ jmp(Done);
3165   }
3166 
3167   __ bind(notBool);
3168   __ cmpl(flags, atos);
3169   __ jcc(Assembler::notEqual, notObj);
3170 
3171   // atos
3172   {
3173     __ pop(atos);
3174     if (!is_static) pop_and_check_object(obj);
3175     // Store into the field
3176     do_oop_store(_masm, field, rax);
3177     if (!is_static && rc == may_rewrite) {
3178       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3179     }
3180     __ jmp(Done);
3181   }
3182 
3183   __ bind(notObj);
3184   __ cmpl(flags, itos);
3185   __ jcc(Assembler::notEqual, notInt);
3186 
3187   // itos
3188   {
3189     __ pop(itos);
3190     if (!is_static) pop_and_check_object(obj);
3191     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3192     if (!is_static && rc == may_rewrite) {
3193       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3194     }
3195     __ jmp(Done);
3196   }
3197 
3198   __ bind(notInt);
3199   __ cmpl(flags, ctos);
3200   __ jcc(Assembler::notEqual, notChar);
3201 
3202   // ctos
3203   {
3204     __ pop(ctos);
3205     if (!is_static) pop_and_check_object(obj);
3206     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3207     if (!is_static && rc == may_rewrite) {
3208       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3209     }
3210     __ jmp(Done);
3211   }
3212 
3213   __ bind(notChar);
3214   __ cmpl(flags, stos);
3215   __ jcc(Assembler::notEqual, notShort);
3216 
3217   // stos
3218   {
3219     __ pop(stos);
3220     if (!is_static) pop_and_check_object(obj);
3221     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3222     if (!is_static && rc == may_rewrite) {
3223       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3224     }
3225     __ jmp(Done);
3226   }
3227 
3228   __ bind(notShort);
3229   __ cmpl(flags, ltos);
3230   __ jcc(Assembler::notEqual, notLong);
3231 
3232   // ltos
3233 #ifdef _LP64
3234   {
3235     __ pop(ltos);
3236     if (!is_static) pop_and_check_object(obj);
3237     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3238     if (!is_static && rc == may_rewrite) {
3239       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3240     }
3241     __ jmp(Done);
3242   }
3243 #else
3244   {
3245     Label notVolatileLong;
3246     __ testl(rdx, rdx);
3247     __ jcc(Assembler::zero, notVolatileLong);
3248 
3249     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
3250     if (!is_static) pop_and_check_object(obj);
3251 
3252     // Replace with real volatile test
3253     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
3254     // volatile_barrier();
3255     volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3256                                                  Assembler::StoreStore));
3257     // Don't rewrite volatile version
3258     __ jmp(notVolatile);
3259 
3260     __ bind(notVolatileLong);
3261 
3262     __ pop(ltos);  // overwrites rdx
3263     if (!is_static) pop_and_check_object(obj);
3264     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3265     // Don't rewrite to _fast_lputfield for potential volatile case.
3266     __ jmp(notVolatile);
3267   }
3268 #endif // _LP64
3269 
3270   __ bind(notLong);
3271   __ cmpl(flags, ftos);
3272   __ jcc(Assembler::notEqual, notFloat);
3273 
3274   // ftos
3275   {
3276     __ pop(ftos);
3277     if (!is_static) pop_and_check_object(obj);
3278     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3279     if (!is_static && rc == may_rewrite) {
3280       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3281     }
3282     __ jmp(Done);
3283   }
3284 
3285   __ bind(notFloat);
3286 #ifdef ASSERT
3287   __ cmpl(flags, dtos);
3288   __ jcc(Assembler::notEqual, notDouble);
3289 #endif
3290 
3291   // dtos
3292   {
3293     __ pop(dtos);
3294     if (!is_static) pop_and_check_object(obj);
3295     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
3296     if (!is_static && rc == may_rewrite) {
3297       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3298     }
3299   }
3300 
3301 #ifdef ASSERT
3302   __ jmp(Done);
3303 
3304   __ bind(notDouble);
3305   __ stop("Bad state");
3306 #endif
3307 
3308   __ bind(Done);
3309 
3310   // Check for volatile store
3311   __ testl(rdx, rdx);
3312   __ jcc(Assembler::zero, notVolatile);
3313   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3314                                                Assembler::StoreStore));
3315   __ bind(notVolatile);


3408   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3409   //                                              Assembler::StoreStore));
3410 
3411   Label notVolatile;
3412   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3413   __ andl(rdx, 0x1);
3414 
3415   // Get object from stack
3416   pop_and_check_object(rcx);
3417 
3418   // field address
3419   const Address field(rcx, rbx, Address::times_1);
3420 
3421   // access field
3422   switch (bytecode()) {
3423   case Bytecodes::_fast_aputfield:
3424     do_oop_store(_masm, field, rax);
3425     break;
3426   case Bytecodes::_fast_lputfield:
3427 #ifdef _LP64
3428     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3429 #else
3430     __ stop("should not be rewritten");
3431 #endif
3432     break;
3433   case Bytecodes::_fast_iputfield:
3434     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3435     break;
3436   case Bytecodes::_fast_zputfield:
3437     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3438     break;
3439   case Bytecodes::_fast_bputfield:
3440     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3441     break;
3442   case Bytecodes::_fast_sputfield:
3443     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3444     break;
3445   case Bytecodes::_fast_cputfield:
3446     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3447     break;
3448   case Bytecodes::_fast_fputfield:
3449     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3450     break;
3451   case Bytecodes::_fast_dputfield:
3452     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
3453     break;
3454   default:
3455     ShouldNotReachHere();
3456   }
3457 
3458   // Check for volatile store
3459   __ testl(rdx, rdx);
3460   __ jcc(Assembler::zero, notVolatile);
3461   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3462                                                Assembler::StoreStore));
3463   __ bind(notVolatile);
3464 }
3465 
3466 void TemplateTable::fast_accessfield(TosState state) {
3467   transition(atos, state);
3468 
3469   // Do the JVMTI work here to avoid disturbing the register state below
3470   if (JvmtiExport::can_post_field_access()) {
3471     // Check to see if a field access watch has been set before we
3472     // take the time to call into the VM.


3499   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3500   //   __ andl(rdx, 0x1);
3501   // }
3502   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3503                          in_bytes(ConstantPoolCache::base_offset() +
3504                                   ConstantPoolCacheEntry::f2_offset())));
3505 
3506   // rax: object
3507   __ verify_oop(rax);
3508   __ null_check(rax);
3509   Address field(rax, rbx, Address::times_1);
3510 
3511   // access field
3512   switch (bytecode()) {
3513   case Bytecodes::_fast_agetfield:
3514     do_oop_load(_masm, field, rax);
3515     __ verify_oop(rax);
3516     break;
3517   case Bytecodes::_fast_lgetfield:
3518 #ifdef _LP64
3519     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3520 #else
3521     __ stop("should not be rewritten");
3522 #endif
3523     break;
3524   case Bytecodes::_fast_igetfield:
3525     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3526     break;
3527   case Bytecodes::_fast_bgetfield:
3528     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3529     break;
3530   case Bytecodes::_fast_sgetfield:
3531     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3532     break;
3533   case Bytecodes::_fast_cgetfield:
3534     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3535     break;
3536   case Bytecodes::_fast_fgetfield:
3537     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3538     break;
3539   case Bytecodes::_fast_dgetfield:
3540     __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
3541     break;
3542   default:
3543     ShouldNotReachHere();
3544   }
3545   // [jk] not needed currently
3546   // if (os::is_MP()) {
3547   //   Label notVolatile;
3548   //   __ testl(rdx, rdx);
3549   //   __ jcc(Assembler::zero, notVolatile);
3550   //   __ membar(Assembler::LoadLoad);
3551   //   __ bind(notVolatile);
3552   //};
3553 }
3554 
3555 void TemplateTable::fast_xaccess(TosState state) {
3556   transition(vtos, state);
3557 
3558   // get receiver
3559   __ movptr(rax, aaddress(0));
3560   // access constant pool cache
3561   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3562   __ movptr(rbx,
3563             Address(rcx, rdx, Address::times_ptr,
3564                     in_bytes(ConstantPoolCache::base_offset() +
3565                              ConstantPoolCacheEntry::f2_offset())));
3566   // make sure exception is reported in correct bcp range (getfield is
3567   // next instruction)
3568   __ increment(rbcp);
3569   __ null_check(rax);
3570   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3571   switch (state) {
3572   case itos:
3573     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3574     break;
3575   case atos:
3576     do_oop_load(_masm, field, rax);
3577     __ verify_oop(rax);
3578     break;
3579   case ftos:
3580     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3581     break;
3582   default:
3583     ShouldNotReachHere();
3584   }
3585 
3586   // [jk] not needed currently
3587   // if (os::is_MP()) {
3588   //   Label notVolatile;
3589   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
3590   //                        in_bytes(ConstantPoolCache::base_offset() +
3591   //                                 ConstantPoolCacheEntry::flags_offset())));
3592   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3593   //   __ testl(rdx, 0x1);
3594   //   __ jcc(Assembler::zero, notVolatile);
3595   //   __ membar(Assembler::LoadLoad);
3596   //   __ bind(notVolatile);
3597   // }
3598 
3599   __ decrement(rbcp);
3600 }
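
Decorator usage across this page, for quick reference (each line abbreviates a call that appears verbatim above; element and field stand for the Address operands shown there):

    __ access_load_at (T_INT,  IN_HEAP | IN_HEAP_ARRAY, rax, element, noreg, noreg);  // array element
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);                    // instance/static field
    __ access_load_at (T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field,
                       noreg, noreg);                                                 // atomic 64-bit on x86_32

Oop accesses keep their own entry points (do_oop_load / do_oop_store), since those already went through the barrier interface before this change.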

