< prev index next >

src/hotspot/cpu/x86/templateTable_x86.cpp

Print this page
rev 50081 : [mq]: primitives2.patch


 750   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 751   // sign extend index for use by indexed load
 752   __ movl2ptr(index, index);
 753   // check index
 754   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 755   if (index != rbx) {
 756     // ??? convention: move aberrant index into rbx for exception message
 757     assert(rbx != array, "different registers");
 758     __ movl(rbx, index);
 759   }
 760   __ jump_cc(Assembler::aboveEqual,
 761              ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 762 }
 763 
 764 
// iaload: load an int element from an int[]. On entry the index is cached in
// rax (itos) and the arrayref is in rdx; on exit the element is in rax (itos).
 765 void TemplateTable::iaload() {
 766   transition(itos, itos);
 767   // rax: index
 768   // rdx: array
 769   index_check(rdx, rax); // kills rbx
  // resolve_for_read: hook on the array oop before the element access
  // (presumably a GC/barrier resolve — confirm against MacroAssembler).
 770   __ resolve_for_read(OOP_NOT_NULL, rdx);
 771   __ movl(rax, Address(rdx, rax,
 772                        Address::times_4,
 773                        arrayOopDesc::base_offset_in_bytes(T_INT)));

 774 }
 775 
// laload: load a long element from a long[]. Index in rax, arrayref in rdx.
// index_check leaves a copy of the index in rbx, so the 64-bit path can use
// rbx directly; on 32-bit the result goes into rax:rdx (low:high).
 776 void TemplateTable::laload() {
 777   transition(itos, ltos);
 778   // rax: index
 779   // rdx: array
 780   index_check(rdx, rax); // kills rbx
 781   NOT_LP64(__ mov(rbx, rax));
 782   // rbx,: index
 783   __ resolve_for_read(OOP_NOT_NULL, rdx);
 784   __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
 785   NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));

 786 }
 787 
 788 
 789 
// faload: load a float element from a float[]; result ends up in the ftos
// register (xmm0 or FPU stack, depending on UseSSE — see load_float).
 790 void TemplateTable::faload() {
 791   transition(itos, ftos);
 792   // rax: index
 793   // rdx: array
 794   index_check(rdx, rax); // kills rbx
 795   __ resolve_for_read(OOP_NOT_NULL, rdx);
 796   __ load_float(Address(rdx, rax,
 797                         Address::times_4,
 798                         arrayOopDesc::base_offset_in_bytes(T_FLOAT)));

 799 }
 800 
// daload: load a double element from a double[]; result in the dtos register.
 801 void TemplateTable::daload() {
 802   transition(itos, dtos);
 803   // rax: index
 804   // rdx: array
 805   index_check(rdx, rax); // kills rbx
 806   __ resolve_for_read(OOP_NOT_NULL, rdx);
 807   __ load_double(Address(rdx, rax,
 808                          Address::times_8,
 809                          arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));

 810 }
 811 
// aaload: load an object reference from an Object[]. Uses do_oop_load so the
// GC load barrier / compressed-oop decode is applied; element scale depends
// on UseCompressedOops.
 812 void TemplateTable::aaload() {
 813   transition(itos, atos);
 814   // rax: index
 815   // rdx: array
 816   index_check(rdx, rax); // kills rbx
 817   __ resolve_for_read(OOP_NOT_NULL, rdx);
 818   do_oop_load(_masm,
 819               Address(rdx, rax,
 820                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 821                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 822               rax,
 823               IN_HEAP_ARRAY);
 824 }
 825 
// baload: load a byte/boolean element, sign-extended into rax (per JVMS,
// baload sign-extends the byte to an int).
 826 void TemplateTable::baload() {
 827   transition(itos, itos);
 828   // rax: index
 829   // rdx: array
 830   index_check(rdx, rax); // kills rbx
 831   __ resolve_for_read(OOP_NOT_NULL, rdx);
 832   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));

 833 }
 834 
// caload: load a char element, zero-extended into rax (chars are unsigned 16-bit).
 835 void TemplateTable::caload() {
 836   transition(itos, itos);
 837   // rax: index
 838   // rdx: array
 839   index_check(rdx, rax); // kills rbx
 840   __ resolve_for_read(OOP_NOT_NULL, rdx);
 841   __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));

 842 }
 843 
 844 // iload followed by caload frequent pair
// fast_icaload: fused template for the frequent iload-then-caload bytecode
// pair — loads the index from the local slot itself instead of taking it
// from the expression stack, then performs a normal caload.
 845 void TemplateTable::fast_icaload() {
 846   transition(vtos, itos);
 847   // load index out of locals
 848   locals_index(rbx);
 849   __ movl(rax, iaddress(rbx));
 850 
 851   // rax: index
 852   // rdx: array
 853   index_check(rdx, rax); // kills rbx
 854   __ resolve_for_read(OOP_NOT_NULL, rdx);
 855   __ load_unsigned_short(rax,
 856                          Address(rdx, rax,
 857                                  Address::times_2,
 858                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 859 }
 860 
 861 
// saload: load a short element, sign-extended into rax.
 862 void TemplateTable::saload() {
 863   transition(itos, itos);
 864   // rax: index
 865   // rdx: array
 866   index_check(rdx, rax); // kills rbx
 867   __ resolve_for_read(OOP_NOT_NULL, rdx);
 868   __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));

 869 }
 870 
// iload_<n>: load int local #n into the tos cache register rax.
 871 void TemplateTable::iload(int n) {
 872   transition(vtos, itos);
 873   __ movl(rax, iaddress(n));
 874 }
 875 
// lload_<n>: load long local #n; on 32-bit the high word goes into rdx.
 876 void TemplateTable::lload(int n) {
 877   transition(vtos, ltos);
 878   __ movptr(rax, laddress(n));
 879   NOT_LP64(__ movptr(rdx, haddress(n)));
 880 }
 881 
// fload_<n>: load float local #n into the ftos register.
 882 void TemplateTable::fload(int n) {
 883   transition(vtos, ftos);
 884   __ load_float(faddress(n));
 885 }
 886 
 887 void TemplateTable::dload(int n) {
 888   transition(vtos, dtos);


1040   __ movdbl(daddress(rbx), xmm0);
1041 #else
1042   wide_lstore();
1043 #endif
1044 }
1045 
// wide_astore: astore with a 16-bit (wide-prefixed) local index.
1046 void TemplateTable::wide_astore() {
1047   transition(vtos, vtos);
1048   __ pop_ptr(rax);
1049   locals_index_wide(rbx);
1050   __ movptr(aaddress(rbx), rax);
1051 }
1052 
// iastore: store int rax into a[i]; index popped into rbx, arrayref in rdx.
// resolve_for_write: hook on the array oop before the store (presumably a
// GC/barrier resolve — confirm against MacroAssembler).
1053 void TemplateTable::iastore() {
1054   transition(itos, vtos);
1055   __ pop_i(rbx);
1056   // rax: value
1057   // rbx: index
1058   // rdx: array
1059   index_check(rdx, rbx); // prefer index in rbx
1060   __ resolve_for_write(OOP_NOT_NULL, rdx);
1061   __ movl(Address(rdx, rbx,
1062                   Address::times_4,
1063                   arrayOopDesc::base_offset_in_bytes(T_INT)),
1064           rax);
1065 }
1066 
// lastore: store long value into a long[]. Note the arrayref is in rcx here
// (not rdx) because on 32-bit rdx holds the high word of the value.
1067 void TemplateTable::lastore() {
1068   transition(ltos, vtos);
1069   __ pop_i(rbx);
1070   // rax,: low(value)
1071   // rcx: array
1072   // rdx: high(value)
1073   index_check(rcx, rbx);  // prefer index in rbx,
1074   // rbx,: index
1075   __ resolve_for_write(OOP_NOT_NULL, rcx);
1076   __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
1077   NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
1078 }
1079 
1080 
// fastore: store the ftos value (xmm0 or ST(0)) into a float[] element.
1081 void TemplateTable::fastore() {
1082   transition(ftos, vtos);
1083   __ pop_i(rbx);
1084   // value is in UseSSE >= 1 ? xmm0 : ST(0)
1085   // rbx:  index
1086   // rdx:  array
1087   index_check(rdx, rbx); // prefer index in rbx
1088   __ resolve_for_write(OOP_NOT_NULL, rdx);
1089   __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
1090 }
1091 
// dastore: store the dtos value (xmm0 or ST(0)) into a double[] element.
1092 void TemplateTable::dastore() {
1093   transition(dtos, vtos);
1094   __ pop_i(rbx);
1095   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1096   // rbx:  index
1097   // rdx:  array
1098   index_check(rdx, rbx); // prefer index in rbx
1099   __ resolve_for_write(OOP_NOT_NULL, rdx);
1100   __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
1101 }
1102 
// aastore: store an object reference into an Object[]. Performs the dynamic
// array-store type check: a non-null value must be a subtype of the array's
// element klass, else ArrayStoreException is thrown. The null case skips the
// subtype check and records the null in the profile. Operands are read with
// at_tos*() and only popped at the very end, so the stack stays intact for
// exception reporting.
1103 void TemplateTable::aastore() {
1104   Label is_null, ok_is_subtype, done;
1105   transition(vtos, vtos);
1106   // stack: ..., array, index, value
1107   __ movptr(rax, at_tos());    // value
1108   __ movl(rcx, at_tos_p1()); // index
1109   __ movptr(rdx, at_tos_p2()); // array
1110 
1111   Address element_address(rdx, rcx,
1112                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1113                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1114 
1115   index_check_without_pop(rdx, rcx);     // kills rbx
1116   __ resolve_for_write(OOP_NOT_NULL, rdx);
1117   __ testptr(rax, rax);
1118   __ jcc(Assembler::zero, is_null);
1119 
1120   // Move subklass into rbx
1121   __ load_klass(rbx, rax);
1122   // Move superklass into rax
1123   __ load_klass(rax, rdx);
1124   __ movptr(rax, Address(rax,
1125                          ObjArrayKlass::element_klass_offset()));
1126   // Compress array + index*oopSize + 12 into a single register.  Frees rcx.
1127   __ lea(rdx, element_address);
1128 
1129   // Generate subtype check.  Blows rcx, rdi
1130   // Superklass in rax.  Subklass in rbx.
1131   __ gen_subtype_check(rbx, ok_is_subtype);
1132 
1133   // Come here on failure
1134   // object is at TOS
1135   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1136 
1137   // Come here on success
1138   __ bind(ok_is_subtype);
1139 
1140   // Get the value we will store
1141   __ movptr(rax, at_tos());

1142   // Now store using the appropriate barrier
1143   do_oop_store(_masm, Address(rdx, 0), rax, IN_HEAP_ARRAY);
1144   __ jmp(done);
1145 
1146   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1147   __ bind(is_null);
1148   __ profile_null_seen(rbx);
1149 
1150   // Store a NULL
1151   do_oop_store(_masm, element_address, noreg, IN_HEAP_ARRAY);
1152 
1153   // Pop stack arguments
1154   __ bind(done);
1155   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1156 }
1157 
// bastore: store a byte/boolean element. boolean[] and byte[] share the
// bastore bytecode, so the array's layout helper is inspected at runtime and
// the value is masked to 0/1 when the array is actually a boolean[].
1158 void TemplateTable::bastore() {
1159   transition(itos, vtos);
1160   __ pop_i(rbx);
1161   // rax: value
1162   // rbx: index
1163   // rdx: array
1164   index_check(rdx, rbx); // prefer index in rbx
1165   __ resolve_for_write(OOP_NOT_NULL, rdx);
1166   // Need to check whether array is boolean or byte
1167   // since both types share the bastore bytecode.
1168   __ load_klass(rcx, rdx);
1169   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1170   int diffbit = Klass::layout_helper_boolean_diffbit();
1171   __ testl(rcx, diffbit);
1172   Label L_skip;
1173   __ jccb(Assembler::zero, L_skip);
1174   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1175   __ bind(L_skip);
1176   __ movb(Address(rdx, rbx,
1177                   Address::times_1,
1178                   arrayOopDesc::base_offset_in_bytes(T_BYTE)),
1179           rax);
1180 }
1181 
// castore: store a 16-bit char element (also reused verbatim for sastore,
// since the store itself is identical for char and short).
1182 void TemplateTable::castore() {
1183   transition(itos, vtos);
1184   __ pop_i(rbx);
1185   // rax: value
1186   // rbx: index
1187   // rdx: array
1188   index_check(rdx, rbx);  // prefer index in rbx
1189   __ resolve_for_write(OOP_NOT_NULL, rdx);
1190   __ movw(Address(rdx, rbx,
1191                   Address::times_2,
1192                   arrayOopDesc::base_offset_in_bytes(T_CHAR)),
1193           rax);
1194 }
1195 
1196 
// sastore: a 16-bit store is a 16-bit store — delegate to castore.
1197 void TemplateTable::sastore() {
1198   castore();
1199 }
1200 
// istore_<n>: store the itos value rax into int local #n.
1201 void TemplateTable::istore(int n) {
1202   transition(itos, vtos);
1203   __ movl(iaddress(n), rax);
1204 }
1205 
// lstore_<n>: store the ltos value into long local #n; high word from rdx on 32-bit.
1206 void TemplateTable::lstore(int n) {
1207   transition(ltos, vtos);
1208   __ movptr(laddress(n), rax);
1209   NOT_LP64(__ movptr(haddress(n), rdx));
1210 }
1211 
1212 void TemplateTable::fstore(int n) {
1213   transition(ftos, vtos);


2847   __ pop_ptr(r);
2848   __ null_check(r);  // for field access must check obj.
2849   __ verify_oop(r);
2850 }
2851 
// getfield_or_static: shared template for getfield/getstatic. Resolves the
// field through the constant-pool cache, pops and null-checks the receiver
// for instance fields, then dispatches on the field's tos state (extracted
// from the cache flags word) to emit the properly-typed load. For instance
// fields with rc == may_rewrite, the bytecode is patched to its _fast_*
// variant so later executions skip resolution.
2852 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2853   transition(vtos, vtos);
2854 
2855   const Register cache = rcx;
2856   const Register index = rdx;
2857   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2858   const Register off   = rbx;
2859   const Register flags = rax;
2860   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2861 
2862   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2863   jvmti_post_field_access(cache, index, is_static, false);
2864   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2865 
2866   if (!is_static) pop_and_check_object(obj);
2867   __ resolve_for_read(OOP_NOT_NULL, obj);
2868 
2869   const Address field(obj, off, Address::times_1, 0*wordSize);
2870   NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
2871 
2872   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2873 
  // Reduce flags to just the tos state; after this, flags can be compared
  // directly against the TosState enum values (btos, ztos, atos, ...).
2874   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2875   // Make sure we don't need to mask edx after the above shift
2876   assert(btos == 0, "change code, btos != 0");
2877 
2878   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2879 
2880   __ jcc(Assembler::notZero, notByte);
2881   // btos
2882   __ load_signed_byte(rax, field);
2883   __ push(btos);
2884   // Rewrite bytecode to be faster
2885   if (!is_static && rc == may_rewrite) {
2886     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2887   }
2888   __ jmp(Done);
2889 
2890   __ bind(notByte);
2891   __ cmpl(flags, ztos);
2892   __ jcc(Assembler::notEqual, notBool);
2893 
2894   // ztos (same code as btos)
2895   __ load_signed_byte(rax, field);
2896   __ push(ztos);
2897   // Rewrite bytecode to be faster
2898   if (!is_static && rc == may_rewrite) {
2899     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2900     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2901   }
2902   __ jmp(Done);
2903 
2904   __ bind(notBool);
2905   __ cmpl(flags, atos);
2906   __ jcc(Assembler::notEqual, notObj);
2907   // atos
  // Oop loads go through do_oop_load so the GC load barrier applies.
2908   do_oop_load(_masm, field, rax);
2909   __ push(atos);
2910   if (!is_static && rc == may_rewrite) {
2911     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2912   }
2913   __ jmp(Done);
2914 
2915   __ bind(notObj);
2916   __ cmpl(flags, itos);
2917   __ jcc(Assembler::notEqual, notInt);
2918   // itos
2919   __ movl(rax, field);
2920   __ push(itos);
2921   // Rewrite bytecode to be faster
2922   if (!is_static && rc == may_rewrite) {
2923     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2924   }
2925   __ jmp(Done);
2926 
2927   __ bind(notInt);
2928   __ cmpl(flags, ctos);
2929   __ jcc(Assembler::notEqual, notChar);
2930   // ctos
2931   __ load_unsigned_short(rax, field);
2932   __ push(ctos);
2933   // Rewrite bytecode to be faster
2934   if (!is_static && rc == may_rewrite) {
2935     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2936   }
2937   __ jmp(Done);
2938 
2939   __ bind(notChar);
2940   __ cmpl(flags, stos);
2941   __ jcc(Assembler::notEqual, notShort);
2942   // stos
2943   __ load_signed_short(rax, field);
2944   __ push(stos);
2945   // Rewrite bytecode to be faster
2946   if (!is_static && rc == may_rewrite) {
2947     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2948   }
2949   __ jmp(Done);
2950 
2951   __ bind(notShort);
2952   __ cmpl(flags, ltos);
2953   __ jcc(Assembler::notEqual, notLong);
2954   // ltos
2955 
2956 #ifndef _LP64
2957   // Generate code as if volatile.  There just aren't enough registers to
2958   // save that information and this code is faster than the test.
  // FILD/FISTP give a single 64-bit memory read on 32-bit x86, which keeps
  // the long load atomic; the value is then bounced through the stack into
  // rax:rdx.
2959   __ fild_d(field);                // Must load atomically
2960   __ subptr(rsp,2*wordSize);    // Make space for store
2961   __ fistp_d(Address(rsp,0));
2962   __ pop(rax);
2963   __ pop(rdx);
2964 #else
2965   __ movq(rax, field);
2966 #endif
2967 
2968   __ push(ltos);

2969   // Rewrite bytecode to be faster
  // 32-bit never rewrites ltos: the fast path could not preserve atomicity
  // for a possibly-volatile long (no flags left to check — see above).
2970   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2971   __ jmp(Done);
2972 
2973   __ bind(notLong);
2974   __ cmpl(flags, ftos);
2975   __ jcc(Assembler::notEqual, notFloat);
2976   // ftos
2977 

2978   __ load_float(field);
2979   __ push(ftos);
2980   // Rewrite bytecode to be faster
2981   if (!is_static && rc == may_rewrite) {
2982     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2983   }
2984   __ jmp(Done);
2985 
2986   __ bind(notFloat);
  // dtos is the only state left; verify that in debug builds only.
2987 #ifdef ASSERT
2988   __ cmpl(flags, dtos);
2989   __ jcc(Assembler::notEqual, notDouble);
2990 #endif
2991   // dtos
2992   __ load_double(field);
2993   __ push(dtos);
2994   // Rewrite bytecode to be faster
2995   if (!is_static && rc == may_rewrite) {
2996     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2997   }
2998 #ifdef ASSERT
2999   __ jmp(Done);
3000 
3001 
3002   __ bind(notDouble);
3003   __ stop("Bad state");
3004 #endif
3005 
3006   __ bind(Done);
3007   // [jk] not needed currently
3008   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3009   //                                              Assembler::LoadStore));
3010 }
3011 
3012 void TemplateTable::getfield(int byte_no) {


3131   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3132   __ andl(rdx, 0x1);
3133 
3134   // field addresses
3135   const Address field(obj, off, Address::times_1, 0*wordSize);
3136   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3137 
3138   Label notByte, notBool, notInt, notShort, notChar,
3139         notLong, notFloat, notObj, notDouble;
3140 
3141   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3142 
3143   assert(btos == 0, "change code, btos != 0");
3144   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3145   __ jcc(Assembler::notZero, notByte);
3146 
3147   // btos
3148   {
3149     __ pop(btos);
3150     if (!is_static) pop_and_check_object(obj);
3151     __ resolve_for_write(OOP_NOT_NULL, obj);
3152     __ movb(field, rax);
3153     if (!is_static && rc == may_rewrite) {
3154       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3155     }
3156     __ jmp(Done);
3157   }
3158 
3159   __ bind(notByte);
3160   __ cmpl(flags, ztos);
3161   __ jcc(Assembler::notEqual, notBool);
3162 
3163   // ztos
3164   {
3165     __ pop(ztos);
3166     if (!is_static) pop_and_check_object(obj);
3167     __ resolve_for_write(OOP_NOT_NULL, obj);
3168     __ andl(rax, 0x1);
3169     __ movb(field, rax);
3170     if (!is_static && rc == may_rewrite) {
3171       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3172     }
3173     __ jmp(Done);
3174   }
3175 
3176   __ bind(notBool);
3177   __ cmpl(flags, atos);
3178   __ jcc(Assembler::notEqual, notObj);
3179 
3180   // atos
3181   {
3182     __ pop(atos);
3183     if (!is_static) pop_and_check_object(obj);
3184     __ resolve_for_write(OOP_NOT_NULL, obj);
3185     // Store into the field
3186     do_oop_store(_masm, field, rax);
3187     if (!is_static && rc == may_rewrite) {
3188       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3189     }
3190     __ jmp(Done);
3191   }
3192 
3193   __ bind(notObj);
3194   __ cmpl(flags, itos);
3195   __ jcc(Assembler::notEqual, notInt);
3196 
3197   // itos
3198   {
3199     __ pop(itos);
3200     if (!is_static) pop_and_check_object(obj);
3201     __ resolve_for_write(OOP_NOT_NULL, obj);
3202     __ movl(field, rax);
3203     if (!is_static && rc == may_rewrite) {
3204       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3205     }
3206     __ jmp(Done);
3207   }
3208 
3209   __ bind(notInt);
3210   __ cmpl(flags, ctos);
3211   __ jcc(Assembler::notEqual, notChar);
3212 
3213   // ctos
3214   {
3215     __ pop(ctos);
3216     if (!is_static) pop_and_check_object(obj);
3217     __ resolve_for_write(OOP_NOT_NULL, obj);
3218     __ movw(field, rax);
3219     if (!is_static && rc == may_rewrite) {
3220       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3221     }
3222     __ jmp(Done);
3223   }
3224 
3225   __ bind(notChar);
3226   __ cmpl(flags, stos);
3227   __ jcc(Assembler::notEqual, notShort);
3228 
3229   // stos
3230   {
3231     __ pop(stos);
3232     if (!is_static) pop_and_check_object(obj);
3233     __ resolve_for_write(OOP_NOT_NULL, obj);
3234     __ movw(field, rax);
3235     if (!is_static && rc == may_rewrite) {
3236       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3237     }
3238     __ jmp(Done);
3239   }
3240 
3241   __ bind(notShort);
3242   __ cmpl(flags, ltos);
3243   __ jcc(Assembler::notEqual, notLong);
3244 
3245   // ltos
3246 #ifdef _LP64
3247   {
3248     __ pop(ltos);
3249     if (!is_static) pop_and_check_object(obj);
3250     __ resolve_for_write(OOP_NOT_NULL, obj);
3251     __ movq(field, rax);
3252     if (!is_static && rc == may_rewrite) {
3253       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3254     }
3255     __ jmp(Done);
3256   }
3257 #else
3258   {
3259     Label notVolatileLong;
3260     __ testl(rdx, rdx);
3261     __ jcc(Assembler::zero, notVolatileLong);
3262 
3263     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
3264     if (!is_static) pop_and_check_object(obj);
3265 
3266     // Replace with real volatile test
3267     __ push(rdx);
3268     __ push(rax);                 // Must update atomically with FIST
3269     __ fild_d(Address(rsp,0));    // So load into FPU register
3270     __ fistp_d(field);            // and put into memory atomically
3271     __ addptr(rsp, 2*wordSize);
3272     // volatile_barrier();
3273     volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3274                                                  Assembler::StoreStore));
3275     // Don't rewrite volatile version
3276     __ jmp(notVolatile);
3277 
3278     __ bind(notVolatileLong);
3279 
3280     __ pop(ltos);  // overwrites rdx
3281     if (!is_static) pop_and_check_object(obj);
3282     __ movptr(hi, rdx);
3283     __ movptr(field, rax);
3284     // Don't rewrite to _fast_lputfield for potential volatile case.
3285     __ jmp(notVolatile);
3286   }
3287 #endif // _LP64
3288 
3289   __ bind(notLong);
3290   __ cmpl(flags, ftos);
3291   __ jcc(Assembler::notEqual, notFloat);
3292 
3293   // ftos
3294   {
3295     __ pop(ftos);
3296     if (!is_static) pop_and_check_object(obj);
3297     __ resolve_for_write(OOP_NOT_NULL, obj);
3298     __ store_float(field);
3299     if (!is_static && rc == may_rewrite) {
3300       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3301     }
3302     __ jmp(Done);
3303   }
3304 
3305   __ bind(notFloat);
3306 #ifdef ASSERT
3307   __ cmpl(flags, dtos);
3308   __ jcc(Assembler::notEqual, notDouble);
3309 #endif
3310 
3311   // dtos
3312   {
3313     __ pop(dtos);
3314     if (!is_static) pop_and_check_object(obj);
3315     __ resolve_for_write(OOP_NOT_NULL, obj);
3316     __ store_double(field);
3317     if (!is_static && rc == may_rewrite) {
3318       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3319     }
3320   }
3321 
3322 #ifdef ASSERT
3323   __ jmp(Done);
3324 
3325   __ bind(notDouble);
3326   __ stop("Bad state");
3327 #endif
3328 
3329   __ bind(Done);
3330 
3331   // Check for volatile store
3332   __ testl(rdx, rdx);
3333   __ jcc(Assembler::zero, notVolatile);
3334   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3335                                                Assembler::StoreStore));
3336   __ bind(notVolatile);


3418 
3419   // test for volatile with rdx but rdx is tos register for lputfield.
3420   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3421                        in_bytes(base +
3422                                 ConstantPoolCacheEntry::flags_offset())));
3423 
3424   // replace index with field offset from cache entry
3425   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3426                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3427 
3428   // [jk] not needed currently
3429   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3430   //                                              Assembler::StoreStore));
3431 
3432   Label notVolatile;
3433   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3434   __ andl(rdx, 0x1);
3435 
3436   // Get object from stack
3437   pop_and_check_object(rcx);
3438   __ resolve_for_write(OOP_NOT_NULL, rcx);
3439 
3440   // field address
3441   const Address field(rcx, rbx, Address::times_1);
3442 
3443   // access field
3444   switch (bytecode()) {
3445   case Bytecodes::_fast_aputfield:
3446     do_oop_store(_masm, field, rax);
3447     break;
3448   case Bytecodes::_fast_lputfield:
3449 #ifdef _LP64
3450   __ movq(field, rax);
3451 #else
3452   __ stop("should not be rewritten");
3453 #endif
3454     break;
3455   case Bytecodes::_fast_iputfield:
3456     __ movl(field, rax);
3457     break;
3458   case Bytecodes::_fast_zputfield:
3459     __ andl(rax, 0x1);  // boolean is true if LSB is 1
3460     // fall through to bputfield
3461   case Bytecodes::_fast_bputfield:
3462     __ movb(field, rax);
3463     break;
3464   case Bytecodes::_fast_sputfield:
3465     // fall through

3466   case Bytecodes::_fast_cputfield:
3467     __ movw(field, rax);
3468     break;
3469   case Bytecodes::_fast_fputfield:
3470     __ store_float(field);
3471     break;
3472   case Bytecodes::_fast_dputfield:
3473     __ store_double(field);
3474     break;
3475   default:
3476     ShouldNotReachHere();
3477   }
3478 
3479   // Check for volatile store
3480   __ testl(rdx, rdx);
3481   __ jcc(Assembler::zero, notVolatile);
3482   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3483                                                Assembler::StoreStore));
3484   __ bind(notVolatile);
3485 }
3486 
3487 void TemplateTable::fast_accessfield(TosState state) {
3488   transition(atos, state);
3489 
3490   // Do the JVMTI work here to avoid disturbing the register state below
3491   if (JvmtiExport::can_post_field_access()) {
3492     // Check to see if a field access watch has been set before we
3493     // take the time to call into the VM.


3510   }
3511 
3512   // access constant pool cache
3513   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3514   // replace index with field offset from cache entry
3515   // [jk] not needed currently
3516   // if (os::is_MP()) {
3517   //   __ movl(rdx, Address(rcx, rbx, Address::times_8,
3518   //                        in_bytes(ConstantPoolCache::base_offset() +
3519   //                                 ConstantPoolCacheEntry::flags_offset())));
3520   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3521   //   __ andl(rdx, 0x1);
3522   // }
3523   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3524                          in_bytes(ConstantPoolCache::base_offset() +
3525                                   ConstantPoolCacheEntry::f2_offset())));
3526 
3527   // rax: object
3528   __ verify_oop(rax);
3529   __ null_check(rax);
3530   __ resolve_for_read(OOP_NOT_NULL, rax);
3531   Address field(rax, rbx, Address::times_1);
3532 
3533   // access field
3534   switch (bytecode()) {
3535   case Bytecodes::_fast_agetfield:
3536     do_oop_load(_masm, field, rax);
3537     __ verify_oop(rax);
3538     break;
3539   case Bytecodes::_fast_lgetfield:
3540 #ifdef _LP64
3541   __ movq(rax, field);
3542 #else
3543   __ stop("should not be rewritten");
3544 #endif
3545     break;
3546   case Bytecodes::_fast_igetfield:
3547     __ movl(rax, field);
3548     break;
3549   case Bytecodes::_fast_bgetfield:
3550     __ movsbl(rax, field);
3551     break;
3552   case Bytecodes::_fast_sgetfield:
3553     __ load_signed_short(rax, field);
3554     break;
3555   case Bytecodes::_fast_cgetfield:
3556     __ load_unsigned_short(rax, field);
3557     break;
3558   case Bytecodes::_fast_fgetfield:
3559     __ load_float(field);
3560     break;
3561   case Bytecodes::_fast_dgetfield:
3562     __ load_double(field);
3563     break;
3564   default:
3565     ShouldNotReachHere();
3566   }
3567   // [jk] not needed currently
3568   // if (os::is_MP()) {
3569   //   Label notVolatile;
3570   //   __ testl(rdx, rdx);
3571   //   __ jcc(Assembler::zero, notVolatile);
3572   //   __ membar(Assembler::LoadLoad);
3573   //   __ bind(notVolatile);
3574   //};
3575 }
3576 
// fast_xaccess: fused template for the frequent aload_0 + fast getfield
// pair. Loads the receiver from local 0, fetches the field offset from the
// constant-pool cache, and loads the field for the given tos state (itos,
// atos or ftos only). rbcp is temporarily incremented so any exception from
// the null check is reported with the bcp of the getfield instruction.
3577 void TemplateTable::fast_xaccess(TosState state) {
3578   transition(vtos, state);
3579 
3580   // get receiver
3581   __ movptr(rax, aaddress(0));
3582   // access constant pool cache
3583   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3584   __ movptr(rbx,
3585             Address(rcx, rdx, Address::times_ptr,
3586                     in_bytes(ConstantPoolCache::base_offset() +
3587                              ConstantPoolCacheEntry::f2_offset())));
3588   // make sure exception is reported in correct bcp range (getfield is
3589   // next instruction)
3590   __ increment(rbcp);
3591   __ null_check(rax);
3592   __ resolve_for_read(OOP_NOT_NULL, rax);
3593   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3594   switch (state) {
3595   case itos:
3596     __ movl(rax, field);
3597     break;
3598   case atos:
3599     do_oop_load(_masm, field, rax);
3600     __ verify_oop(rax);
3601     break;
3602   case ftos:
3603     __ load_float(field);
3604     break;
3605   default:
3606     ShouldNotReachHere();
3607   }
3608 
3609   // [jk] not needed currently
3610   // if (os::is_MP()) {
3611   //   Label notVolatile;
3612   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
3613   //                        in_bytes(ConstantPoolCache::base_offset() +
3614   //                                 ConstantPoolCacheEntry::flags_offset())));
3615   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3616   //   __ testl(rdx, 0x1);
3617   //   __ jcc(Assembler::zero, notVolatile);
3618   //   __ membar(Assembler::LoadLoad);
3619   //   __ bind(notVolatile);
3620   // }
3621 
  // Restore rbcp (undo the increment above).
3622   __ decrement(rbcp);
3623 }




 750   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 751   // sign extend index for use by indexed load
 752   __ movl2ptr(index, index);
 753   // check index
 754   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 755   if (index != rbx) {
 756     // ??? convention: move aberrant index into rbx for exception message
 757     assert(rbx != array, "different registers");
 758     __ movl(rbx, index);
 759   }
 760   __ jump_cc(Assembler::aboveEqual,
 761              ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 762 }
 763 
 764 
// iaload (post-patch version): load an int element from an int[].
// access_load_at routes the load through the barrier-set access API
// (presumably replacing the explicit resolve_for_read — confirm against
// MacroAssembler::access_load_at).
 765 void TemplateTable::iaload() {
 766   transition(itos, itos);
 767   // rax: index
 768   // rdx: array
 769   index_check(rdx, rax); // kills rbx
 770   __ access_load_at(T_INT, IN_HEAP, rax, Address(rdx, rax,

 771                                                  Address::times_4,
 772                                                  arrayOopDesc::base_offset_in_bytes(T_INT)),
 773                     noreg, noreg);
 774 }
 775 
// laload (post-patch version): load a long element via the access API.
// Destination is noreg because a T_LONG load targets the ltos register
// convention (rax on 64-bit, rax:rdx on 32-bit). index_check leaves the
// index in rbx, which this code relies on.
 776 void TemplateTable::laload() {
 777   transition(itos, ltos);
 778   // rax: index
 779   // rdx: array
 780   index_check(rdx, rax); // kills rbx
 781   NOT_LP64(__ mov(rbx, rax));
 782   // rbx,: index
 783   __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */,
 784                     Address(rdx, rbx, Address::times_8,
 785                             arrayOopDesc::base_offset_in_bytes(T_LONG)),
 786                     noreg, noreg);
 787 }
 788 
 789 
 790 
// faload (post-patch version): load a float element via the access API;
// noreg destination means the value lands in the ftos register.
 791 void TemplateTable::faload() {
 792   transition(itos, ftos);
 793   // rax: index
 794   // rdx: array
 795   index_check(rdx, rax); // kills rbx
 796   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */,
 797                     Address(rdx, rax,
 798                             Address::times_4,
 799                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 800                     noreg, noreg);
 801 }
 802 
// daload (post-patch version): load a double element via the access API;
// noreg destination means the value lands in the dtos register.
 803 void TemplateTable::daload() {
 804   transition(itos, dtos);
 805   // rax: index
 806   // rdx: array
 807   index_check(rdx, rax); // kills rbx
 808   __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */,
 809                     Address(rdx, rax,
 810                             Address::times_8,
 811                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 812                     noreg, noreg);
 813 }
 814 
// aaload (post-patch version): load an object reference from an Object[];
// do_oop_load applies the GC load barrier / compressed-oop decode.
 815 void TemplateTable::aaload() {
 816   transition(itos, atos);
 817   // rax: index
 818   // rdx: array
 819   index_check(rdx, rax); // kills rbx

 820   do_oop_load(_masm,
 821               Address(rdx, rax,
 822                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 823                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 824               rax,
 825               IN_HEAP_ARRAY);
 826 }
 827 
// baload (post-patch version): load a byte/boolean element into rax via the
// access API (T_BYTE implies a sign-extending load, per JVMS baload).
 828 void TemplateTable::baload() {
 829   transition(itos, itos);
 830   // rax: index
 831   // rdx: array
 832   index_check(rdx, rax); // kills rbx
 833   __ access_load_at(T_BYTE, IN_HEAP, rax,
 834                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 835                     noreg, noreg);
 836 }
 837 
// caload: tos holds the array index (itos); loads a char element into rax.
void TemplateTable::caload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_CHAR, IN_HEAP, rax,
                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
                    noreg, noreg);
}
 847 
// iload followed by caload frequent pair: fused fast bytecode. Fetches the
// local-variable index from the bytecode stream, loads the int local as the
// array index, then performs the same element load as caload above.
void TemplateTable::fast_icaload() {
  transition(vtos, itos);
  // load index out of locals
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));

  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_CHAR, IN_HEAP, rax,
                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
                    noreg, noreg);
}
 862 
 863 
// saload: tos holds the array index (itos); loads a (sign-extended) short
// element into rax.
void TemplateTable::saload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_SHORT, IN_HEAP, rax,
                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)),
                    noreg, noreg);
}
 873 
// iload_<n>: load the int local at fixed slot n into rax (itos).
void TemplateTable::iload(int n) {
  transition(vtos, itos);
  __ movl(rax, iaddress(n));
}
 878 
// lload_<n>: load the long local at fixed slot n into the ltos register(s)
// (rax on 64-bit; rax/rdx low/high pair on 32-bit).
void TemplateTable::lload(int n) {
  transition(vtos, ltos);
  __ movptr(rax, laddress(n));
  NOT_LP64(__ movptr(rdx, haddress(n)));
}
 884 
// fload_<n>: load the float local at fixed slot n into the ftos register.
void TemplateTable::fload(int n) {
  transition(vtos, ftos);
  __ load_float(faddress(n));
}
 889 
 890 void TemplateTable::dload(int n) {
 891   transition(vtos, dtos);


1043   __ movdbl(daddress(rbx), xmm0);
1044 #else
1045   wide_lstore();
1046 #endif
1047 }
1048 
// wide_astore: pop the reference on tos and store it into the local slot
// addressed by a wide (two-byte) local-variable index.
void TemplateTable::wide_astore() {
  transition(vtos, vtos);
  __ pop_ptr(rax);
  locals_index_wide(rbx);
  __ movptr(aaddress(rbx), rax);
}
1055 
// iastore: value in rax (itos); pops the index, checks bounds, and stores
// the int element via the barrier-aware store.
void TemplateTable::iastore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ access_store_at(T_INT, IN_HEAP, Address(rdx, rbx,
                                             Address::times_4,
                                             arrayOopDesc::base_offset_in_bytes(T_INT)), rax, noreg, noreg);
}
1067 
// lastore: long value is in the ltos register(s); pops the index, checks
// bounds, and stores the 64-bit element. noreg value means the store takes
// the value from the ltos tos register(s).
void TemplateTable::lastore() {
  transition(ltos, vtos);
  __ pop_i(rbx);
  // rax,: low(value)
  // rcx: array
  // rdx: high(value)
  index_check(rcx, rbx);  // prefer index in rbx,
  // rbx,: index
  __ access_store_at(T_LONG, IN_HEAP, Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG)), noreg /* ltos */, noreg, noreg);
}
1078 
1079 
// fastore: float value is in the ftos register; pops the index, checks
// bounds, and stores the element.
void TemplateTable::fastore() {
  transition(ftos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 1 ? xmm0 : ST(0)
  // rbx:  index
  // rdx:  array
  index_check(rdx, rbx); // prefer index in rbx
  __ access_store_at(T_FLOAT, IN_HEAP, Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)), noreg /* ftos */, noreg, noreg);
}
1089 
// dastore: double value is in the dtos register; pops the index, checks
// bounds, and stores the element.
void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 2 ? xmm0 : ST(0)
  // rbx:  index
  // rdx:  array
  index_check(rdx, rbx); // prefer index in rbx
  __ access_store_at(T_DOUBLE, IN_HEAP, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)), noreg /* dtos */, noreg, noreg);
}
1099 
// aastore: store a reference into an object array. Performs the dynamic
// element-type (subtype) check required by the JVM spec; throws
// ArrayStoreException on failure. NULL stores skip the check. The three
// stack arguments (array, index, value) are popped at the end.
void TemplateTable::aastore() {
  Label is_null, ok_is_subtype, done;
  transition(vtos, vtos);
  // stack: ..., array, index, value
  __ movptr(rax, at_tos());    // value
  __ movl(rcx, at_tos_p1()); // index
  __ movptr(rdx, at_tos_p2()); // array

  Address element_address(rdx, rcx,
                          UseCompressedOops? Address::times_4 : Address::times_ptr,
                          arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  index_check_without_pop(rdx, rcx);     // kills rbx
  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Move subklass into rbx
  __ load_klass(rbx, rax);
  // Move superklass into rax
  __ load_klass(rax, rdx);
  __ movptr(rax, Address(rax,
                         ObjArrayKlass::element_klass_offset()));

  // Generate subtype check.  Blows rcx, rdi
  // Superklass in rax.  Subklass in rbx.
  __ gen_subtype_check(rbx, ok_is_subtype);

  // Come here on failure
  // object is at TOS
  __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));

  // Come here on success
  __ bind(ok_is_subtype);

  // Get the value we will store
  __ movptr(rax, at_tos());    // reload value: the subtype check clobbered registers
  __ movl(rcx, at_tos_p1()); // index
  // Now store using the appropriate barrier
  do_oop_store(_masm, element_address, rax, IN_HEAP_ARRAY);
  __ jmp(done);

  // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
  __ bind(is_null);
  __ profile_null_seen(rbx);

  // Store a NULL
  do_oop_store(_masm, element_address, noreg, IN_HEAP_ARRAY);

  // Pop stack arguments
  __ bind(done);
  __ addptr(rsp, 3 * Interpreter::stackElementSize);
}
1151 
// bastore: value in rax (itos); pops the index and stores a byte element.
// Serves both byte[] and boolean[] — for boolean arrays the stored value is
// first masked to 0/1 (distinguished via the klass layout-helper diffbit).
void TemplateTable::bastore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Need to check whether array is boolean or byte
  // since both types share the bastore bytecode.
  __ load_klass(rcx, rdx);
  __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
  int diffbit = Klass::layout_helper_boolean_diffbit();
  __ testl(rcx, diffbit);
  Label L_skip;
  __ jccb(Assembler::zero, L_skip);
  __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
  __ bind(L_skip);
  __ access_store_at(T_BYTE, IN_HEAP, Address(rdx, rbx,
                                              Address::times_1,
                                              arrayOopDesc::base_offset_in_bytes(T_BYTE)),
                     rax, noreg, noreg);
}
1174 
// castore: value in rax (itos); pops the index and stores a 16-bit char
// element. Also used for sastore (see below) — the store is identical.
void TemplateTable::castore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx);  // prefer index in rbx
  __ access_store_at(T_CHAR, IN_HEAP, Address(rdx, rbx,
                                              Address::times_2,
                                              arrayOopDesc::base_offset_in_bytes(T_CHAR)),
                     rax, noreg, noreg);
}
1187 
1188 
// sastore: identical machine code to castore (both store a 16-bit element;
// element offsets of T_CHAR and T_SHORT arrays match).
void TemplateTable::sastore() {
  castore();
}
1192 
// istore_<n>: store rax (itos) into the int local at fixed slot n.
void TemplateTable::istore(int n) {
  transition(itos, vtos);
  __ movl(iaddress(n), rax);
}
1197 
// lstore_<n>: store the ltos register(s) into the long local at fixed slot n
// (rax on 64-bit; rax/rdx low/high pair on 32-bit).
void TemplateTable::lstore(int n) {
  transition(ltos, vtos);
  __ movptr(laddress(n), rax);
  NOT_LP64(__ movptr(haddress(n), rdx));
}
1203 
1204 void TemplateTable::fstore(int n) {
1205   transition(ftos, vtos);


2839   __ pop_ptr(r);
2840   __ null_check(r);  // for field access must check obj.
2841   __ verify_oop(r);
2842 }
2843 
2844 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2845   transition(vtos, vtos);
2846 
2847   const Register cache = rcx;
2848   const Register index = rdx;
2849   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2850   const Register off   = rbx;
2851   const Register flags = rax;
2852   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2853 
2854   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2855   jvmti_post_field_access(cache, index, is_static, false);
2856   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2857 
2858   if (!is_static) pop_and_check_object(obj);

2859 
2860   const Address field(obj, off, Address::times_1, 0*wordSize);

2861 
2862   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2863 
2864   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2865   // Make sure we don't need to mask edx after the above shift
2866   assert(btos == 0, "change code, btos != 0");
2867 
2868   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2869 
2870   __ jcc(Assembler::notZero, notByte);
2871   // btos
2872   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2873   __ push(btos);
2874   // Rewrite bytecode to be faster
2875   if (!is_static && rc == may_rewrite) {
2876     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2877   }
2878   __ jmp(Done);
2879 
2880   __ bind(notByte);
2881   __ cmpl(flags, ztos);
2882   __ jcc(Assembler::notEqual, notBool);
2883 
2884   // ztos (same code as btos)
2885   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2886   __ push(ztos);
2887   // Rewrite bytecode to be faster
2888   if (!is_static && rc == may_rewrite) {
2889     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2890     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2891   }
2892   __ jmp(Done);
2893 
2894   __ bind(notBool);
2895   __ cmpl(flags, atos);
2896   __ jcc(Assembler::notEqual, notObj);
2897   // atos
2898   do_oop_load(_masm, field, rax);
2899   __ push(atos);
2900   if (!is_static && rc == may_rewrite) {
2901     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2902   }
2903   __ jmp(Done);
2904 
2905   __ bind(notObj);
2906   __ cmpl(flags, itos);
2907   __ jcc(Assembler::notEqual, notInt);
2908   // itos
2909   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2910   __ push(itos);
2911   // Rewrite bytecode to be faster
2912   if (!is_static && rc == may_rewrite) {
2913     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2914   }
2915   __ jmp(Done);
2916 
2917   __ bind(notInt);
2918   __ cmpl(flags, ctos);
2919   __ jcc(Assembler::notEqual, notChar);
2920   // ctos
2921   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2922   __ push(ctos);
2923   // Rewrite bytecode to be faster
2924   if (!is_static && rc == may_rewrite) {
2925     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2926   }
2927   __ jmp(Done);
2928 
2929   __ bind(notChar);
2930   __ cmpl(flags, stos);
2931   __ jcc(Assembler::notEqual, notShort);
2932   // stos
2933   __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
2934   __ push(stos);
2935   // Rewrite bytecode to be faster
2936   if (!is_static && rc == may_rewrite) {
2937     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2938   }
2939   __ jmp(Done);
2940 
2941   __ bind(notShort);
2942   __ cmpl(flags, ltos);
2943   __ jcc(Assembler::notEqual, notLong);
2944   // ltos
2945     // Generate code as if volatile (x86_32).  There just aren't enough registers to


2946     // save that information and this code is faster than the test.
2947   __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);








2948   __ push(ltos);
2949 
2950   // Rewrite bytecode to be faster
2951   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2952   __ jmp(Done);
2953 
2954   __ bind(notLong);
2955   __ cmpl(flags, ftos);
2956   __ jcc(Assembler::notEqual, notFloat);
2957   // ftos
2958 
2959   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2960   __ load_float(field);
2961   __ push(ftos);
2962   // Rewrite bytecode to be faster
2963   if (!is_static && rc == may_rewrite) {
2964     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2965   }
2966   __ jmp(Done);
2967 
2968   __ bind(notFloat);
2969 #ifdef ASSERT
2970   __ cmpl(flags, dtos);
2971   __ jcc(Assembler::notEqual, notDouble);
2972 #endif
2973   // dtos
2974   __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2975   __ push(dtos);
2976   // Rewrite bytecode to be faster
2977   if (!is_static && rc == may_rewrite) {
2978     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2979   }
2980 #ifdef ASSERT
2981   __ jmp(Done);
2982 
2983 
2984   __ bind(notDouble);
2985   __ stop("Bad state");
2986 #endif
2987 
2988   __ bind(Done);
2989   // [jk] not needed currently
2990   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2991   //                                              Assembler::LoadStore));
2992 }
2993 
2994 void TemplateTable::getfield(int byte_no) {


3113   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3114   __ andl(rdx, 0x1);
3115 
3116   // field addresses
3117   const Address field(obj, off, Address::times_1, 0*wordSize);
3118   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3119 
3120   Label notByte, notBool, notInt, notShort, notChar,
3121         notLong, notFloat, notObj, notDouble;
3122 
3123   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3124 
3125   assert(btos == 0, "change code, btos != 0");
3126   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3127   __ jcc(Assembler::notZero, notByte);
3128 
3129   // btos
3130   {
3131     __ pop(btos);
3132     if (!is_static) pop_and_check_object(obj);
3133     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);

3134     if (!is_static && rc == may_rewrite) {
3135       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3136     }
3137     __ jmp(Done);
3138   }
3139 
3140   __ bind(notByte);
3141   __ cmpl(flags, ztos);
3142   __ jcc(Assembler::notEqual, notBool);
3143 
3144   // ztos
3145   {
3146     __ pop(ztos);
3147     if (!is_static) pop_and_check_object(obj);
3148     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);


3149     if (!is_static && rc == may_rewrite) {
3150       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3151     }
3152     __ jmp(Done);
3153   }
3154 
3155   __ bind(notBool);
3156   __ cmpl(flags, atos);
3157   __ jcc(Assembler::notEqual, notObj);
3158 
3159   // atos
3160   {
3161     __ pop(atos);
3162     if (!is_static) pop_and_check_object(obj);

3163     // Store into the field
3164     do_oop_store(_masm, field, rax);
3165     if (!is_static && rc == may_rewrite) {
3166       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3167     }
3168     __ jmp(Done);
3169   }
3170 
3171   __ bind(notObj);
3172   __ cmpl(flags, itos);
3173   __ jcc(Assembler::notEqual, notInt);
3174 
3175   // itos
3176   {
3177     __ pop(itos);
3178     if (!is_static) pop_and_check_object(obj);
3179     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);

3180     if (!is_static && rc == may_rewrite) {
3181       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3182     }
3183     __ jmp(Done);
3184   }
3185 
3186   __ bind(notInt);
3187   __ cmpl(flags, ctos);
3188   __ jcc(Assembler::notEqual, notChar);
3189 
3190   // ctos
3191   {
3192     __ pop(ctos);
3193     if (!is_static) pop_and_check_object(obj);
3194     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);

3195     if (!is_static && rc == may_rewrite) {
3196       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3197     }
3198     __ jmp(Done);
3199   }
3200 
3201   __ bind(notChar);
3202   __ cmpl(flags, stos);
3203   __ jcc(Assembler::notEqual, notShort);
3204 
3205   // stos
3206   {
3207     __ pop(stos);
3208     if (!is_static) pop_and_check_object(obj);
3209     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);

3210     if (!is_static && rc == may_rewrite) {
3211       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3212     }
3213     __ jmp(Done);
3214   }
3215 
3216   __ bind(notShort);
3217   __ cmpl(flags, ltos);
3218   __ jcc(Assembler::notEqual, notLong);
3219 
3220   // ltos
3221 #ifdef _LP64
3222   {
3223     __ pop(ltos);
3224     if (!is_static) pop_and_check_object(obj);
3225     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos*/, noreg, noreg);

3226     if (!is_static && rc == may_rewrite) {
3227       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3228     }
3229     __ jmp(Done);
3230   }
3231 #else
3232   {
3233     Label notVolatileLong;
3234     __ testl(rdx, rdx);
3235     __ jcc(Assembler::zero, notVolatileLong);
3236 
3237     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
3238     if (!is_static) pop_and_check_object(obj);
3239 
3240     // Replace with real volatile test
3241     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);




3242     // volatile_barrier();
3243     volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3244                                                  Assembler::StoreStore));
3245     // Don't rewrite volatile version
3246     __ jmp(notVolatile);
3247 
3248     __ bind(notVolatileLong);
3249 
3250     __ pop(ltos);  // overwrites rdx
3251     if (!is_static) pop_and_check_object(obj);
3252 
3253     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
3254     // Don't rewrite to _fast_lputfield for potential volatile case.
3255     __ jmp(notVolatile);
3256   }
3257 #endif // _LP64
3258 
3259   __ bind(notLong);
3260   __ cmpl(flags, ftos);
3261   __ jcc(Assembler::notEqual, notFloat);
3262 
3263   // ftos
3264   {
3265     __ pop(ftos);
3266     if (!is_static) pop_and_check_object(obj);
3267     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);

3268     if (!is_static && rc == may_rewrite) {
3269       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3270     }
3271     __ jmp(Done);
3272   }
3273 
3274   __ bind(notFloat);
3275 #ifdef ASSERT
3276   __ cmpl(flags, dtos);
3277   __ jcc(Assembler::notEqual, notDouble);
3278 #endif
3279 
3280   // dtos
3281   {
3282     __ pop(dtos);
3283     if (!is_static) pop_and_check_object(obj);
3284     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* ftos */, noreg, noreg);

3285     if (!is_static && rc == may_rewrite) {
3286       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3287     }
3288   }
3289 
3290 #ifdef ASSERT
3291   __ jmp(Done);
3292 
3293   __ bind(notDouble);
3294   __ stop("Bad state");
3295 #endif
3296 
3297   __ bind(Done);
3298 
3299   // Check for volatile store
3300   __ testl(rdx, rdx);
3301   __ jcc(Assembler::zero, notVolatile);
3302   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3303                                                Assembler::StoreStore));
3304   __ bind(notVolatile);


3386 
3387   // test for volatile with rdx but rdx is tos register for lputfield.
3388   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3389                        in_bytes(base +
3390                                 ConstantPoolCacheEntry::flags_offset())));
3391 
3392   // replace index with field offset from cache entry
3393   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3394                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3395 
3396   // [jk] not needed currently
3397   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3398   //                                              Assembler::StoreStore));
3399 
3400   Label notVolatile;
3401   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3402   __ andl(rdx, 0x1);
3403 
3404   // Get object from stack
3405   pop_and_check_object(rcx);

3406 
3407   // field address
3408   const Address field(rcx, rbx, Address::times_1);
3409 
3410   // access field
3411   switch (bytecode()) {
3412   case Bytecodes::_fast_aputfield:
3413     do_oop_store(_masm, field, rax);
3414     break;
3415   case Bytecodes::_fast_lputfield:
3416 #ifdef _LP64
3417     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3418 #else
3419     __ stop("should not be rewritten");
3420 #endif
3421     break;
3422   case Bytecodes::_fast_iputfield:
3423     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3424     break;
3425   case Bytecodes::_fast_zputfield:
3426     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3427     break;
3428   case Bytecodes::_fast_bputfield:
3429     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3430     break;
3431   case Bytecodes::_fast_sputfield:
3432     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3433     break;
3434   case Bytecodes::_fast_cputfield:
3435     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3436     break;
3437   case Bytecodes::_fast_fputfield:
3438     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos*/, noreg, noreg);
3439     break;
3440   case Bytecodes::_fast_dputfield:
3441     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* ftos*/, noreg, noreg);
3442     break;
3443   default:
3444     ShouldNotReachHere();
3445   }
3446 
3447   // Check for volatile store
3448   __ testl(rdx, rdx);
3449   __ jcc(Assembler::zero, notVolatile);
3450   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3451                                                Assembler::StoreStore));
3452   __ bind(notVolatile);
3453 }
3454 
3455 void TemplateTable::fast_accessfield(TosState state) {
3456   transition(atos, state);
3457 
3458   // Do the JVMTI work here to avoid disturbing the register state below
3459   if (JvmtiExport::can_post_field_access()) {
3460     // Check to see if a field access watch has been set before we
3461     // take the time to call into the VM.


3478   }
3479 
3480   // access constant pool cache
3481   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3482   // replace index with field offset from cache entry
3483   // [jk] not needed currently
3484   // if (os::is_MP()) {
3485   //   __ movl(rdx, Address(rcx, rbx, Address::times_8,
3486   //                        in_bytes(ConstantPoolCache::base_offset() +
3487   //                                 ConstantPoolCacheEntry::flags_offset())));
3488   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3489   //   __ andl(rdx, 0x1);
3490   // }
3491   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3492                          in_bytes(ConstantPoolCache::base_offset() +
3493                                   ConstantPoolCacheEntry::f2_offset())));
3494 
3495   // rax: object
3496   __ verify_oop(rax);
3497   __ null_check(rax);

3498   Address field(rax, rbx, Address::times_1);
3499 
3500   // access field
3501   switch (bytecode()) {
3502   case Bytecodes::_fast_agetfield:
3503     do_oop_load(_masm, field, rax);
3504     __ verify_oop(rax);
3505     break;
3506   case Bytecodes::_fast_lgetfield:
3507 #ifdef _LP64
3508     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3509 #else
3510   __ stop("should not be rewritten");
3511 #endif
3512     break;
3513   case Bytecodes::_fast_igetfield:
3514     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3515     break;
3516   case Bytecodes::_fast_bgetfield:
3517     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3518     break;
3519   case Bytecodes::_fast_sgetfield:
3520     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3521     break;
3522   case Bytecodes::_fast_cgetfield:
3523     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3524     break;
3525   case Bytecodes::_fast_fgetfield:
3526     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3527     break;
3528   case Bytecodes::_fast_dgetfield:
3529     __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3530     break;
3531   default:
3532     ShouldNotReachHere();
3533   }
3534   // [jk] not needed currently
3535   // if (os::is_MP()) {
3536   //   Label notVolatile;
3537   //   __ testl(rdx, rdx);
3538   //   __ jcc(Assembler::zero, notVolatile);
3539   //   __ membar(Assembler::LoadLoad);
3540   //   __ bind(notVolatile);
3541   //};
3542 }
3543 
// fast_xaccess: fused aload_0 + fast getfield. Loads the receiver from local
// slot 0, fetches the field offset from the cp cache, and loads the field
// for the requested tos state (itos/atos/ftos only). rbcp is temporarily
// incremented so a NullPointerException reports the bcp of the (second)
// getfield instruction, then restored.
void TemplateTable::fast_xaccess(TosState state) {
  transition(vtos, state);

  // get receiver
  __ movptr(rax, aaddress(0));
  // access constant pool cache
  __ get_cache_and_index_at_bcp(rcx, rdx, 2);
  __ movptr(rbx,
            Address(rcx, rdx, Address::times_ptr,
                    in_bytes(ConstantPoolCache::base_offset() +
                             ConstantPoolCacheEntry::f2_offset())));
  // make sure exception is reported in correct bcp range (getfield is
  // next instruction)
  __ increment(rbcp);
  __ null_check(rax);
  const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
  switch (state) {
  case itos:
    __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
    break;
  case atos:
    do_oop_load(_masm, field, rax);
    __ verify_oop(rax);
    break;
  case ftos:
    __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
    break;
  default:
    ShouldNotReachHere();
  }

  // [jk] not needed currently
  // if (os::is_MP()) {
  //   Label notVolatile;
  //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
  //                        in_bytes(ConstantPoolCache::base_offset() +
  //                                 ConstantPoolCacheEntry::flags_offset())));
  //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
  //   __ testl(rdx, 0x1);
  //   __ jcc(Assembler::zero, notVolatile);
  //   __ membar(Assembler::LoadLoad);
  //   __ bind(notVolatile);
  // }

  __ decrement(rbcp);
}


< prev index next >