src/hotspot/cpu/x86/templateTable_x86.cpp

 764 }
 765 
 766 void TemplateTable::fload() {
 767   transition(vtos, ftos);
 768   locals_index(rbx);
 769   __ load_float(faddress(rbx));
 770 }
 771 
 772 void TemplateTable::dload() {
 773   transition(vtos, dtos);
 774   locals_index(rbx);
 775   __ load_double(daddress(rbx));
 776 }
 777 
 778 void TemplateTable::aload() {
 779   transition(vtos, atos);
 780   locals_index(rbx);
 781   __ movptr(rax, aaddress(rbx));
 782 }
 783 
 784 void TemplateTable::vload() {
 785   transition(vtos, qtos);
 786   locals_index(rbx);
 787   __ movptr(rax, aaddress(rbx));
 788 }
 789 
 790 void TemplateTable::locals_index_wide(Register reg) {
 791   __ load_unsigned_short(reg, at_bcp(2));
 792   __ bswapl(reg);
 793   __ shrl(reg, 16);
 794   __ negptr(reg);
 795 }
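
A minimal sketch (plain C++, not HotSpot code) of what locals_index_wide computes, assuming bcp points at the wide prefix and locals are addressed by negative offsets from the locals base:

    #include <cstdint>

    intptr_t wide_local_index(const uint8_t* bcp) {
      // The operand is a big-endian u2 at bcp + 2 (after the wide prefix and the
      // widened opcode). load_unsigned_short reads it little-endian; bswapl plus
      // shrl(16) perform exactly this byte swap.
      uint16_t index = (uint16_t)((bcp[2] << 8) | bcp[3]);
      // negptr: the interpreter indexes locals downward, so the index is negated.
      return -(intptr_t)index;
    }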
 796 
 797 void TemplateTable::wide_iload() {
 798   transition(vtos, itos);
 799   locals_index_wide(rbx);
 800   __ movl(rax, iaddress(rbx));
 801 }
 802 
 803 void TemplateTable::wide_lload() {
 804   transition(vtos, ltos);
 805   locals_index_wide(rbx);
 806   __ movptr(rax, laddress(rbx));
 807   NOT_LP64(__ movl(rdx, haddress(rbx)));
 808 }
 809 
 810 void TemplateTable::wide_fload() {
 811   transition(vtos, ftos);
 812   locals_index_wide(rbx);
 813   __ load_float(faddress(rbx));
 814 }
 815 
 816 void TemplateTable::wide_dload() {
 817   transition(vtos, dtos);
 818   locals_index_wide(rbx);
 819   __ load_double(daddress(rbx));
 820 }
 821 
 822 void TemplateTable::wide_aload() {
 823   transition(vtos, atos);
 824   locals_index_wide(rbx);
 825   __ movptr(rax, aaddress(rbx));
 826 }
 827 
 828 void TemplateTable::wide_vload() {
 829   transition(vtos, qtos);
 830   locals_index_wide(rbx);
 831   __ movptr(rax, aaddress(rbx));
 832 }
 833 
 834 void TemplateTable::index_check(Register array, Register index) {
 835   // Pop ptr into array
 836   __ pop_ptr(array);
 837   index_check_without_pop(array, index);
 838 }
 839 
 840 void TemplateTable::index_check_without_pop(Register array, Register index) {
 841   // destroys rbx
 842   // check array
 843   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 844   // sign extend index for use by indexed load
 845   __ movl2ptr(index, index);
 846   // check index
 847   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 848   if (index != rbx) {
 849     // ??? convention: move aberrant index into rbx for exception message
 850     assert(rbx != array, "different registers");
 851     __ movl(rbx, index);
 852   }
 853   __ jump_cc(Assembler::aboveEqual,


 891 void TemplateTable::daload() {
 892   transition(itos, dtos);
 893   // rax: index
 894   // rdx: array
 895   index_check(rdx, rax); // kills rbx
 896   __ load_double(Address(rdx, rax,
 897                          Address::times_8,
 898                          arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
 899 }
 900 
 901 void TemplateTable::aaload() {
 902   transition(itos, atos);
 903   // rax: index
 904   // rdx: array
 905   index_check(rdx, rax); // kills rbx
 906   __ load_heap_oop(rax, Address(rdx, rax,
 907                                 UseCompressedOops ? Address::times_4 : Address::times_ptr,
 908                                 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
 909 }
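
The scale factor above depends on UseCompressedOops. A rough sketch of the element address aaload forms (names here are illustrative, not HotSpot's):

    #include <cstddef>
    #include <cstdint>

    uintptr_t oop_element_address(uintptr_t array, uint32_t index,
                                  bool use_compressed_oops, size_t base_offset) {
      // With compressed oops each element is a 4-byte narrowOop (times_4);
      // otherwise it is a full pointer (times_ptr). base_offset corresponds to
      // arrayOopDesc::base_offset_in_bytes(T_OBJECT).
      size_t scale = use_compressed_oops ? 4 : sizeof(void*);
      return array + (uintptr_t)index * scale + base_offset;
    }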
 910 
 911 void TemplateTable::vaload() {
 912   transition(itos, qtos);
 913 
 914   Register array = rcx;
 915   Register index = rax;
 916 
 917   index_check(array, index); // kills rbx, pops array
 918 
 919   __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_load), array, index);
 920 }
 921 
 922 void TemplateTable::baload() {
 923   transition(itos, itos);
 924   // rax: index
 925   // rdx: array
 926   index_check(rdx, rax); // kills rbx
 927   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 928 }
 929 
 930 void TemplateTable::caload() {
 931   transition(itos, itos);
 932   // rax: index
 933   // rdx: array
 934   index_check(rdx, rax); // kills rbx
 935   __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 936 }
 937 
 938 // iload followed by caload frequent pair
 939 void TemplateTable::fast_icaload() {
 940   transition(vtos, itos);
 941   // load index out of locals


1080 
1081 void TemplateTable::fstore() {
1082   transition(ftos, vtos);
1083   locals_index(rbx);
1084   __ store_float(faddress(rbx));
1085 }
1086 
1087 void TemplateTable::dstore() {
1088   transition(dtos, vtos);
1089   locals_index(rbx);
1090   __ store_double(daddress(rbx));
1091 }
1092 
1093 void TemplateTable::astore() {
1094   transition(vtos, vtos);
1095   __ pop_ptr(rax);
1096   locals_index(rbx);
1097   __ movptr(aaddress(rbx), rax);
1098 }
1099 
1100 void TemplateTable::vstore() {
1101   transition(vtos, vtos);
1102   __ pop_ptr(rax);
1103   locals_index(rbx);
1104   __ movptr(aaddress(rbx), rax);
1105 }
1106 
1107 void TemplateTable::wide_istore() {
1108   transition(vtos, vtos);
1109   __ pop_i();
1110   locals_index_wide(rbx);
1111   __ movl(iaddress(rbx), rax);
1112 }
1113 
1114 void TemplateTable::wide_lstore() {
1115   transition(vtos, vtos);
1116   NOT_LP64(__ pop_l(rax, rdx));
1117   LP64_ONLY(__ pop_l());
1118   locals_index_wide(rbx);
1119   __ movptr(laddress(rbx), rax);
1120   NOT_LP64(__ movl(haddress(rbx), rdx));
1121 }
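
For readers unfamiliar with the NOT_LP64/LP64_ONLY pair used above: the macros select code by word size, roughly as defined in utilities/globalDefinitions.hpp; on 32-bit the high half of a long lives in rdx, hence the extra movl:

    #ifdef _LP64
    #define LP64_ONLY(code) code   // compiled only on 64-bit
    #define NOT_LP64(code)         // dropped on 64-bit
    #else
    #define LP64_ONLY(code)
    #define NOT_LP64(code) code
    #endif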
1122 
1123 void TemplateTable::wide_fstore() {
1124 #ifdef _LP64
1125   transition(vtos, vtos);
1126   __ pop_f(xmm0);


1132 }
1133 
1134 void TemplateTable::wide_dstore() {
1135 #ifdef _LP64
1136   transition(vtos, vtos);
1137   __ pop_d(xmm0);
1138   locals_index_wide(rbx);
1139   __ movdbl(daddress(rbx), xmm0);
1140 #else
1141   wide_lstore();
1142 #endif
1143 }
1144 
1145 void TemplateTable::wide_astore() {
1146   transition(vtos, vtos);
1147   __ pop_ptr(rax);
1148   locals_index_wide(rbx);
1149   __ movptr(aaddress(rbx), rax);
1150 }
1151 
1152 void TemplateTable::wide_vstore() {
1153   transition(vtos, vtos);
1154   __ pop_ptr(rax);
1155   locals_index_wide(rbx);
1156   __ movptr(aaddress(rbx), rax);
1157 }
1158 
1159 void TemplateTable::iastore() {
1160   transition(itos, vtos);
1161   __ pop_i(rbx);
1162   // rax: value
1163   // rbx: index
1164   // rdx: array
1165   index_check(rdx, rbx); // prefer index in rbx
1166   __ movl(Address(rdx, rbx,
1167                   Address::times_4,
1168                   arrayOopDesc::base_offset_in_bytes(T_INT)),
1169           rax);
1170 }
1171 
1172 void TemplateTable::lastore() {
1173   transition(ltos, vtos);
1174   __ pop_i(rbx);
 1175   // rax: low(value)
1176   // rcx: array
1177   // rdx: high(value)
 1178   index_check(rcx, rbx);  // prefer index in rbx


1239   __ bind(ok_is_subtype);
1240 
1241   // Get the value we will store
1242   __ movptr(rax, at_tos());
1243   // Now store using the appropriate barrier
1244   do_oop_store(_masm, Address(rdx, 0), rax, _bs->kind(), true);
1245   __ jmp(done);
1246 
1247   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1248   __ bind(is_null);
1249   __ profile_null_seen(rbx);
1250 
1251   // Store a NULL
1252   do_oop_store(_masm, element_address, noreg, _bs->kind(), true);
1253 
1254   // Pop stack arguments
1255   __ bind(done);
1256   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1257 }
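
do_oop_store above couples the reference store with the collector's write barrier. As a conceptual sketch only (a classic card-table post-barrier, not the actual barrier-set dispatch):

    #include <cstdint>

    static void store_oop_with_card_mark(void** slot, void* value,
                                         uint8_t* card_table, int card_shift) {
      *slot = value;                                  // the store itself
      card_table[(uintptr_t)slot >> card_shift] = 0;  // dirty the covering card
    }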
1258 
1259 void TemplateTable::vastore() {
1260   transition(vtos, vtos);
1261 
1262   Register value = rcx;
1263   Register index = rbx;
1264   Register array = rax;
1265 
1266   // stack: ..., array, index, value
1267   __ pop_ptr(value);
1268   __ pop_i(index);
1269   __ pop_ptr(array);
1270 
1271   index_check_without_pop(array, index);
1272 
1273   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), array, index, value);
1274 }
1275 
1276 void TemplateTable::bastore() {
1277   transition(itos, vtos);
1278   __ pop_i(rbx);
1279   // rax: value
1280   // rbx: index
1281   // rdx: array
1282   index_check(rdx, rbx); // prefer index in rbx
1283   // Need to check whether array is boolean or byte
1284   // since both types share the bastore bytecode.
1285   __ load_klass(rcx, rdx);
1286   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1287   int diffbit = Klass::layout_helper_boolean_diffbit();
1288   __ testl(rcx, diffbit);
1289   Label L_skip;
1290   __ jccb(Assembler::zero, L_skip);
1291   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1292   __ bind(L_skip);
1293   __ movb(Address(rdx, rbx,
1294                   Address::times_1,


2788     __ jcc(Assembler::zero, skip_register_finalizer);
2789 
2790     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), robj);
2791 
2792     __ bind(skip_register_finalizer);
2793   }
2794 
2795 #ifdef _LP64
2796   if (SafepointMechanism::uses_thread_local_poll() && _desc->bytecode() != Bytecodes::_return_register_finalizer) {
2797     Label no_safepoint;
2798     NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2799     __ testb(Address(r15_thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2800     __ jcc(Assembler::zero, no_safepoint);
2801     __ push(state);
2802     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2803                                     InterpreterRuntime::at_safepoint));
2804     __ pop(state);
2805     __ bind(no_safepoint);
2806   }
2807 #endif
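
The thread-local poll above boils down to a single byte test; a simplified sketch of the check (illustrative signature):

    #include <cstdint>

    inline bool safepoint_pending(const uint8_t* polling_word, uint8_t poll_bit) {
      // testb(Address(r15_thread, polling_page_offset()), poll_bit) + jcc(zero, ...)
      return (*polling_word & poll_bit) != 0;
    }

When the bit is set, the generated code preserves the tos value around the runtime call, which is what the push(state)/pop(state) pair does.
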
2808   if (state == qtos) {
2809     const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
2810     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::return_value), rax);
2811     NOT_LP64(__ get_thread(thread1));
2812     __ get_vm_result(rax, thread1);
2813   }
2814 
2815   // Narrow result if state is itos but result type is smaller.
2816   // Need to narrow in the return bytecode rather than in generate_return_entry
2817   // since compiled code callers expect the result to already be narrowed.
2818   if (state == itos) {
2819     __ narrow(rax);
2820   }
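
What __ narrow(rax) does, sketched in plain C++ (the real code dispatches on the method's result type tag):

    #include <cstdint>

    int32_t narrow_result(int32_t v, char result_type) {
      switch (result_type) {
        case 'Z': return v & 1;            // boolean: keep only the low bit
        case 'B': return (int8_t)v;        // byte: sign-extend the low 8 bits
        case 'C': return (uint16_t)v;      // char: zero-extend the low 16 bits
        case 'S': return (int16_t)v;       // short: sign-extend the low 16 bits
        default:  return v;                // int-sized results are unchanged
      }
    }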
2821 
2822 #ifdef ASSERT
2823   if (EnableMVT || EnableValhalla) {
2824     if (state == atos) {
2825       const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
2826       __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::check_areturn), rax);
2827       NOT_LP64(__ get_thread(thread1));
2828       __ get_vm_result(rax, thread1);
2829     }
2830   }
2831 #endif // ASSERT
2832 
2833   __ remove_activation(state, rbcp, true, true, true, state == qtos && ValueTypeReturnedAsFields);
2834 
2835   __ jmp(rbcp);
2836 }
2837 
2838 // ----------------------------------------------------------------------------
 2840 // Volatile variables demand their effects be made known to all CPUs
2840 // in order.  Store buffers on most chips allow reads & writes to
2841 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2842 // without some kind of memory barrier (i.e., it's not sufficient that
2843 // the interpreter does not reorder volatile references, the hardware
2844 // also must not reorder them).
2845 //
2846 // According to the new Java Memory Model (JMM):
 2847 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
 2848 //     writes act as acquire & release, so:
2849 // (2) A read cannot let unrelated NON-volatile memory refs that
2850 //     happen after the read float up to before the read.  It's OK for
2851 //     non-volatile memory refs that happen before the volatile read to
2852 //     float down below it.
 2853 // (3) Similarly, a volatile write cannot let unrelated NON-volatile


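The rules in the comment above map onto acquire/release fencing. As an analogy in C++ atomics (an illustration, not the interpreter's implementation):

    #include <atomic>

    std::atomic<int> vol;  // stands in for a Java volatile
    int plain;             // stands in for ordinary memory

    int volatile_read() {
      int x = vol.load(std::memory_order_acquire);  // rule (2): later plain refs
      plain = 1;                                    // cannot float above the load
      return x;
    }

    void volatile_write(int x) {
      plain = 2;                                    // rule (3): earlier plain refs
      vol.store(x, std::memory_order_release);      // cannot sink below the store
    }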
3039 
3040   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3041   // Make sure we don't need to mask edx after the above shift
3042   assert(btos == 0, "change code, btos != 0");
3043 
3044   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3045 
3046   __ jcc(Assembler::notZero, notByte);
3047   // btos
3048   if (!is_static) pop_and_check_object(obj);
3049   __ load_signed_byte(rax, field);
3050   __ push(btos);
3051   // Rewrite bytecode to be faster
3052   if (!is_static && rc == may_rewrite) {
3053     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3054   }
3055   __ jmp(Done);
3056 
3057   __ bind(notByte);
3058 
3059   __ cmpl(flags, qtos);
3060   __ jcc(Assembler::notEqual, notValueType);
3061   // qtos
3062   if (is_static) {
3063     Label initialized;
 3064   // Call the runtime below if the static field has not been initialized yet
3065     __ load_heap_oop(rax, field);
3066     __ testptr(rax, rax);
3067     __ jcc(Assembler::notZero, initialized);
3068     __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3069     __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_value_field),
3070          obj, flags2);
3071     __ verify_oop(rax);
3072     __ bind(initialized);
3073     __ push(qtos);
3074   } else {
3075     __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3076     pop_and_check_object(rbx);
3077     call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::qgetfield),
3078             rbx, flags2, rcx);
3079     __ verify_oop(rax);
3080     __ push(qtos);
3081     // Bytecode rewrite?
3082   }
3083   __ jmp(Done);
3084 
3085   __ bind(notValueType);
3086 
3087   if (!is_static) pop_and_check_object(obj);
3088 
3089   __ cmpl(flags, ztos);
3090   __ jcc(Assembler::notEqual, notBool);
3091 
3092   // ztos (same code as btos)
3093   __ load_signed_byte(rax, field);
3094   __ push(ztos);
3095   // Rewrite bytecode to be faster
3096   if (!is_static && rc == may_rewrite) {
3097     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3098     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3099   }
3100   __ jmp(Done);
3101 
3102   __ bind(notBool);
3103   __ cmpl(flags, atos);
3104   __ jcc(Assembler::notEqual, notObj);
3105   // atos


3202 #endif
3203 
3204   __ bind(Done);
3205   // [jk] not needed currently
3206   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3207   //                                              Assembler::LoadStore));
3208 }
3209 
3210 void TemplateTable::getfield(int byte_no) {
3211   getfield_or_static(byte_no, false);
3212 }
3213 
3214 void TemplateTable::nofast_getfield(int byte_no) {
3215   getfield_or_static(byte_no, false, may_not_rewrite);
3216 }
3217 
3218 void TemplateTable::getstatic(int byte_no) {
3219   getfield_or_static(byte_no, true);
3220 }
3221 
3222 void TemplateTable::vwithfield() {
3223   transition(vtos, qtos);
3224 
3225   Register cache = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
3226   Register index = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
3227 
3228   resolve_cache_and_index(f2_byte, cache, index, sizeof(u2));
3229 
3230   call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::vwithfield), cache);
3231   // new value type is returned in rbx
 3232   // stack adjustment is returned in rax
3233   __ verify_oop(rbx);
3234   __ addptr(rsp, rax);
3235   __ movptr(rax, rbx);
3236 }
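
The dual return documented above can be pictured like this (an illustrative struct, not a HotSpot type): the runtime hands back the new value and the number of stack bytes the bytecode consumed, and the template applies both.

    #include <cstdint>

    struct WithFieldResult {
      void*    new_value;               // arrives in rbx
      intptr_t stack_adjustment_bytes;  // arrives in rax
    };

    static void* apply_withfield(uintptr_t& sp, const WithFieldResult& r) {
      sp += r.stack_adjustment_bytes;   // __ addptr(rsp, rax)
      return r.new_value;               // __ movptr(rax, rbx)
    }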
3237 
 3238 // The registers cache and index are expected to be set before the call.
3239 // The function may destroy various registers, just not the cache and index registers.
3240 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3241 
3242   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3243   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3244   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3245   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3246 
3247   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3248 
3249   if (JvmtiExport::can_post_field_modification()) {
3250     // Check to see if a field modification watch has been set before


3384     __ jmp(Done);
3385   }
3386 
3387   __ bind(notBool);
3388   __ cmpl(flags, atos);
3389   __ jcc(Assembler::notEqual, notObj);
3390 
3391   // atos
3392   {
3393     __ pop(atos);
3394     if (!is_static) pop_and_check_object(obj);
3395     // Store into the field
3396     do_oop_store(_masm, field, rax, _bs->kind(), false);
3397     if (!is_static && rc == may_rewrite) {
3398       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3399     }
3400     __ jmp(Done);
3401   }
3402 
3403   __ bind(notObj);
3404   __ cmpl(flags, qtos);
3405   __ jcc(Assembler::notEqual, notValueType);
3406 
3407   // qtos
3408   {
3409     __ pop(qtos); // => rax == value
3410     if (!is_static) {
3411       // value types in non-static fields are embedded
3412       pop_and_check_object(rbx);
3413       call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::qputfield),
3414           rbx, rax, rcx);
3415       __ jmp(notVolatile); // value types are never volatile
3416     } else {
3417       // Store into the static field
3418       // Value types in static fields are currently handled with indirection
3419       // but a copy to the Java heap might be required if the value is currently
3420       // stored in a thread local buffer
3421       call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::qputstatic), rax, off, obj);
3422     }
3423     __ jmp(Done);
3424   }
3425 
3426   __ bind(notValueType);
3427   __ cmpl(flags, itos);
3428   __ jcc(Assembler::notEqual, notInt);
3429 
3430   // itos
3431   {
3432     __ pop(itos);
3433     if (!is_static) pop_and_check_object(obj);
3434     __ movl(field, rax);
3435     if (!is_static && rc == may_rewrite) {
3436       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3437     }
3438     __ jmp(Done);
3439   }
3440 
3441   __ bind(notInt);
3442   __ cmpl(flags, ctos);
3443   __ jcc(Assembler::notEqual, notChar);
3444 
3445   // ctos
3446   {


4360     __ jmp(done);
4361   }
4362 
4363   // slow case
4364   __ bind(slow_case);
4365   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4366   __ bind(slow_case_no_pop);
4367 
4368   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4369   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4370 
4371   __ get_constant_pool(rarg1);
4372   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4373   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
 4374   __ verify_oop(rax);
4375 
4376   // continue
4377   __ bind(done);
4378 }
4379 
4380 void TemplateTable::vdefault() {
4381   transition(vtos, qtos);
4382 
4383   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4384   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4385 
4386   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4387   __ get_constant_pool(rarg1);
4388 
4389   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::vdefault),
4390       rarg1, rarg2);
4391   __ verify_oop(rax);
4392 }
4393 
4394 void TemplateTable::newarray() {
4395   transition(itos, atos);
4396   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4397   __ load_unsigned_byte(rarg1, at_bcp(1));
4398   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4399           rarg1, rax);
4400 }
4401 
4402 void TemplateTable::anewarray() {
4403   transition(itos, atos);
4404 
4405   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4406   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4407 
4408   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4409   __ get_constant_pool(rarg1);


4528   // Come here on failure
4529   __ xorl(rax, rax);
4530   __ jmpb(done);
4531   // Come here on success
4532   __ bind(ok_is_subtype);
4533   __ movl(rax, 1);
4534 
4535   // Collect counts on whether this test sees NULLs a lot or not.
4536   if (ProfileInterpreter) {
4537     __ jmp(done);
4538     __ bind(is_null);
4539     __ profile_null_seen(rcx);
4540   } else {
4541     __ bind(is_null);   // same as 'done'
4542   }
4543   __ bind(done);
 4544   // rax = 0: obj == NULL or  obj is not an instance of the specified klass
 4545   // rax = 1: obj != NULL and obj is     an instance of the specified klass
4546 }
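
The rax encoding documented above, restated as plain logic (a generic sketch; HotSpot's actual subtype test is Klass::is_subtype_of):

    template <typename Oop, typename KlassT>
    int instanceof_result(Oop* obj, KlassT* k) {
      // 0: obj is null or not a subtype; 1: non-null and a subtype.
      return (obj != nullptr && obj->klass()->is_subtype_of(k)) ? 1 : 0;
    }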
4547 
4548 void TemplateTable::_vbox() {
4549   transition(qtos, atos);
4550 
4551   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4552   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4553 
4554   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4555   __ get_constant_pool(rarg1);
4556   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::vbox),
4557       rarg1, rarg2, rax);
4558 }
4559 
4560 void TemplateTable::_vunbox() {
4561   transition(atos, qtos);
4562 
4563   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4564   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4565 
4566   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4567   __ get_constant_pool(rarg1);
4568   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::vunbox),
4569       rarg1, rarg2, rax);
4570 }
4571 
4572 //----------------------------------------------------------------------------------------------------
4573 // Breakpoints
4574 void TemplateTable::_breakpoint() {
 4575   // Note: We get here even if we are single stepping.
4576   // jbug insists on setting breakpoints at every bytecode
4577   // even if we are in single step mode.
4578 
4579   transition(vtos, vtos);
4580 
4581   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4582 
4583   // get the unpatched byte code
4584   __ get_method(rarg);
4585   __ call_VM(noreg,
4586              CAST_FROM_FN_PTR(address,
4587                               InterpreterRuntime::get_original_bytecode_at),
4588              rarg, rbcp);
4589   __ mov(rbx, rax);  // why?
4590 
4591   // post the breakpoint event




 764 }
 765 
 766 void TemplateTable::fload() {
 767   transition(vtos, ftos);
 768   locals_index(rbx);
 769   __ load_float(faddress(rbx));
 770 }
 771 
 772 void TemplateTable::dload() {
 773   transition(vtos, dtos);
 774   locals_index(rbx);
 775   __ load_double(daddress(rbx));
 776 }
 777 
 778 void TemplateTable::aload() {
 779   transition(vtos, atos);
 780   locals_index(rbx);
 781   __ movptr(rax, aaddress(rbx));
 782 }
 783 
 784 void TemplateTable::locals_index_wide(Register reg) {
 785   __ load_unsigned_short(reg, at_bcp(2));
 786   __ bswapl(reg);
 787   __ shrl(reg, 16);
 788   __ negptr(reg);
 789 }
 790 
 791 void TemplateTable::wide_iload() {
 792   transition(vtos, itos);
 793   locals_index_wide(rbx);
 794   __ movl(rax, iaddress(rbx));
 795 }
 796 
 797 void TemplateTable::wide_lload() {
 798   transition(vtos, ltos);
 799   locals_index_wide(rbx);
 800   __ movptr(rax, laddress(rbx));
 801   NOT_LP64(__ movl(rdx, haddress(rbx)));
 802 }
 803 
 804 void TemplateTable::wide_fload() {
 805   transition(vtos, ftos);
 806   locals_index_wide(rbx);
 807   __ load_float(faddress(rbx));
 808 }
 809 
 810 void TemplateTable::wide_dload() {
 811   transition(vtos, dtos);
 812   locals_index_wide(rbx);
 813   __ load_double(daddress(rbx));
 814 }
 815 
 816 void TemplateTable::wide_aload() {
 817   transition(vtos, atos);
 818   locals_index_wide(rbx);
 819   __ movptr(rax, aaddress(rbx));
 820 }
 821 
 822 void TemplateTable::index_check(Register array, Register index) {
 823   // Pop ptr into array
 824   __ pop_ptr(array);
 825   index_check_without_pop(array, index);
 826 }
 827 
 828 void TemplateTable::index_check_without_pop(Register array, Register index) {
 829   // destroys rbx
 830   // check array
 831   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 832   // sign extend index for use by indexed load
 833   __ movl2ptr(index, index);
 834   // check index
 835   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 836   if (index != rbx) {
 837     // ??? convention: move aberrant index into rbx for exception message
 838     assert(rbx != array, "different registers");
 839     __ movl(rbx, index);
 840   }
 841   __ jump_cc(Assembler::aboveEqual,


 879 void TemplateTable::daload() {
 880   transition(itos, dtos);
 881   // rax: index
 882   // rdx: array
 883   index_check(rdx, rax); // kills rbx
 884   __ load_double(Address(rdx, rax,
 885                          Address::times_8,
 886                          arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
 887 }
 888 
 889 void TemplateTable::aaload() {
 890   transition(itos, atos);
 891   // rax: index
 892   // rdx: array
 893   index_check(rdx, rax); // kills rbx
 894   __ load_heap_oop(rax, Address(rdx, rax,
 895                                 UseCompressedOops ? Address::times_4 : Address::times_ptr,
 896                                 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
 897 }
 898 
 899 void TemplateTable::baload() {
 900   transition(itos, itos);
 901   // rax: index
 902   // rdx: array
 903   index_check(rdx, rax); // kills rbx
 904   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 905 }
 906 
 907 void TemplateTable::caload() {
 908   transition(itos, itos);
 909   // rax: index
 910   // rdx: array
 911   index_check(rdx, rax); // kills rbx
 912   __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 913 }
 914 
 915 // iload followed by caload frequent pair
 916 void TemplateTable::fast_icaload() {
 917   transition(vtos, itos);
 918   // load index out of locals


1057 
1058 void TemplateTable::fstore() {
1059   transition(ftos, vtos);
1060   locals_index(rbx);
1061   __ store_float(faddress(rbx));
1062 }
1063 
1064 void TemplateTable::dstore() {
1065   transition(dtos, vtos);
1066   locals_index(rbx);
1067   __ store_double(daddress(rbx));
1068 }
1069 
1070 void TemplateTable::astore() {
1071   transition(vtos, vtos);
1072   __ pop_ptr(rax);
1073   locals_index(rbx);
1074   __ movptr(aaddress(rbx), rax);
1075 }
1076 
1077 void TemplateTable::wide_istore() {
1078   transition(vtos, vtos);
1079   __ pop_i();
1080   locals_index_wide(rbx);
1081   __ movl(iaddress(rbx), rax);
1082 }
1083 
1084 void TemplateTable::wide_lstore() {
1085   transition(vtos, vtos);
1086   NOT_LP64(__ pop_l(rax, rdx));
1087   LP64_ONLY(__ pop_l());
1088   locals_index_wide(rbx);
1089   __ movptr(laddress(rbx), rax);
1090   NOT_LP64(__ movl(haddress(rbx), rdx));
1091 }
1092 
1093 void TemplateTable::wide_fstore() {
1094 #ifdef _LP64
1095   transition(vtos, vtos);
1096   __ pop_f(xmm0);


1102 }
1103 
1104 void TemplateTable::wide_dstore() {
1105 #ifdef _LP64
1106   transition(vtos, vtos);
1107   __ pop_d(xmm0);
1108   locals_index_wide(rbx);
1109   __ movdbl(daddress(rbx), xmm0);
1110 #else
1111   wide_lstore();
1112 #endif
1113 }
1114 
1115 void TemplateTable::wide_astore() {
1116   transition(vtos, vtos);
1117   __ pop_ptr(rax);
1118   locals_index_wide(rbx);
1119   __ movptr(aaddress(rbx), rax);
1120 }
1121 
1122 void TemplateTable::iastore() {
1123   transition(itos, vtos);
1124   __ pop_i(rbx);
1125   // rax: value
1126   // rbx: index
1127   // rdx: array
1128   index_check(rdx, rbx); // prefer index in rbx
1129   __ movl(Address(rdx, rbx,
1130                   Address::times_4,
1131                   arrayOopDesc::base_offset_in_bytes(T_INT)),
1132           rax);
1133 }
1134 
1135 void TemplateTable::lastore() {
1136   transition(ltos, vtos);
1137   __ pop_i(rbx);
 1138   // rax: low(value)
1139   // rcx: array
1140   // rdx: high(value)
 1141   index_check(rcx, rbx);  // prefer index in rbx


1202   __ bind(ok_is_subtype);
1203 
1204   // Get the value we will store
1205   __ movptr(rax, at_tos());
1206   // Now store using the appropriate barrier
1207   do_oop_store(_masm, Address(rdx, 0), rax, _bs->kind(), true);
1208   __ jmp(done);
1209 
1210   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1211   __ bind(is_null);
1212   __ profile_null_seen(rbx);
1213 
1214   // Store a NULL
1215   do_oop_store(_masm, element_address, noreg, _bs->kind(), true);
1216 
1217   // Pop stack arguments
1218   __ bind(done);
1219   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1220 }
1221 
1222 // This code has to be merged with aastore
1223 //void TemplateTable::vastore() {
1224 //  transition(vtos, vtos);
1225 //
1226 //  Register value = rcx;
1227 //  Register index = rbx;
1228 //  Register array = rax;
1229 //
1230 //  // stack: ..., array, index, value
1231 //  __ pop_ptr(value);
1232 //  __ pop_i(index);
1233 //  __ pop_ptr(array);
1234 //
1235 //  index_check_without_pop(array, index);
1236 //
1237 //  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), array, index, value);
1238 //}
1239 
1240 void TemplateTable::bastore() {
1241   transition(itos, vtos);
1242   __ pop_i(rbx);
1243   // rax: value
1244   // rbx: index
1245   // rdx: array
1246   index_check(rdx, rbx); // prefer index in rbx
1247   // Need to check whether array is boolean or byte
1248   // since both types share the bastore bytecode.
1249   __ load_klass(rcx, rdx);
1250   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1251   int diffbit = Klass::layout_helper_boolean_diffbit();
1252   __ testl(rcx, diffbit);
1253   Label L_skip;
1254   __ jccb(Assembler::zero, L_skip);
1255   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1256   __ bind(L_skip);
1257   __ movb(Address(rdx, rbx,
1258                   Address::times_1,


2752     __ jcc(Assembler::zero, skip_register_finalizer);
2753 
2754     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), robj);
2755 
2756     __ bind(skip_register_finalizer);
2757   }
2758 
2759 #ifdef _LP64
2760   if (SafepointMechanism::uses_thread_local_poll() && _desc->bytecode() != Bytecodes::_return_register_finalizer) {
2761     Label no_safepoint;
2762     NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2763     __ testb(Address(r15_thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2764     __ jcc(Assembler::zero, no_safepoint);
2765     __ push(state);
2766     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2767                                     InterpreterRuntime::at_safepoint));
2768     __ pop(state);
2769     __ bind(no_safepoint);
2770   }
2771 #endif
 2772 //  This code has to be restored before re-enabling value buffering in the TLVB
2773 //  if (state == qtos) {
2774 //    const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
2775 //    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::return_value), rax);
2776 //    NOT_LP64(__ get_thread(thread1));
2777 //    __ get_vm_result(rax, thread1);
2778 //  }
2779 
2780   // Narrow result if state is itos but result type is smaller.
2781   // Need to narrow in the return bytecode rather than in generate_return_entry
2782   // since compiled code callers expect the result to already be narrowed.
2783   if (state == itos) {
2784     __ narrow(rax);
2785   }
2786 
2787 #ifdef ASSERT
2788   if (EnableMVT || EnableValhalla) {
2789     if (state == atos) {
2790       const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
2791       __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::check_areturn), rax);
2792       NOT_LP64(__ get_thread(thread1));
2793       __ get_vm_result(rax, thread1);
2794     }
2795   }
2796 #endif // ASSERT
2797 
2798   __ remove_activation(state, rbcp, true, true, true, /*state == qtos*/ false && ValueTypeReturnedAsFields);
2799 
2800   __ jmp(rbcp);
2801 }
2802 
2803 // ----------------------------------------------------------------------------
 2804 // Volatile variables demand their effects be made known to all CPUs
2805 // in order.  Store buffers on most chips allow reads & writes to
2806 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2807 // without some kind of memory barrier (i.e., it's not sufficient that
2808 // the interpreter does not reorder volatile references, the hardware
2809 // also must not reorder them).
2810 //
2811 // According to the new Java Memory Model (JMM):
 2812 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
 2813 //     writes act as acquire & release, so:
2814 // (2) A read cannot let unrelated NON-volatile memory refs that
2815 //     happen after the read float up to before the read.  It's OK for
2816 //     non-volatile memory refs that happen before the volatile read to
2817 //     float down below it.
 2818 // (3) Similarly, a volatile write cannot let unrelated NON-volatile


3004 
3005   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3006   // Make sure we don't need to mask edx after the above shift
3007   assert(btos == 0, "change code, btos != 0");
3008 
3009   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3010 
3011   __ jcc(Assembler::notZero, notByte);
3012   // btos
3013   if (!is_static) pop_and_check_object(obj);
3014   __ load_signed_byte(rax, field);
3015   __ push(btos);
3016   // Rewrite bytecode to be faster
3017   if (!is_static && rc == may_rewrite) {
3018     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3019   }
3020   __ jmp(Done);
3021 
3022   __ bind(notByte);
3023 
3024 //  __ cmpl(flags, qtos);
3025 //  __ jcc(Assembler::notEqual, notValueType);
3026 //  // qtos
3027 //  if (is_static) {
3028 //    Label initialized;
 3029 //    // Call the runtime below if the static field has not been initialized yet
3030 //    __ load_heap_oop(rax, field);
3031 //    __ testptr(rax, rax);
3032 //    __ jcc(Assembler::notZero, initialized);
3033 //    __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3034 //    __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_value_field),
3035 //         obj, flags2);
3036 //    __ verify_oop(rax);
3037 //    __ bind(initialized);
3038 //    __ push(atos);
3039 //  } else {
3040 //    __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3041 //    pop_and_check_object(rbx);
3042 //    call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::qgetfield),
3043 //            rbx, flags2, rcx);
3044 //    __ verify_oop(rax);
3045 //    __ push(atos);
3046 //    // Bytecode rewrite?
3047 //  }
3048 //  __ jmp(Done);
3049 //
3050 //  __ bind(notValueType);
3051 
3052   if (!is_static) pop_and_check_object(obj);
3053 
3054   __ cmpl(flags, ztos);
3055   __ jcc(Assembler::notEqual, notBool);
3056 
3057   // ztos (same code as btos)
3058   __ load_signed_byte(rax, field);
3059   __ push(ztos);
3060   // Rewrite bytecode to be faster
3061   if (!is_static && rc == may_rewrite) {
3062     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3063     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3064   }
3065   __ jmp(Done);
3066 
3067   __ bind(notBool);
3068   __ cmpl(flags, atos);
3069   __ jcc(Assembler::notEqual, notObj);
3070   // atos


3167 #endif
3168 
3169   __ bind(Done);
3170   // [jk] not needed currently
3171   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3172   //                                              Assembler::LoadStore));
3173 }
3174 
3175 void TemplateTable::getfield(int byte_no) {
3176   getfield_or_static(byte_no, false);
3177 }
3178 
3179 void TemplateTable::nofast_getfield(int byte_no) {
3180   getfield_or_static(byte_no, false, may_not_rewrite);
3181 }
3182 
3183 void TemplateTable::getstatic(int byte_no) {
3184   getfield_or_static(byte_no, true);
3185 }
3186 
3187 void TemplateTable::withfield() {
3188   transition(vtos, atos);
3189 
3190   Register cache = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
3191   Register index = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
3192 
3193   resolve_cache_and_index(f2_byte, cache, index, sizeof(u2));
3194 
3195   call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), cache);
3196   // new value type is returned in rbx
 3197   // stack adjustment is returned in rax
3198   __ verify_oop(rbx);
3199   __ addptr(rsp, rax);
3200   __ movptr(rax, rbx);
3201 }
3202 
 3203 // The registers cache and index are expected to be set before the call.
3204 // The function may destroy various registers, just not the cache and index registers.
3205 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3206 
3207   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3208   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3209   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3210   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3211 
3212   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3213 
3214   if (JvmtiExport::can_post_field_modification()) {
3215     // Check to see if a field modification watch has been set before


3349     __ jmp(Done);
3350   }
3351 
3352   __ bind(notBool);
3353   __ cmpl(flags, atos);
3354   __ jcc(Assembler::notEqual, notObj);
3355 
3356   // atos
3357   {
3358     __ pop(atos);
3359     if (!is_static) pop_and_check_object(obj);
3360     // Store into the field
3361     do_oop_store(_masm, field, rax, _bs->kind(), false);
3362     if (!is_static && rc == may_rewrite) {
3363       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3364     }
3365     __ jmp(Done);
3366   }
3367 
3368   __ bind(notObj);
3369 
3370 //  __ cmpl(flags, qtos);
3371 //  __ jcc(Assembler::notEqual, notValueType);
3372 //
3373 //  // qtos
3374 //  {
3375 //    __ pop(atos); // => rax == value
3376 //    if (!is_static) {
3377 //      // value types in non-static fields are embedded
3378 //      pop_and_check_object(rbx);
3379 //      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::qputfield),
3380 //          rbx, rax, rcx);
3381 //      __ jmp(notVolatile); // value types are never volatile
3382 //    } else {
3383 //      // Store into the static field
3384 //      // Value types in static fields are currently handled with indirection
3385 //      // but a copy to the Java heap might be required if the value is currently
3386 //      // stored in a thread local buffer
3387 //      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::qputstatic), rax, off, obj);
3388 //    }
3389 //    __ jmp(Done);
3390 //  }
3391 //
3392 //  __ bind(notValueType);
3393 
3394   __ cmpl(flags, itos);
3395   __ jcc(Assembler::notEqual, notInt);
3396 
3397   // itos
3398   {
3399     __ pop(itos);
3400     if (!is_static) pop_and_check_object(obj);
3401     __ movl(field, rax);
3402     if (!is_static && rc == may_rewrite) {
3403       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3404     }
3405     __ jmp(Done);
3406   }
3407 
3408   __ bind(notInt);
3409   __ cmpl(flags, ctos);
3410   __ jcc(Assembler::notEqual, notChar);
3411 
3412   // ctos
3413   {


4327     __ jmp(done);
4328   }
4329 
4330   // slow case
4331   __ bind(slow_case);
4332   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4333   __ bind(slow_case_no_pop);
4334 
4335   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4336   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4337 
4338   __ get_constant_pool(rarg1);
4339   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4340   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
 4341   __ verify_oop(rax);
4342 
4343   // continue
4344   __ bind(done);
4345 }
4346 
4347 void TemplateTable::defaultvalue() {
4348   transition(vtos, atos);
4349 
4350   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4351   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4352 
4353   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4354   __ get_constant_pool(rarg1);
4355 
4356   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
4357       rarg1, rarg2);
4358   __ verify_oop(rax);
4359 }
4360 
4361 void TemplateTable::newarray() {
4362   transition(itos, atos);
4363   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4364   __ load_unsigned_byte(rarg1, at_bcp(1));
4365   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4366           rarg1, rax);
4367 }
4368 
4369 void TemplateTable::anewarray() {
4370   transition(itos, atos);
4371 
4372   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4373   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4374 
4375   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4376   __ get_constant_pool(rarg1);


4495   // Come here on failure
4496   __ xorl(rax, rax);
4497   __ jmpb(done);
4498   // Come here on success
4499   __ bind(ok_is_subtype);
4500   __ movl(rax, 1);
4501 
4502   // Collect counts on whether this test sees NULLs a lot or not.
4503   if (ProfileInterpreter) {
4504     __ jmp(done);
4505     __ bind(is_null);
4506     __ profile_null_seen(rcx);
4507   } else {
4508     __ bind(is_null);   // same as 'done'
4509   }
4510   __ bind(done);
 4511   // rax = 0: obj == NULL or  obj is not an instance of the specified klass
 4512   // rax = 1: obj != NULL and obj is     an instance of the specified klass
4513 }
4514 
4515 //----------------------------------------------------------------------------------------------------
4516 // Breakpoints
4517 void TemplateTable::_breakpoint() {
 4518   // Note: We get here even if we are single stepping.
4519   // jbug insists on setting breakpoints at every bytecode
4520   // even if we are in single step mode.
4521 
4522   transition(vtos, vtos);
4523 
4524   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4525 
4526   // get the unpatched byte code
4527   __ get_method(rarg);
4528   __ call_VM(noreg,
4529              CAST_FROM_FN_PTR(address,
4530                               InterpreterRuntime::get_original_bytecode_at),
4531              rarg, rbcp);
4532   __ mov(rbx, rax);  // why?
4533 
4534   // post the breakpoint event

