
src/hotspot/cpu/x86/templateTable_x86.cpp

 160 static void do_oop_load(InterpreterMacroAssembler* _masm,
 161                         Address src,
 162                         Register dst,
 163                         DecoratorSet decorators = 0) {
 164   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 165 }
 166 
 167 Address TemplateTable::at_bcp(int offset) {
 168   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 169   return Address(rbcp, offset);
 170 }
 171 
 172 
 173 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 174                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 175                                    int byte_no) {
 176   if (!RewriteBytecodes)  return;
 177   Label L_patch_done;
 178 
 179   switch (bc) {

 180   case Bytecodes::_fast_aputfield:
 181   case Bytecodes::_fast_bputfield:
 182   case Bytecodes::_fast_zputfield:
 183   case Bytecodes::_fast_cputfield:
 184   case Bytecodes::_fast_dputfield:
 185   case Bytecodes::_fast_fputfield:
 186   case Bytecodes::_fast_iputfield:
 187   case Bytecodes::_fast_lputfield:
 188   case Bytecodes::_fast_sputfield:
 189     {
 190       // We skip bytecode quickening for putfield instructions when
 191       // the put_code written to the constant pool cache is zero.
 192       // This is required so that every execution of this instruction
 193       // calls out to InterpreterRuntime::resolve_get_put to do
 194       // additional, required work.
 195       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 196       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 197       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 198       __ movl(bc_reg, bc);
 199       __ cmpl(temp_reg, (int) 0);


 352   __ sarl(rax, 16);
 353 }
 354 
 355 void TemplateTable::ldc(bool wide) {
 356   transition(vtos, vtos);
 357   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 358   Label call_ldc, notFloat, notClass, notInt, Done;
 359 
 360   if (wide) {
 361     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 362   } else {
 363     __ load_unsigned_byte(rbx, at_bcp(1));
 364   }
 365 
 366   __ get_cpool_and_tags(rcx, rax);
 367   const int base_offset = ConstantPool::header_size() * wordSize;
 368   const int tags_offset = Array<u1>::base_offset_in_bytes();
 369 
 370   // get type
 371   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));

 372 
 373   // unresolved class - get the resolved class
 374   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 375   __ jccb(Assembler::equal, call_ldc);
 376 
 377   // unresolved class in error state - call into runtime to throw the error
 378   // from the first resolution attempt
 379   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 380   __ jccb(Assembler::equal, call_ldc);
 381 
 382   // resolved class - need to call vm to get java mirror of the class
 383   __ cmpl(rdx, JVM_CONSTANT_Class);
 384   __ jcc(Assembler::notEqual, notClass);
 385 
 386   __ bind(call_ldc);
 387 
 388   __ movl(rarg, wide);
 389   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 390 
 391   __ push(atos);


 802                     Address(rdx, rax,
 803                             Address::times_4,
 804                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 805                     noreg, noreg);
 806 }
 807 
 808 void TemplateTable::daload() {
 809   transition(itos, dtos);
 810   // rax: index
 811   // rdx: array
 812   index_check(rdx, rax); // kills rbx
 813   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 814                     Address(rdx, rax,
 815                             Address::times_8,
 816                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 817                     noreg, noreg);
 818 }
 819 
 820 void TemplateTable::aaload() {
 821   transition(itos, atos);
 822   // rax: index
 823   // rdx: array
 824   index_check(rdx, rax); // kills rbx
 825   do_oop_load(_masm,
 826               Address(rdx, rax,
 827                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 828                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 829               rax,
 830               IS_ARRAY);
 831 }
 832 
 833 void TemplateTable::baload() {
 834   transition(itos, itos);
 835   // rax: index
 836   // rdx: array
 837   index_check(rdx, rax); // kills rbx
 838   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 839                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 840                     noreg, noreg);
 841 }
 842 
 843 void TemplateTable::caload() {
 844   transition(itos, itos);
 845   // rax: index
 846   // rdx: array
 847   index_check(rdx, rax); // kills rbx
 848   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 849                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 850                     noreg, noreg);


1096   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1097                      Address(rdx, rbx, Address::times_4,
1098                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1099                      noreg /* ftos */, noreg, noreg);
1100 }
1101 
1102 void TemplateTable::dastore() {
1103   transition(dtos, vtos);
1104   __ pop_i(rbx);
1105   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1106   // rbx:  index
1107   // rdx:  array
1108   index_check(rdx, rbx); // prefer index in rbx
1109   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1110                      Address(rdx, rbx, Address::times_8,
1111                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1112                      noreg /* dtos */, noreg, noreg);
1113 }
1114 
1115 void TemplateTable::aastore() {
1116   Label is_null, ok_is_subtype, done;
1117   transition(vtos, vtos);
1118   // stack: ..., array, index, value
1119   __ movptr(rax, at_tos());    // value
1120   __ movl(rcx, at_tos_p1()); // index
1121   __ movptr(rdx, at_tos_p2()); // array
1122 
1123   Address element_address(rdx, rcx,
1124                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1125                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1126 
1127   index_check_without_pop(rdx, rcx);     // kills rbx

1128   __ testptr(rax, rax);
1129   __ jcc(Assembler::zero, is_null);
1130 
1131   // Move subklass into rbx
1132   __ load_klass(rbx, rax);
1133   // Move superklass into rax
1134   __ load_klass(rax, rdx);
1135   __ movptr(rax, Address(rax,
1136                          ObjArrayKlass::element_klass_offset()));
1137 
1138   // Generate subtype check.  Blows rcx, rdi
1139   // Superklass in rax.  Subklass in rbx.

1140   __ gen_subtype_check(rbx, ok_is_subtype);
1141 
1142   // Come here on failure
1143   // object is at TOS
1144   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1145 
1146   // Come here on success
1147   __ bind(ok_is_subtype);
1148 
1149   // Get the value we will store
1150   __ movptr(rax, at_tos());
1151   __ movl(rcx, at_tos_p1()); // index
1152   // Now store using the appropriate barrier
1153   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1154   __ jmp(done);
1155 
1156   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1157   __ bind(is_null);
1158   __ profile_null_seen(rbx);
1159 
1160   // Store a NULL
1161   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1162 
1163   // Pop stack arguments
1164   __ bind(done);
1165   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1166 }
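
In Java terms, the subtype check and null path above implement the covariant-array store rule. A minimal illustrative sketch (not part of this file):

    Object[] arr = new Integer[3]; // static type Object[], dynamic type Integer[]
    arr[0] = 42;                   // gen_subtype_check succeeds: Integer <: Integer
    arr[1] = null;                 // is_null path: stored without a subtype check
    arr[2] = "oops";               // check fails: ArrayStoreException is thrown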
1167 
1168 void TemplateTable::bastore() {
1169   transition(itos, vtos);
1170   __ pop_i(rbx);
1171   // rax: value
1172   // rbx: index
1173   // rdx: array
1174   index_check(rdx, rbx); // prefer index in rbx
1175   // Need to check whether array is boolean or byte
1176   // since both types share the bastore bytecode.
1177   __ load_klass(rcx, rdx);
1178   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1179   int diffbit = Klass::layout_helper_boolean_diffbit();
1180   __ testl(rcx, diffbit);
1181   Label L_skip;
1182   __ jccb(Assembler::zero, L_skip);


2388   __ jcc(j_not(cc), not_taken);
2389   branch(false, false);
2390   __ bind(not_taken);
2391   __ profile_not_taken_branch(rax);
2392 }
2393 
2394 void TemplateTable::if_nullcmp(Condition cc) {
2395   transition(atos, vtos);
2396   // assume branch is more often taken than not (loops use backward branches)
2397   Label not_taken;
2398   __ testptr(rax, rax);
2399   __ jcc(j_not(cc), not_taken);
2400   branch(false, false);
2401   __ bind(not_taken);
2402   __ profile_not_taken_branch(rax);
2403 }
2404 
2405 void TemplateTable::if_acmp(Condition cc) {
2406   transition(atos, vtos);
2407   // assume branch is more often taken than not (loops use backward branches)
2408   Label not_taken;
2409   __ pop_ptr(rdx);
2410   __ cmpoop(rdx, rax);
2411   __ jcc(j_not(cc), not_taken);

2412   branch(false, false);
2413   __ bind(not_taken);
2414   __ profile_not_taken_branch(rax);
2415 }
2416 
2417 void TemplateTable::ret() {
2418   transition(vtos, vtos);
2419   locals_index(rbx);
2420   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2421   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2422   __ profile_ret(rbx, rcx);
2423   __ get_method(rax);
2424   __ movptr(rbcp, Address(rax, Method::const_offset()));
2425   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2426                       ConstMethod::codes_offset()));
2427   __ dispatch_next(vtos, 0, true);
2428 }
2429 
2430 void TemplateTable::wide_ret() {
2431   transition(vtos, vtos);


2662     __ testb(Address(r15_thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2663 #else
2664     const Register thread = rdi;
2665     __ get_thread(thread);
2666     __ testb(Address(thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2667 #endif
2668     __ jcc(Assembler::zero, no_safepoint);
2669     __ push(state);
2670     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2671                                     InterpreterRuntime::at_safepoint));
2672     __ pop(state);
2673     __ bind(no_safepoint);
2674   }
2675 
2676   // Narrow result if state is itos but result type is smaller.
2677   // Need to narrow in the return bytecode rather than in generate_return_entry
2678   // since compiled code callers expect the result to already be narrowed.
2679   if (state == itos) {
2680     __ narrow(rax);
2681   }
2682   __ remove_activation(state, rbcp);

2683 
2684   __ jmp(rbcp);
2685 }
2686 
2687 // ----------------------------------------------------------------------------
2688 // Volatile variables demand their effects be made known to all CPUs
2689 // in order.  Store buffers on most chips allow reads & writes to
2690 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2691 // without some kind of memory barrier (i.e., it's not sufficient that
2692 // the interpreter does not reorder volatile references, the hardware
2693 // also must not reorder them).
2694 //
2695 // According to the new Java Memory Model (JMM):
2696 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2697 //     writes act as acquire & release, so:
2698 // (2) A read cannot let unrelated NON-volatile memory refs that
2699 //     happen after the read float up to before the read.  It's OK for
2700 //     non-volatile memory refs that happen before the volatile read to
2701 //     float down below it.
2702 // (3) Similarly, a volatile write cannot let unrelated NON-volatile


2849     __ get_cache_and_index_at_bcp(cache, index, 1);
2850     __ bind(L1);
2851   }
2852 }
2853 
2854 void TemplateTable::pop_and_check_object(Register r) {
2855   __ pop_ptr(r);
2856   __ null_check(r);  // for field access must check obj.
2857   __ verify_oop(r);
2858 }
2859 
2860 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2861   transition(vtos, vtos);
2862 
2863   const Register cache = rcx;
2864   const Register index = rdx;
2865   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2866   const Register off   = rbx;
2867   const Register flags = rax;
2868   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them

2869 
2870   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2871   jvmti_post_field_access(cache, index, is_static, false);
2872   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2873 
2874   if (!is_static) pop_and_check_object(obj);
2875 
2876   const Address field(obj, off, Address::times_1, 0*wordSize);
2877 
2878   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2879 
2880   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2881   // Make sure we don't need to mask edx after the above shift
2882   assert(btos == 0, "change code, btos != 0");
2883 
2884   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2885 
2886   __ jcc(Assembler::notZero, notByte);
2887   // btos

2888   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2889   __ push(btos);
2890   // Rewrite bytecode to be faster
2891   if (!is_static && rc == may_rewrite) {
2892     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2893   }
2894   __ jmp(Done);
2895 
2896   __ bind(notByte);

2897   __ cmpl(flags, ztos);
2898   __ jcc(Assembler::notEqual, notBool);
2899 
2900   // ztos (same code as btos)
2901   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2902   __ push(ztos);
2903   // Rewrite bytecode to be faster
2904   if (!is_static && rc == may_rewrite) {
2905     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2906     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2907   }
2908   __ jmp(Done);
2909 
2910   __ bind(notBool);
2911   __ cmpl(flags, atos);
2912   __ jcc(Assembler::notEqual, notObj);
2913   // atos


2914   do_oop_load(_masm, field, rax);
2915   __ push(atos);
2916   if (!is_static && rc == may_rewrite) {
2917     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2918   }
2919   __ jmp(Done);
2920 
2921   __ bind(notObj);



2922   __ cmpl(flags, itos);
2923   __ jcc(Assembler::notEqual, notInt);
2924   // itos
2925   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2926   __ push(itos);
2927   // Rewrite bytecode to be faster
2928   if (!is_static && rc == may_rewrite) {
2929     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2930   }
2931   __ jmp(Done);
2932 
2933   __ bind(notInt);
2934   __ cmpl(flags, ctos);
2935   __ jcc(Assembler::notEqual, notChar);
2936   // ctos
2937   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2938   __ push(ctos);
2939   // Rewrite bytecode to be faster
2940   if (!is_static && rc == may_rewrite) {
2941     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);


3000 #endif
3001 
3002   __ bind(Done);
3003   // [jk] not needed currently
3004   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3005   //                                              Assembler::LoadStore));
3006 }
3007 
3008 void TemplateTable::getfield(int byte_no) {
3009   getfield_or_static(byte_no, false);
3010 }
3011 
3012 void TemplateTable::nofast_getfield(int byte_no) {
3013   getfield_or_static(byte_no, false, may_not_rewrite);
3014 }
3015 
3016 void TemplateTable::getstatic(int byte_no) {
3017   getfield_or_static(byte_no, true);
3018 }
3019 
3020 
3021 // The registers cache and index expected to be set before call.
3022 // The function may destroy various registers, just not the cache and index registers.
3023 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3024 
3025   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3026   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3027   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3028   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3029 
3030   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3031 
3032   if (JvmtiExport::can_post_field_modification()) {
3033     // Check to see if a field modification watch has been set before
3034     // we take the time to call into the VM.
3035     Label L1;
3036     assert_different_registers(cache, index, rax);
3037     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3038     __ testl(rax, rax);
3039     __ jcc(Assembler::zero, L1);


3095     // c_rarg1: object pointer set up above (NULL if static)
3096     // c_rarg2: cache entry pointer
3097     // c_rarg3: jvalue object on the stack
3098     __ call_VM(noreg,
3099                CAST_FROM_FN_PTR(address,
3100                                 InterpreterRuntime::post_field_modification),
3101                RBX, robj, RCX);
3102     __ get_cache_and_index_at_bcp(cache, index, 1);
3103     __ bind(L1);
3104   }
3105 }
3106 
3107 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3108   transition(vtos, vtos);
3109 
3110   const Register cache = rcx;
3111   const Register index = rdx;
3112   const Register obj   = rcx;
3113   const Register off   = rbx;
3114   const Register flags = rax;

3115 
3116   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3117   jvmti_post_field_mod(cache, index, is_static);
3118   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3119 
3120   // [jk] not needed currently
3121   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3122   //                                              Assembler::StoreStore));
3123 
3124   Label notVolatile, Done;
3125   __ movl(rdx, flags);
3126   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3127   __ andl(rdx, 0x1);
3128 
3129   // Check for volatile store
3130   __ testl(rdx, rdx);

3131   __ jcc(Assembler::zero, notVolatile);
3132 
3133   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3134   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3135                                                Assembler::StoreStore));
3136   __ jmp(Done);
3137   __ bind(notVolatile);
3138 
3139   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3140 
3141   __ bind(Done);
3142 }
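
The StoreLoad|StoreStore barrier emitted on the volatile path above is what the JMM comment earlier in this file calls for. A minimal Java sketch of the store-buffer reordering it rules out (illustrative class, not from this change):

    class StoreLoadDemo {
        volatile int x, y;
        int r1, r2;
        void t1() { x = 1; r1 = y; } // volatile store, then volatile load
        void t2() { y = 1; r2 = x; }
        // Without a StoreLoad barrier after each volatile store, the loads
        // could pass the preceding stores and both threads could observe
        // r1 == r2 == 0, an outcome the JMM forbids for volatile accesses.
    }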
3143 
3144 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3145                                               Register obj, Register off, Register flags) {
3146 
3147   // field addresses
3148   const Address field(obj, off, Address::times_1, 0*wordSize);
3149   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3150 
3151   Label notByte, notBool, notInt, notShort, notChar,
3152         notLong, notFloat, notObj;
3153   Label Done;
3154 
3155   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3156 
3157   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3158 
3159   assert(btos == 0, "change code, btos != 0");
3160   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3161   __ jcc(Assembler::notZero, notByte);
3162 
3163   // btos
3164   {
3165     __ pop(btos);
3166     if (!is_static) pop_and_check_object(obj);
3167     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3168     if (!is_static && rc == may_rewrite) {
3169       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3170     }
3171     __ jmp(Done);
3172   }


3175   __ cmpl(flags, ztos);
3176   __ jcc(Assembler::notEqual, notBool);
3177 
3178   // ztos
3179   {
3180     __ pop(ztos);
3181     if (!is_static) pop_and_check_object(obj);
3182     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3183     if (!is_static && rc == may_rewrite) {
3184       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3185     }
3186     __ jmp(Done);
3187   }
3188 
3189   __ bind(notBool);
3190   __ cmpl(flags, atos);
3191   __ jcc(Assembler::notEqual, notObj);
3192 
3193   // atos
3194   {

3195     __ pop(atos);
3196     if (!is_static) pop_and_check_object(obj);
3197     // Store into the field
3198     do_oop_store(_masm, field, rax);
3199     if (!is_static && rc == may_rewrite) {
3200       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3201     }
3202     __ jmp(Done);

3203   }
3204 
3205   __ bind(notObj);
3206   __ cmpl(flags, itos);
3207   __ jcc(Assembler::notEqual, notInt);
3208 
3209   // itos
3210   {
3211     __ pop(itos);
3212     if (!is_static) pop_and_check_object(obj);
3213     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3214     if (!is_static && rc == may_rewrite) {
3215       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3216     }
3217     __ jmp(Done);
3218   }
3219 
3220   __ bind(notInt);
3221   __ cmpl(flags, ctos);
3222   __ jcc(Assembler::notEqual, notChar);


3319 }
3320 
3321 void TemplateTable::jvmti_post_fast_field_mod() {
3322 
3323   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3324 
3325   if (JvmtiExport::can_post_field_modification()) {
3326     // Check to see if a field modification watch has been set before
3327     // we take the time to call into the VM.
3328     Label L2;
3329     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3330     __ testl(scratch, scratch);
3331     __ jcc(Assembler::zero, L2);
3332     __ pop_ptr(rbx);                  // copy the object pointer from tos
3333     __ verify_oop(rbx);
3334     __ push_ptr(rbx);                 // put the object pointer back on tos
3335     // Save tos values before call_VM() clobbers them. Since we have
3336     // to do it for every data type, we use the saved values as the
3337     // jvalue object.
3338     switch (bytecode()) {          // load values into the jvalue object

3339     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3340     case Bytecodes::_fast_bputfield: // fall through
3341     case Bytecodes::_fast_zputfield: // fall through
3342     case Bytecodes::_fast_sputfield: // fall through
3343     case Bytecodes::_fast_cputfield: // fall through
3344     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3345     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3346     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3347     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3348 
3349     default:
3350       ShouldNotReachHere();
3351     }
3352     __ mov(scratch, rsp);             // points to jvalue on the stack
3353     // access constant pool cache entry
3354     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3355     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3356     __ verify_oop(rbx);
3357     // rbx: object pointer copied above
3358     // c_rarg2: cache entry pointer
3359     // c_rarg3: jvalue object on the stack
3360     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3361     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3362 
3363     switch (bytecode()) {             // restore tos values

3364     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3365     case Bytecodes::_fast_bputfield: // fall through
3366     case Bytecodes::_fast_zputfield: // fall through
3367     case Bytecodes::_fast_sputfield: // fall through
3368     case Bytecodes::_fast_cputfield: // fall through
3369     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3370     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3371     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3372     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3373     default: break;
3374     }
3375     __ bind(L2);
3376   }
3377 }
3378 
3379 void TemplateTable::fast_storefield(TosState state) {
3380   transition(state, vtos);
3381 
3382   ByteSize base = ConstantPoolCache::base_offset();
3383 
3384   jvmti_post_fast_field_mod();
3385 
3386   // access constant pool cache
3387   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3388 
3389   // test for volatile with rdx but rdx is tos register for lputfield.
3390   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3391                        in_bytes(base +
3392                                 ConstantPoolCacheEntry::flags_offset())));
3393 
3394   // replace index with field offset from cache entry
3395   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3396                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3397 
3398   // [jk] not needed currently
3399   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3400   //                                              Assembler::StoreStore));
3401 
3402   Label notVolatile, Done;




3403   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3404   __ andl(rdx, 0x1);
3405 
3406   // Get object from stack
3407   pop_and_check_object(rcx);
3408 
3409   // field address
3410   const Address field(rcx, rbx, Address::times_1);
3411 
3412   // Check for volatile store
3413   __ testl(rdx, rdx);
3414   __ jcc(Assembler::zero, notVolatile);
3415 
3416   fast_storefield_helper(field, rax);
3417   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3418                                                Assembler::StoreStore));
3419   __ jmp(Done);
3420   __ bind(notVolatile);
3421 
3422   fast_storefield_helper(field, rax);
3423 
3424   __ bind(Done);
3425 }
3426 
3427 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3428 
3429   // access field
3430   switch (bytecode()) {
3431   case Bytecodes::_fast_aputfield:

3432     do_oop_store(_masm, field, rax);

3433     break;
3434   case Bytecodes::_fast_lputfield:
3435 #ifdef _LP64
3436     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3437 #else
3438   __ stop("should not be rewritten");
3439 #endif
3440     break;
3441   case Bytecodes::_fast_iputfield:
3442     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3443     break;
3444   case Bytecodes::_fast_zputfield:
3445     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3446     break;
3447   case Bytecodes::_fast_bputfield:
3448     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3449     break;
3450   case Bytecodes::_fast_sputfield:
3451     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3452     break;


3482     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3483     LP64_ONLY(__ mov(c_rarg1, rax));
3484     // c_rarg1: object pointer copied above
3485     // c_rarg2: cache entry pointer
3486     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3487     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3488     __ pop_ptr(rax); // restore object pointer
3489     __ bind(L1);
3490   }
3491 
3492   // access constant pool cache
3493   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3494   // replace index with field offset from cache entry
3495   // [jk] not needed currently
3496   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3497   //                      in_bytes(ConstantPoolCache::base_offset() +
3498   //                               ConstantPoolCacheEntry::flags_offset())));
3499   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3500   // __ andl(rdx, 0x1);
3501   //
3502   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3503                          in_bytes(ConstantPoolCache::base_offset() +
3504                                   ConstantPoolCacheEntry::f2_offset())));
3505 
3506   // rax: object
3507   __ verify_oop(rax);
3508   __ null_check(rax);
3509   Address field(rax, rbx, Address::times_1);
3510 
3511   // access field
3512   switch (bytecode()) {
3513   case Bytecodes::_fast_agetfield:
3514     do_oop_load(_masm, field, rax);
3515     __ verify_oop(rax);
3516     break;
3517   case Bytecodes::_fast_lgetfield:
3518 #ifdef _LP64
3519     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3520 #else
3521   __ stop("should not be rewritten");
3522 #endif
3523     break;
3524   case Bytecodes::_fast_igetfield:
3525     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3526     break;
3527   case Bytecodes::_fast_bgetfield:
3528     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3529     break;
3530   case Bytecodes::_fast_sgetfield:
3531     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3532     break;


4117     __ jmp(done);
4118   }
4119 
4120   // slow case
4121   __ bind(slow_case);
4122   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4123   __ bind(slow_case_no_pop);
4124 
4125   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4126   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4127 
4128   __ get_constant_pool(rarg1);
4129   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4130   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4131    __ verify_oop(rax);
4132 
4133   // continue
4134   __ bind(done);
4135 }
4136 
4137 void TemplateTable::newarray() {
4138   transition(itos, atos);
4139   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4140   __ load_unsigned_byte(rarg1, at_bcp(1));
4141   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4142           rarg1, rax);
4143 }
4144 
4145 void TemplateTable::anewarray() {
4146   transition(itos, atos);
4147 
4148   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4149   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4150 
4151   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4152   __ get_constant_pool(rarg1);
4153   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4154           rarg1, rarg2, rax);
4155 }
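
The operand encodings of the two templates above differ: newarray carries a one-byte primitive type code at bcp+1, while anewarray carries a two-byte constant-pool index. In Java terms:

    int n = 10;
    int[] a = new int[n];       // newarray: type code for T_INT at bcp+1
    String[] s = new String[n]; // anewarray: two-byte CP index of java/lang/String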
4156 
4157 void TemplateTable::arraylength() {
4158   transition(atos, itos);
4159   __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
4160   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4161 }
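
The null_check above uses the load at length_offset as an implicit null check, so in Java terms:

    int[] a = null;
    int n = a.length; // throws NullPointerException via the implicit null check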
4162 
4163 void TemplateTable::checkcast() {
4164   transition(atos, atos);
4165   Label done, is_null, ok_is_subtype, quicked, resolved;
4166   __ testptr(rax, rax); // object is in rax
4167   __ jcc(Assembler::zero, is_null);
4168 
4169   // Get cpool & tags index
4170   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4171   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4172   // See if bytecode has already been quicked
4173   __ cmpb(Address(rdx, rbx,
4174                   Address::times_1,
4175                   Array<u1>::base_offset_in_bytes()),
4176           JVM_CONSTANT_Class);

4177   __ jcc(Assembler::equal, quicked);
4178   __ push(atos); // save receiver for result, and for GC
4179   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4180 
4181   // vm_result_2 has metadata result
4182 #ifndef _LP64
4183   // borrow rdi from locals
4184   __ get_thread(rdi);
4185   __ get_vm_result_2(rax, rdi);
4186   __ restore_locals();
4187 #else
4188   __ get_vm_result_2(rax, r15_thread);
4189 #endif
4190 
4191   __ pop_ptr(rdx); // restore receiver
4192   __ jmpb(resolved);
4193 
4194   // Get superklass in rax and subklass in rbx
4195   __ bind(quicked);
4196   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4197   __ load_resolved_klass_at_index(rcx, rbx, rax);
4198 
4199   __ bind(resolved);
4200   __ load_klass(rbx, rdx);
4201 
4202   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4203   // Superklass in rax.  Subklass in rbx.
4204   __ gen_subtype_check(rbx, ok_is_subtype);
4205 
4206   // Come here on failure
4207   __ push_ptr(rdx);
4208   // object is at TOS
4209   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4210 
4211   // Come here on success
4212   __ bind(ok_is_subtype);
4213   __ mov(rax, rdx); // Restore object in rdx



4214 
4215   // Collect counts on whether this check-cast sees NULLs a lot or not.
4216   if (ProfileInterpreter) {
4217     __ jmp(done);
4218     __ bind(is_null);
4219     __ profile_null_seen(rcx);
4220   } else {
4221     __ bind(is_null);   // same as 'done'
4222   }

4223   __ bind(done);
4224 }
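
The three exits of checkcast above, sketched in Java (illustrative, not from this file):

    Object o = "hello";
    String s = (String) o;   // resolved/quicked path: subtype check succeeds
    Object nil = null;
    String t = (String) nil; // is_null path: null passes checkcast unchecked
    Integer i = (Integer) o; // failure path: ClassCastException is thrown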
4225 
4226 void TemplateTable::instanceof() {
4227   transition(atos, itos);
4228   Label done, is_null, ok_is_subtype, quicked, resolved;
4229   __ testptr(rax, rax);
4230   __ jcc(Assembler::zero, is_null);
4231 
4232   // Get cpool & tags index
4233   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4234   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4235   // See if bytecode has already been quicked
4236   __ cmpb(Address(rdx, rbx,
4237                   Address::times_1,
4238                   Array<u1>::base_offset_in_bytes()),
4239           JVM_CONSTANT_Class);

4240   __ jcc(Assembler::equal, quicked);
4241 
4242   __ push(atos); // save receiver for result, and for GC
4243   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4244   // vm_result_2 has metadata result
4245 
4246 #ifndef _LP64
4247   // borrow rdi from locals
4248   __ get_thread(rdi);
4249   __ get_vm_result_2(rax, rdi);
4250   __ restore_locals();
4251 #else
4252   __ get_vm_result_2(rax, r15_thread);
4253 #endif
4254 
4255   __ pop_ptr(rdx); // restore receiver
4256   __ verify_oop(rdx);
4257   __ load_klass(rdx, rdx);
4258   __ jmpb(resolved);
4259 


4270 
4271   // Come here on failure
4272   __ xorl(rax, rax);
4273   __ jmpb(done);
4274   // Come here on success
4275   __ bind(ok_is_subtype);
4276   __ movl(rax, 1);
4277 
4278   // Collect counts on whether this test sees NULLs a lot or not.
4279   if (ProfileInterpreter) {
4280     __ jmp(done);
4281     __ bind(is_null);
4282     __ profile_null_seen(rcx);
4283   } else {
4284     __ bind(is_null);   // same as 'done'
4285   }
4286   __ bind(done);
4287   // rax = 0: obj == NULL or  obj is not an instanceof the specified klass
4288   // rax = 1: obj != NULL and obj is     an instanceof the specified klass
4289 }
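
The matching instanceof outcomes (illustrative sketch):

    Object o = "hello";
    boolean b1 = o instanceof String;    // ok_is_subtype path: rax = 1, true
    boolean b2 = o instanceof Integer;   // failure path: rax = 0, false
    boolean b3 = null instanceof String; // is_null path: rax = 0, false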
4290 
4291 
4292 //----------------------------------------------------------------------------------------------------
4293 // Breakpoints
4294 void TemplateTable::_breakpoint() {
4295   // Note: We get here even if we are single stepping.
4296   // jbug insists on setting breakpoints at every bytecode
4297   // even if we are in single step mode.
4298 
4299   transition(vtos, vtos);
4300 
4301   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4302 
4303   // get the unpatched byte code
4304   __ get_method(rarg);
4305   __ call_VM(noreg,
4306              CAST_FROM_FN_PTR(address,
4307                               InterpreterRuntime::get_original_bytecode_at),
4308              rarg, rbcp);
4309   __ mov(rbx, rax);  // why?
4310 




 160 static void do_oop_load(InterpreterMacroAssembler* _masm,
 161                         Address src,
 162                         Register dst,
 163                         DecoratorSet decorators = 0) {
 164   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 165 }
 166 
 167 Address TemplateTable::at_bcp(int offset) {
 168   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 169   return Address(rbcp, offset);
 170 }
 171 
 172 
 173 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 174                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 175                                    int byte_no) {
 176   if (!RewriteBytecodes)  return;
 177   Label L_patch_done;
 178 
 179   switch (bc) {
 180   case Bytecodes::_fast_qputfield:
 181   case Bytecodes::_fast_aputfield:
 182   case Bytecodes::_fast_bputfield:
 183   case Bytecodes::_fast_zputfield:
 184   case Bytecodes::_fast_cputfield:
 185   case Bytecodes::_fast_dputfield:
 186   case Bytecodes::_fast_fputfield:
 187   case Bytecodes::_fast_iputfield:
 188   case Bytecodes::_fast_lputfield:
 189   case Bytecodes::_fast_sputfield:
 190     {
 191       // We skip bytecode quickening for putfield instructions when
 192       // the put_code written to the constant pool cache is zero.
 193       // This is required so that every execution of this instruction
 194       // calls out to InterpreterRuntime::resolve_get_put to do
 195       // additional, required work.
 196       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 197       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 198       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 199       __ movl(bc_reg, bc);
 200       __ cmpl(temp_reg, (int) 0);


 353   __ sarl(rax, 16);
 354 }
 355 
 356 void TemplateTable::ldc(bool wide) {
 357   transition(vtos, vtos);
 358   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 359   Label call_ldc, notFloat, notClass, notInt, Done;
 360 
 361   if (wide) {
 362     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 363   } else {
 364     __ load_unsigned_byte(rbx, at_bcp(1));
 365   }
 366 
 367   __ get_cpool_and_tags(rcx, rax);
 368   const int base_offset = ConstantPool::header_size() * wordSize;
 369   const int tags_offset = Array<u1>::base_offset_in_bytes();
 370 
 371   // get type
 372   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));
 373   __ andl(rdx, ~JVM_CONSTANT_QDESC_BIT);
 374 
 375   // unresolved class - get the resolved class
 376   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 377   __ jccb(Assembler::equal, call_ldc);
 378 
 379   // unresolved class in error state - call into runtime to throw the error
 380   // from the first resolution attempt
 381   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 382   __ jccb(Assembler::equal, call_ldc);
 383 
 384   // resolved class - need to call vm to get java mirror of the class
 385   __ cmpl(rdx, JVM_CONSTANT_Class);
 386   __ jcc(Assembler::notEqual, notClass);
 387 
 388   __ bind(call_ldc);
 389 
 390   __ movl(rarg, wide);
 391   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 392 
 393   __ push(atos);


 804                     Address(rdx, rax,
 805                             Address::times_4,
 806                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 807                     noreg, noreg);
 808 }
 809 
 810 void TemplateTable::daload() {
 811   transition(itos, dtos);
 812   // rax: index
 813   // rdx: array
 814   index_check(rdx, rax); // kills rbx
 815   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 816                     Address(rdx, rax,
 817                             Address::times_8,
 818                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 819                     noreg, noreg);
 820 }
 821 
 822 void TemplateTable::aaload() {
 823   transition(itos, atos);
 824 
 825   Register array = rcx;
 826   Register index = rax;
 827 
 828   index_check(array, index); // kills rbx
 829   if (ValueArrayFlatten) {
 830     Label is_flat_array, done;
 831     __ test_flat_array_oop(array, rbx, is_flat_array);
 832     do_oop_load(_masm,
 833                 Address(array, index,
 834                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 835                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 836                 rax,
 837                 IS_ARRAY);
 838     __ jmp(done);
 839     __ bind(is_flat_array);
 840     __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_load), array, index);
 841     __ bind(done);
 842   } else {
 843     do_oop_load(_masm,
 844                 Address(array, index,
 845                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 846                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 847                 rax,
 848                 IS_ARRAY);
 849   }
 850 }
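
The two paths above select on the array's layout. A hedged Java-level sketch, assuming a hypothetical value class Point (prototype syntax varied across builds):

    Point p = pts[i]; // pts: Point[], possibly stored in a flattened layout
    // non-flat layout: plain oop load (do_oop_load with IS_ARRAY), as before
    // flat layout: test_flat_array_oop branches to the runtime call, and
    // InterpreterRuntime::value_array_load returns a buffered heap copy of
    // the element in rax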
 851 
 852 void TemplateTable::baload() {
 853   transition(itos, itos);
 854   // rax: index
 855   // rdx: array
 856   index_check(rdx, rax); // kills rbx
 857   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 858                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 859                     noreg, noreg);
 860 }
 861 
 862 void TemplateTable::caload() {
 863   transition(itos, itos);
 864   // rax: index
 865   // rdx: array
 866   index_check(rdx, rax); // kills rbx
 867   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 868                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 869                     noreg, noreg);


1115   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1116                      Address(rdx, rbx, Address::times_4,
1117                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1118                      noreg /* ftos */, noreg, noreg);
1119 }
1120 
1121 void TemplateTable::dastore() {
1122   transition(dtos, vtos);
1123   __ pop_i(rbx);
1124   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1125   // rbx:  index
1126   // rdx:  array
1127   index_check(rdx, rbx); // prefer index in rbx
1128   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1129                      Address(rdx, rbx, Address::times_8,
1130                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1131                      noreg /* dtos */, noreg, noreg);
1132 }
1133 
1134 void TemplateTable::aastore() {
1135   Label is_null, is_flat_array, ok_is_subtype, done;
1136   transition(vtos, vtos);
1137   // stack: ..., array, index, value
1138   __ movptr(rax, at_tos());    // value
1139   __ movl(rcx, at_tos_p1()); // index
1140   __ movptr(rdx, at_tos_p2()); // array
1141 
1142   Address element_address(rdx, rcx,
1143                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1144                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1145 
1146   index_check_without_pop(rdx, rcx);     // kills rbx
1147 
1148   __ testptr(rax, rax);
1149   __ jcc(Assembler::zero, is_null);
1150 
1151   // Move array class to rdi
1152   __ load_klass(rdi, rdx);
1153   if (ValueArrayFlatten) {
1154     __ test_flat_array_klass(rdi, rbx, is_flat_array);
1155   }
1156 
1157   // Move subklass into rbx
1158   __ load_klass(rbx, rax);
1159   // Move array element superklass into rax
1160   __ movptr(rax, Address(rdi,

1161                          ObjArrayKlass::element_klass_offset()));
1162 
1163   // Generate subtype check.  Blows rcx, rdi
1164   // Superklass in rax.  Subklass in rbx.
1165   // is "rbx <: rax" ? (value subclass <: array element superclass)
1166   __ gen_subtype_check(rbx, ok_is_subtype);
1167 
1168   // Come here on failure
1169   // object is at TOS
1170   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1171 
1172   // Come here on success
1173   __ bind(ok_is_subtype);
1174 
1175   // Get the value we will store
1176   __ movptr(rax, at_tos());
1177   __ movl(rcx, at_tos_p1()); // index
1178   // Now store using the appropriate barrier
1179   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1180   __ jmp(done);
1181 
1182   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1183   __ bind(is_null);
1184   __ profile_null_seen(rbx);
1185   if (EnableValhalla) {
1186     Label is_null_into_value_array_npe, store_null;
1187 
1188     __ load_klass(rdi, rdx);
1189     // No way to store null in flat array
1190     __ test_flat_array_klass(rdi, rbx, is_null_into_value_array_npe);
1191 
1192     // Use case for storing values in objArray where element_klass is specifically
1193     // a value type because they could not be flattened "for reasons",
1194     // these need to have the same semantics as flat arrays, i.e. NPE
1195     __ movptr(rdi, Address(rdi, ObjArrayKlass::element_klass_offset()));
1196     __ test_klass_is_value(rdi, rdi, is_null_into_value_array_npe);
1197     __ jmp(store_null);
1198 
1199     __ bind(is_null_into_value_array_npe);
1200     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1201 
1202     __ bind(store_null);
1203   }
1204   // Store a NULL
1205   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1206   __ jmp(done);
1207 
1208   if (EnableValhalla) {
1209     Label is_type_ok;
1210     __ bind(is_flat_array); // Store non-null value to flat
1211 
1212     // Simplistic type check...
1213 
1214     // Profile the not-null value's klass.
1215     __ load_klass(rbx, rax);
1216     __ profile_typecheck(rcx, rbx, rax); // blows rcx, and rax
1217     // Move element klass into rax
1218     __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1219     // flat value array needs exact type match
1220     // is "rax == rbx" (value subclass == array element superclass)
1221     __ cmpptr(rax, rbx);
1222     __ jccb(Assembler::equal, is_type_ok);
1223 
1224     __ profile_typecheck_failed(rcx);
1225     __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1226 
1227     __ bind(is_type_ok);
1228     __ movptr(rax, at_tos());  // value
1229     __ movl(rcx, at_tos_p1()); // index
1230     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), rax, rdx, rcx);
1231   }
1232   // Pop stack arguments
1233   __ bind(done);
1234   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1235 }
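
At the Java level the Valhalla paths above change two things: null cannot be stored when the element type is a value class, and stores to flattened arrays demand an exact type match. A hedged sketch (Point is a hypothetical value class; prototype syntax varied):

    Point[] pts = getPoints();  // possibly flattened value-class array
    pts[0] = new Point(1, 2);   // flat path: exact klass match, then value_array_store
    pts[1] = null;              // NullPointerException (is_null_into_value_array_npe)
    Object[] objs = new Object[1];
    objs[0] = null;             // ordinary objArray: storing null remains legal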
1236 
1237 void TemplateTable::bastore() {
1238   transition(itos, vtos);
1239   __ pop_i(rbx);
1240   // rax: value
1241   // rbx: index
1242   // rdx: array
1243   index_check(rdx, rbx); // prefer index in rbx
1244   // Need to check whether array is boolean or byte
1245   // since both types share the bastore bytecode.
1246   __ load_klass(rcx, rdx);
1247   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1248   int diffbit = Klass::layout_helper_boolean_diffbit();
1249   __ testl(rcx, diffbit);
1250   Label L_skip;
1251   __ jccb(Assembler::zero, L_skip);


2457   __ jcc(j_not(cc), not_taken);
2458   branch(false, false);
2459   __ bind(not_taken);
2460   __ profile_not_taken_branch(rax);
2461 }
2462 
2463 void TemplateTable::if_nullcmp(Condition cc) {
2464   transition(atos, vtos);
2465   // assume branch is more often taken than not (loops use backward branches)
2466   Label not_taken;
2467   __ testptr(rax, rax);
2468   __ jcc(j_not(cc), not_taken);
2469   branch(false, false);
2470   __ bind(not_taken);
2471   __ profile_not_taken_branch(rax);
2472 }
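
These templates back the ifnull/ifnonnull bytecodes; in Java terms:

    static String describe(Object ref) {
        if (ref == null) { // javac emits ifnonnull to branch past the block
            return "null";
        }
        return ref.toString();
    }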
2473 
2474 void TemplateTable::if_acmp(Condition cc) {
2475   transition(atos, vtos);
2476   // assume branch is more often taken than not (loops use backward branches)
2477   Label taken, not_taken;
2478   __ pop_ptr(rdx);
2479 
2480   const int is_value_mask = markOopDesc::always_locked_pattern;
2481   if (EnableValhalla && ACmpOnValues == 1) {
2482     Label is_null;
2483     __ testptr(rdx, rdx);
2484     __ jcc(Assembler::zero, is_null);
2485     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2486     __ andptr(rbx, is_value_mask);
2487     __ cmpl(rbx, is_value_mask);
2488     __ setb(Assembler::equal, rbx);
2489     __ movzbl(rbx, rbx);
2490     __ orptr(rdx, rbx);
2491     __ bind(is_null);
2492   }
2493 
2494   __ cmpoop(rdx, rax);
2495 
2496   if (EnableValhalla && ACmpOnValues != 1) {
2497     __ jcc(Assembler::notEqual, (cc == not_equal) ? taken : not_taken);
2498     __ testptr(rdx, rdx);
2499     __ jcc(Assembler::zero, (cc == equal) ? taken : not_taken);
2500     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2501     __ andptr(rbx, is_value_mask);
2502     __ cmpl(rbx, is_value_mask);
2503     cc = (cc == equal) ? not_equal : equal;
2504   }
2505 
2506   __ jcc(j_not(cc), not_taken);
2507   __ bind(taken);
2508   branch(false, false);
2509   __ bind(not_taken);
2510   __ profile_not_taken_branch(rax);
2511 }
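
Under EnableValhalla the code above perturbs acmp so that reference comparison never reports two value instances as equal; identity classes are unaffected. Sketched at the Java level (value-class syntax is prototype-specific and omitted):

    Object a = new Object();
    Object b = a;
    boolean same = (a == b); // identity classes: unchanged semantics, true
    // For a value-class instance v, the mark-word test above recognizes
    // always_locked_pattern and inverts the condition, so reference
    // comparison reports false even for (v == v) under this prototype.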
2512 
2513 void TemplateTable::ret() {
2514   transition(vtos, vtos);
2515   locals_index(rbx);
2516   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2517   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2518   __ profile_ret(rbx, rcx);
2519   __ get_method(rax);
2520   __ movptr(rbcp, Address(rax, Method::const_offset()));
2521   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2522                       ConstMethod::codes_offset()));
2523   __ dispatch_next(vtos, 0, true);
2524 }
2525 
2526 void TemplateTable::wide_ret() {
2527   transition(vtos, vtos);


2758     __ testb(Address(r15_thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2759 #else
2760     const Register thread = rdi;
2761     __ get_thread(thread);
2762     __ testb(Address(thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2763 #endif
2764     __ jcc(Assembler::zero, no_safepoint);
2765     __ push(state);
2766     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2767                                     InterpreterRuntime::at_safepoint));
2768     __ pop(state);
2769     __ bind(no_safepoint);
2770   }
2771 
2772   // Narrow result if state is itos but result type is smaller.
2773   // Need to narrow in the return bytecode rather than in generate_return_entry
2774   // since compiled code callers expect the result to already be narrowed.
2775   if (state == itos) {
2776     __ narrow(rax);
2777   }
2778 
2779   __ remove_activation(state, rbcp, true, true, true);
2780 
2781   __ jmp(rbcp);
2782 }
2783 
2784 // ----------------------------------------------------------------------------
2785 // Volatile variables demand their effects be made known to all CPUs
2786 // in order.  Store buffers on most chips allow reads & writes to
2787 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2788 // without some kind of memory barrier (i.e., it's not sufficient that
2789 // the interpreter does not reorder volatile references, the hardware
2790 // also must not reorder them).
2791 //
2792 // According to the new Java Memory Model (JMM):
2793 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2794 //     writes act as acquire & release, so:
2795 // (2) A read cannot let unrelated NON-volatile memory refs that
2796 //     happen after the read float up to before the read.  It's OK for
2797 //     non-volatile memory refs that happen before the volatile read to
2798 //     float down below it.
2799 // (3) Similarly, a volatile write cannot let unrelated NON-volatile


2946     __ get_cache_and_index_at_bcp(cache, index, 1);
2947     __ bind(L1);
2948   }
2949 }
2950 
2951 void TemplateTable::pop_and_check_object(Register r) {
2952   __ pop_ptr(r);
2953   __ null_check(r);  // for field access must check obj.
2954   __ verify_oop(r);
2955 }
2956 
2957 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2958   transition(vtos, vtos);
2959 
2960   const Register cache = rcx;
2961   const Register index = rdx;
2962   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2963   const Register off   = rbx;
2964   const Register flags = rax;
2965   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2966   const Register flags2 = rdx;
2967 
2968   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2969   jvmti_post_field_access(cache, index, is_static, false);
2970   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2971 


2972   const Address field(obj, off, Address::times_1, 0*wordSize);
2973 
2974   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notValueType;
2975 
2976   if (!is_static) {
2977     __ movptr(rcx, Address(cache, index, Address::times_ptr,
2978                            in_bytes(ConstantPoolCache::base_offset() +
2979                                     ConstantPoolCacheEntry::f1_offset())));
2980   }
2981 
2982   __ movl(flags2, flags);
2983 
2984   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2985   // Make sure we don't need to mask edx after the above shift
2986   assert(btos == 0, "change code, btos != 0");
2987 
2988   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2989 
2990   __ jcc(Assembler::notZero, notByte);
2991   // btos
2992   if (!is_static) pop_and_check_object(obj);
2993   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2994   __ push(btos);
2995   // Rewrite bytecode to be faster
2996   if (!is_static && rc == may_rewrite) {
2997     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2998   }
2999   __ jmp(Done);
3000 
3001   __ bind(notByte);
3002 
3003   __ cmpl(flags, ztos);
3004   __ jcc(Assembler::notEqual, notBool);
3005   if (!is_static) pop_and_check_object(obj);
3006   // ztos (same code as btos)
3007   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3008   __ push(ztos);
3009   // Rewrite bytecode to be faster
3010   if (!is_static && rc == may_rewrite) {
3011     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3012     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3013   }
3014   __ jmp(Done);
3015 
3016   __ bind(notBool);
3017   __ cmpl(flags, atos);
3018   __ jcc(Assembler::notEqual, notObj);
3019   // atos
3020   if (!EnableValhalla) {
3021     if (!is_static) pop_and_check_object(obj);
3022     do_oop_load(_masm, field, rax);
3023     __ push(atos);
3024     if (!is_static && rc == may_rewrite) {
3025       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3026     }
3027     __ jmp(Done);
3028   } else {
3029     if (is_static) {
3030       __ load_heap_oop(rax, field);
3031       Label isFlattenable, uninitialized;
3032       // Issue below if the static field has not been initialized yet
3033       __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3034         // Not flattenable case
3035         __ push(atos);
3036         __ jmp(Done);
3037       // Flattenable case, must not return null even if uninitialized
3038       __ bind(isFlattenable);
3039         __ testptr(rax, rax);
3040         __ jcc(Assembler::zero, uninitialized);
3041           __ push(atos);
3042           __ jmp(Done);
3043         __ bind(uninitialized);
3044           __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3045           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_value_field),
3046                  obj, flags2);
3047           __ verify_oop(rax);
3048           __ push(atos);
3049           __ jmp(Done);
3050     } else {
3051       Label isFlattened, nonnull, isFlattenable, rewriteFlattenable;
3052       __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3053         // Non-flattenable field case, also covers the object case
3054         pop_and_check_object(obj);
3055         __ load_heap_oop(rax, field);
3056         __ push(atos);
3057         if (rc == may_rewrite) {
3058           patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3059         }
3060         __ jmp(Done);
3061       __ bind(isFlattenable);
3062         __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3063           // Non-flattened field case
3064           pop_and_check_object(obj);
3065           __ load_heap_oop(rax, field);
3066           __ testptr(rax, rax);
3067           __ jcc(Assembler::notZero, nonnull);
3068             __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3069             __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3070                        obj, flags2);
3071           __ bind(nonnull);
3072           __ verify_oop(rax);
3073           __ push(atos);
3074           __ jmp(rewriteFlattenable);
3075         __ bind(isFlattened);
3076           __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3077           pop_and_check_object(rbx);
3078           call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field),
3079                   rbx, flags2, rcx);
3080           __ verify_oop(rax);
3081           __ push(atos);
3082       __ bind(rewriteFlattenable);
3083       if (rc == may_rewrite) {
3084         patch_bytecode(Bytecodes::_fast_qgetfield, bc, rbx);
3085       }
3086       __ jmp(Done);
3087     }
3088   }
3089 
3090   __ bind(notObj);
3091 
3092   if (!is_static) pop_and_check_object(obj);
3093 
3094   __ cmpl(flags, itos);
3095   __ jcc(Assembler::notEqual, notInt);
3096   // itos
3097   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3098   __ push(itos);
3099   // Rewrite bytecode to be faster
3100   if (!is_static && rc == may_rewrite) {
3101     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3102   }
3103   __ jmp(Done);
3104 
3105   __ bind(notInt);
3106   __ cmpl(flags, ctos);
3107   __ jcc(Assembler::notEqual, notChar);
3108   // ctos
3109   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3110   __ push(ctos);
3111   // Rewrite bytecode to be faster
3112   if (!is_static && rc == may_rewrite) {
3113     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);


3172 #endif
3173 
3174   __ bind(Done);
3175   // [jk] not needed currently
3176   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3177   //                                              Assembler::LoadStore));
3178 }
3179 
3180 void TemplateTable::getfield(int byte_no) {
3181   getfield_or_static(byte_no, false);
3182 }
3183 
3184 void TemplateTable::nofast_getfield(int byte_no) {
3185   getfield_or_static(byte_no, false, may_not_rewrite);
3186 }
3187 
3188 void TemplateTable::getstatic(int byte_no) {
3189   getfield_or_static(byte_no, true);
3190 }
3191 
3192 void TemplateTable::withfield() {
3193   transition(vtos, atos);
3194 
3195   Register cache = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
3196   Register index = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
3197 
3198   resolve_cache_and_index(f2_byte, cache, index, sizeof(u2));
3199 
3200   call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), cache);
3201   // new value type is returned in rbx
3202   // stack adjustment is returned in rax
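       // Operand-stack effect, sketched (Valhalla draft semantics):
       //   ..., objectref, value  =>  ..., newobjectref
       // The popped slot count varies with the field type (long/double occupy
       // two slots), so the runtime reports the rsp byte delta in rax instead
       // of this code hard-wiring it.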
3203   __ verify_oop(rbx);
3204   __ addptr(rsp, rax);
3205   __ movptr(rax, rbx);
3206 }
3207 
3208 // The registers cache and index are expected to be set before the call.
3209 // The function may destroy various registers, but not the cache and index registers.
3210 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3211 
3212   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3213   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3214   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3215   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3216 
3217   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3218 
3219   if (JvmtiExport::can_post_field_modification()) {
3220     // Check to see if a field modification watch has been set before
3221     // we take the time to call into the VM.
3222     Label L1;
3223     assert_different_registers(cache, index, rax);
3224     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3225     __ testl(rax, rax);
3226     __ jcc(Assembler::zero, L1);


3282     // c_rarg1: object pointer set up above (NULL if static)
3283     // c_rarg2: cache entry pointer
3284     // c_rarg3: jvalue object on the stack
3285     __ call_VM(noreg,
3286                CAST_FROM_FN_PTR(address,
3287                                 InterpreterRuntime::post_field_modification),
3288                RBX, robj, RCX);
3289     __ get_cache_and_index_at_bcp(cache, index, 1);
3290     __ bind(L1);
3291   }
3292 }
3293 
3294 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3295   transition(vtos, vtos);
3296 
3297   const Register cache = rcx;
3298   const Register index = rdx;
3299   const Register obj   = rcx;
3300   const Register off   = rbx;
3301   const Register flags = rax;
3302   const Register flags2 = rdx;
3303 
3304   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3305   jvmti_post_field_mod(cache, index, is_static);
3306   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3307 
3308   // [jk] not needed currently
3309   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3310   //                                              Assembler::StoreStore));
3311 
3312   Label notVolatile, Done;
3313   __ movl(rdx, flags);
3314   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3315   __ andl(rdx, 0x1);
3316 
3317   // Check for volatile store
3318   __ testl(rdx, rdx);
3319   __ movl(flags2, flags);
3320   __ jcc(Assembler::zero, notVolatile);
3321 
3322   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags, flags2);
3323   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3324                                                Assembler::StoreStore));
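       // On x86 (TSO) only StoreLoad requires a real fence here; volatile_barrier
       // is expected to emit a locked instruction (e.g. lock addl $0, (rsp)) for
       // it, while StoreStore ordering already comes for free from the hardware.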
3325   __ jmp(Done);
3326   __ bind(notVolatile);
3327 
3328   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags, flags2);
3329 
3330   __ bind(Done);
3331 }
3332 
3333 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3334                                               Register obj, Register off, Register flags, Register flags2) {
3335 
3336   // field addresses
3337   const Address field(obj, off, Address::times_1, 0*wordSize);
3338   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3339 
3340   Label notByte, notBool, notInt, notShort, notChar,
3341         notLong, notFloat, notObj, notValueType;
3342   Label Done;
3343 
3344   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3345 
3346   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3347 
3348   assert(btos == 0, "change code, btos != 0");
3349   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3350   __ jcc(Assembler::notZero, notByte);
3351 
3352   // btos
3353   {
3354     __ pop(btos);
3355     if (!is_static) pop_and_check_object(obj);
3356     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3357     if (!is_static && rc == may_rewrite) {
3358       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3359     }
3360     __ jmp(Done);
3361   }


3364   __ cmpl(flags, ztos);
3365   __ jcc(Assembler::notEqual, notBool);
3366 
3367   // ztos
3368   {
3369     __ pop(ztos);
3370     if (!is_static) pop_and_check_object(obj);
3371     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
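         // The T_BOOLEAN store path is expected to normalize rax to 0/1 (mask
         // to the low bit) before the byte store; that is what lets the
         // matching getfield reuse the plain btos load without re-truncating.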
3372     if (!is_static && rc == may_rewrite) {
3373       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3374     }
3375     __ jmp(Done);
3376   }
3377 
3378   __ bind(notBool);
3379   __ cmpl(flags, atos);
3380   __ jcc(Assembler::notEqual, notObj);
3381 
3382   // atos
3383   {
3384     if (!EnableValhalla) {
3385       __ pop(atos);
3386       if (!is_static) pop_and_check_object(obj);
3387       // Store into the field
3388       do_oop_store(_masm, field, rax);
3389       if (!is_static && rc == may_rewrite) {
3390         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3391       }
3392       __ jmp(Done);
3393     } else {
3394       __ pop(atos);
3395       if (is_static) {
3396         Label notFlattenable;
3397         __ test_field_is_not_flattenable(flags2, rscratch1, notFlattenable);
3398         __ null_check(rax);
3399         __ bind(notFlattenable);
3400         do_oop_store(_masm, field, rax);
3401         __ jmp(Done);
3402       } else {
3403         Label isFlattenable, isFlattened, rewriteNotFlattenable, rewriteFlattenable;
3404         __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3405         // Non-flattenable case, covers non-flattenable values and objects
3406         pop_and_check_object(obj);
3407         // Store into the field
3408         do_oop_store(_masm, field, rax);
3409         __ bind(rewriteNotFlattenable);
3410         if (rc == may_rewrite) {
3411           patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3412         }
3413         __ jmp(Done);
3414         // Implementation of the flattenable semantics
3415         __ bind(isFlattenable);
3416         __ null_check(rax);
3417         __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3418         // Not flattened case
3419         pop_and_check_object(obj);
3420         // Store into the field
3421         do_oop_store(_masm, field, rax);
3422         __ jmp(rewriteFlattenable);
3423         __ bind(isFlattened);
3424         pop_and_check_object(obj);
3425         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3426                 rax, off, obj);
3427         __ bind(rewriteFlattenable);
3428         if (rc == may_rewrite) {
3429           patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3430         }
3431         __ jmp(Done);
3432       }
3433     }
3434   }
3435 
3436   __ bind(notObj);
3437   __ cmpl(flags, itos);
3438   __ jcc(Assembler::notEqual, notInt);
3439 
3440   // itos
3441   {
3442     __ pop(itos);
3443     if (!is_static) pop_and_check_object(obj);
3444     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3445     if (!is_static && rc == may_rewrite) {
3446       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3447     }
3448     __ jmp(Done);
3449   }
3450 
3451   __ bind(notInt);
3452   __ cmpl(flags, ctos);
3453   __ jcc(Assembler::notEqual, notChar);


3550 }
3551 
3552 void TemplateTable::jvmti_post_fast_field_mod() {
3553 
3554   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3555 
3556   if (JvmtiExport::can_post_field_modification()) {
3557     // Check to see if a field modification watch has been set before
3558     // we take the time to call into the VM.
3559     Label L2;
3560     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3561     __ testl(scratch, scratch);
3562     __ jcc(Assembler::zero, L2);
3563     __ pop_ptr(rbx);                  // copy the object pointer from tos
3564     __ verify_oop(rbx);
3565     __ push_ptr(rbx);                 // put the object pointer back on tos
3566     // Save tos values before call_VM() clobbers them. Since we have
3567     // to do it for every data type, we use the saved values as the
3568     // jvalue object.
3569     switch (bytecode()) {          // load values into the jvalue object
3570     case Bytecodes::_fast_qputfield: // fall through
3571     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3572     case Bytecodes::_fast_bputfield: // fall through
3573     case Bytecodes::_fast_zputfield: // fall through
3574     case Bytecodes::_fast_sputfield: // fall through
3575     case Bytecodes::_fast_cputfield: // fall through
3576     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3577     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3578     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3579     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3580 
3581     default:
3582       ShouldNotReachHere();
3583     }
3584     __ mov(scratch, rsp);             // points to jvalue on the stack
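         // (Sketch) JNI's jvalue is a union overlaying the primitive types and
         // jobject:
         //   typedef union jvalue { jboolean z; jbyte b; jchar c; jshort s;
         //                          jint i; jlong j; jfloat f; jdouble d;
         //                          jobject l; } jvalue;
         // so a pointer to the tos value saved above doubles as a jvalue*.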
3585     // access constant pool cache entry
3586     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3587     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3588     __ verify_oop(rbx);
3589     // rbx: object pointer copied above
3590     // c_rarg2: cache entry pointer
3591     // c_rarg3: jvalue object on the stack
3592     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3593     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3594 
3595     switch (bytecode()) {             // restore tos values
3596     case Bytecodes::_fast_qputfield: // fall through
3597     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3598     case Bytecodes::_fast_bputfield: // fall through
3599     case Bytecodes::_fast_zputfield: // fall through
3600     case Bytecodes::_fast_sputfield: // fall through
3601     case Bytecodes::_fast_cputfield: // fall through
3602     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3603     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3604     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3605     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3606     default: break;
3607     }
3608     __ bind(L2);
3609   }
3610 }
3611 
3612 void TemplateTable::fast_storefield(TosState state) {
3613   transition(state, vtos);
3614 
3615   ByteSize base = ConstantPoolCache::base_offset();
3616 
3617   jvmti_post_fast_field_mod();
3618 
3619   // access constant pool cache
3620   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3621 
3622   // Test for volatile using rdx; note that rdx is a tos register for lputfield.
3623   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3624                        in_bytes(base +
3625                                 ConstantPoolCacheEntry::flags_offset())));
3626 
3627   // replace index with field offset from cache entry
3628   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3629                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3630 
3631   // [jk] not needed currently
3632   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3633   //                                              Assembler::StoreStore));
3634 
3635   Label notVolatile, Done;
3636   if (bytecode() == Bytecodes::_fast_qputfield) {
3637     __ movl(rscratch2, rdx);
3638   }
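       // For _fast_qputfield the raw flags were saved in rscratch2 above: rdx
       // is about to be reduced to just the volatile bit, while the flattened
       // bit is still needed later (see fast_storefield_helper).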
3639 
3640   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3641   __ andl(rdx, 0x1);
3642 
3643   // Get object from stack
3644   pop_and_check_object(rcx);
3645 
3646   // field address
3647   const Address field(rcx, rbx, Address::times_1);
3648 
3649   // Check for volatile store
3650   __ testl(rdx, rdx);
3651   __ jcc(Assembler::zero, notVolatile);
3652 
3653   fast_storefield_helper(field, rax);
3654   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3655                                                Assembler::StoreStore));
3656   __ jmp(Done);
3657   __ bind(notVolatile);
3658 
3659   fast_storefield_helper(field, rax);
3660 
3661   __ bind(Done);
3662 }
3663 
3664 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3665 
3666   // access field
3667   switch (bytecode()) {
3668   case Bytecodes::_fast_qputfield:
3669     {
3670       Label isFlattened, done;
3671       __ null_check(rax);
3672       __ test_field_is_flattened(rscratch2, rscratch1, isFlattened);
3673       // Non-flattened case
3674       do_oop_store(_masm, field, rax);
3675       __ jmp(done);
3676       __ bind(isFlattened);
3677       call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3678           rax, rbx, rcx);
3679       __ bind(done);
3680     }
3681     break;
3682   case Bytecodes::_fast_aputfield:
3683     {
3684       do_oop_store(_masm, field, rax);
3685     }
3686     break;
3687   case Bytecodes::_fast_lputfield:
3688 #ifdef _LP64
3689     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3690 #else
3691     __ stop("should not be rewritten");
3692 #endif
3693     break;
3694   case Bytecodes::_fast_iputfield:
3695     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3696     break;
3697   case Bytecodes::_fast_zputfield:
3698     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3699     break;
3700   case Bytecodes::_fast_bputfield:
3701     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3702     break;
3703   case Bytecodes::_fast_sputfield:
3704     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3705     break;


3735     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3736     LP64_ONLY(__ mov(c_rarg1, rax));
3737     // c_rarg1: object pointer copied above
3738     // c_rarg2: cache entry pointer
3739     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3740     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3741     __ pop_ptr(rax); // restore object pointer
3742     __ bind(L1);
3743   }
3744 
3745   // access constant pool cache
3746   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3747   // replace index with field offset from cache entry
3748   // [jk] not needed currently
3749   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3750   //                      in_bytes(ConstantPoolCache::base_offset() +
3751   //                               ConstantPoolCacheEntry::flags_offset())));
3752   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3753   // __ andl(rdx, 0x1);
3754   //
3755   __ movptr(rdx, Address(rcx, rbx, Address::times_ptr,
3756                          in_bytes(ConstantPoolCache::base_offset() +
3757                                   ConstantPoolCacheEntry::f2_offset())));
3758 
3759   // rax: object
3760   __ verify_oop(rax);
3761   __ null_check(rax);
3762   Address field(rax, rdx, Address::times_1);
3763 
3764   // access field
3765   switch (bytecode()) {
3766   case Bytecodes::_fast_qgetfield:
3767     {
3768       Label isFlattened, nonnull, Done;
3769       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3770                                    in_bytes(ConstantPoolCache::base_offset() +
3771                                             ConstantPoolCacheEntry::flags_offset())));
3772       __ test_field_is_flattened(rscratch1, rscratch2, isFlattened);
3773         // Non-flattened field case
3774         __ movptr(rscratch1, rax);
3775         __ load_heap_oop(rax, field);
3776         __ testptr(rax, rax);
3777         __ jcc(Assembler::notZero, nonnull);
3778           __ movptr(rax, rscratch1);
3779           __ movl(rcx, Address(rcx, rbx, Address::times_ptr,
3780                              in_bytes(ConstantPoolCache::base_offset() +
3781                                       ConstantPoolCacheEntry::flags_offset())));
3782           __ andl(rcx, ConstantPoolCacheEntry::field_index_mask);
3783           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3784                      rax, rcx);
3785         __ bind(nonnull);
3786         __ verify_oop(rax);
3787         __ jmp(Done);
3788       __ bind(isFlattened);
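             // Flattened case: the field's payload is stored inline in the
             // holder object. Assumption, sketched: read_flattened_field
             // allocates a heap-buffered instance, copies the flat payload into
             // it, and returns the buffer in rax.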
3789         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3790                            in_bytes(ConstantPoolCache::base_offset() +
3791                                     ConstantPoolCacheEntry::flags_offset())));
3792         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3793         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3794                                      in_bytes(ConstantPoolCache::base_offset() +
3795                                               ConstantPoolCacheEntry::f1_offset())));
3796         call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field),
3797                 rax, rdx, rcx);
3798         __ verify_oop(rax);
3799       __ bind(Done);
3800     }
3801     break;
3802   case Bytecodes::_fast_agetfield:
3803     do_oop_load(_masm, field, rax);
3804     __ verify_oop(rax);
3805     break;
3806   case Bytecodes::_fast_lgetfield:
3807 #ifdef _LP64
3808     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3809 #else
3810     __ stop("should not be rewritten");
3811 #endif
3812     break;
3813   case Bytecodes::_fast_igetfield:
3814     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3815     break;
3816   case Bytecodes::_fast_bgetfield:
3817     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3818     break;
3819   case Bytecodes::_fast_sgetfield:
3820     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3821     break;


4406     __ jmp(done);
4407   }
4408 
4409   // slow case
4410   __ bind(slow_case);
4411   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4412   __ bind(slow_case_no_pop);
4413 
4414   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4415   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4416 
4417   __ get_constant_pool(rarg1);
4418   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4419   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4420   __ verify_oop(rax);
4421 
4422   // continue
4423   __ bind(done);
4424 }
4425 
4426 void TemplateTable::defaultvalue() {
4427   transition(vtos, atos);
4428 
4429   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4430   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4431 
4432   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4433   __ get_constant_pool(rarg1);
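       // Sketch of the semantics: the runtime returns the default instance of
       // the inline class at this CP index, i.e. the instance with every field
       // holding its zero value; it may hand back a cached per-class singleton.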
4434 
4435   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
4436       rarg1, rarg2);
4437   __ verify_oop(rax);
4438 }
4439 
4440 void TemplateTable::newarray() {
4441   transition(itos, atos);
4442   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4443   __ load_unsigned_byte(rarg1, at_bcp(1));
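       // at_bcp(1) is the JVMS array-type code of the newarray operand (e.g.
       // 4 == T_BOOLEAN, 10 == T_INT); the element count comes in via rax (itos).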
4444   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4445           rarg1, rax);
4446 }
4447 
4448 void TemplateTable::anewarray() {
4449   transition(itos, atos);
4450 
4451   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4452   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4453 
4454   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4455   __ get_constant_pool(rarg1);
4456   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4457           rarg1, rarg2, rax);
4458 }
4459 
4460 void TemplateTable::arraylength() {
4461   transition(atos, itos);
4462   __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
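       // When implicit null checks are in use, null_check at this small offset
       // emits no explicit test: the length load below simply faults on NULL and
       // the fault is reported back as a NullPointerException.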
4463   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4464 }
4465 
4466 void TemplateTable::checkcast() {
4467   transition(atos, atos);
4468   Label done, is_null, ok_is_subtype, quicked, resolved;
4469   __ testptr(rax, rax); // object is in rax
4470   __ jcc(Assembler::zero, is_null);
4471 
4472   // Get cpool & tags index
4473   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4474   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4475   // See if bytecode has already been quicked
4476   __ movzbl(rdx, Address(rdx, rbx,
4477       Address::times_1,
4478       Array<u1>::base_offset_in_bytes()));
4479   __ andl(rdx, ~JVM_CONSTANT_QDESC_BIT);
4480   __ cmpl(rdx, JVM_CONSTANT_Class);
4481   __ jcc(Assembler::equal, quicked);
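       // The andl above clears JVM_CONSTANT_QDESC_BIT so that class entries that
       // came from Q-descriptors (inline types) and from plain L-descriptors both
       // compare equal to JVM_CONSTANT_Class here.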
4482   __ push(atos); // save receiver for result, and for GC
4483   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4484 
4485   // vm_result_2 has metadata result
4486 #ifndef _LP64
4487   // borrow rdi from locals
4488   __ get_thread(rdi);
4489   __ get_vm_result_2(rax, rdi);
4490   __ restore_locals();
4491 #else
4492   __ get_vm_result_2(rax, r15_thread);
4493 #endif
4494 
4495   __ pop_ptr(rdx); // restore receiver
4496   __ jmpb(resolved);
4497 
4498   // Get superklass in rax and subklass in rbx
4499   __ bind(quicked);
4500   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4501   __ load_resolved_klass_at_index(rcx, rbx, rax);
4502 
4503   __ bind(resolved);
4504   __ load_klass(rbx, rdx);
4505 
4506   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4507   // Superklass in rax.  Subklass in rbx.
4508   __ gen_subtype_check(rbx, ok_is_subtype);
4509 
4510   // Come here on failure
4511   __ push_ptr(rdx);
4512   // object is at TOS
4513   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4514 
4515   // Come here on success
4516   __ bind(ok_is_subtype);
4517   __ mov(rax, rdx); // Restore object from rdx
4518   __ jmp(done);
4519 
4520   __ bind(is_null);
4521 
4522   // Collect counts on whether this check-cast sees NULLs a lot or not.
4523   if (ProfileInterpreter) {
4524     __ profile_null_seen(rcx);
4525   }
4526 
4527   if (EnableValhalla) {
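         // A Q-descriptor at this CP index names a null-free inline type, so
         // casting null to it must throw. Illustrative only: `(QPoint;) null`
         // throws NullPointerException, while `(LPoint;) null` yields null.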
4528     // Get cpool & tags index
4529     __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4530     __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4531     // See if CP entry is a Q-descriptor
4532     __ movzbl(rcx, Address(rdx, rbx,
4533         Address::times_1,
4534         Array<u1>::base_offset_in_bytes()));
4535     __ andl(rcx, JVM_CONSTANT_QDESC_BIT);
4536     __ cmpl(rcx, JVM_CONSTANT_QDESC_BIT);
4537     __ jcc(Assembler::notEqual, done);
4538     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
4539   }
4540 
4541   __ bind(done);
4542 }
4543 
4544 void TemplateTable::instanceof() {
4545   transition(atos, itos);
4546   Label done, is_null, ok_is_subtype, quicked, resolved;
4547   __ testptr(rax, rax);
4548   __ jcc(Assembler::zero, is_null);
4549 
4550   // Get cpool & tags index
4551   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4552   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4553   // See if bytecode has already been quicked
4554   __ movzbl(rdx, Address(rdx, rbx,
4555         Address::times_1,
4556         Array<u1>::base_offset_in_bytes()));
4557   __ andl(rdx, ~JVM_CONSTANT_QDESC_BIT);
4558   __ cmpl(rdx, JVM_CONSTANT_Class);
4559   __ jcc(Assembler::equal, quicked);
4560 
4561   __ push(atos); // save receiver for result, and for GC
4562   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4563   // vm_result_2 has metadata result
4564 
4565 #ifndef _LP64
4566   // borrow rdi from locals
4567   __ get_thread(rdi);
4568   __ get_vm_result_2(rax, rdi);
4569   __ restore_locals();
4570 #else
4571   __ get_vm_result_2(rax, r15_thread);
4572 #endif
4573 
4574   __ pop_ptr(rdx); // restore receiver
4575   __ verify_oop(rdx);
4576   __ load_klass(rdx, rdx);
4577   __ jmpb(resolved);
4578 


4589 
4590   // Come here on failure
4591   __ xorl(rax, rax);
4592   __ jmpb(done);
4593   // Come here on success
4594   __ bind(ok_is_subtype);
4595   __ movl(rax, 1);
4596 
4597   // Collect counts on whether this test sees NULLs a lot or not.
4598   if (ProfileInterpreter) {
4599     __ jmp(done);
4600     __ bind(is_null);
4601     __ profile_null_seen(rcx);
4602   } else {
4603     __ bind(is_null);   // same as 'done'
4604   }
4605   __ bind(done);
4606   // rax = 0: obj == NULL or  obj is not an instanceof the specified klass
4607   // rax = 1: obj != NULL and obj is     an instanceof the specified klass
4608 }
4609 
4610 //----------------------------------------------------------------------------------------------------
4611 // Breakpoints
4612 void TemplateTable::_breakpoint() {
4613   // Note: We get here even if we are single stepping.
4614   // jbug insists on setting breakpoints at every bytecode
4615   // even if we are in single step mode.
4616 
4617   transition(vtos, vtos);
4618 
4619   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4620 
4621   // get the unpatched byte code
4622   __ get_method(rarg);
4623   __ call_VM(noreg,
4624              CAST_FROM_FN_PTR(address,
4625                               InterpreterRuntime::get_original_bytecode_at),
4626              rarg, rbcp);
4627   __ mov(rbx, rax);  // the bytecode dispatch that follows expects the result in rbx
4628 