140 ShouldNotReachHere();
141 return Assembler::zero;
142 }
143
144
145
// Miscellaneous helper routines
147 // Store an oop (or NULL) at the address described by obj.
148 // If val == noreg this means store a NULL
149
150
151 static void do_oop_store(InterpreterMacroAssembler* _masm,
152 Address obj,
153 Register val,
154 BarrierSet::Name barrier,
155 bool precise) {
156 assert(val == noreg || val == rax, "parameter is just for looks");
157 switch (barrier) {
158 #if INCLUDE_ALL_GCS
159 case BarrierSet::G1SATBCTLogging:
160 {
161 // flatten object address if needed
162 // We do it regardless of precise because we need the registers
163 if (obj.index() == noreg && obj.disp() == 0) {
164 if (obj.base() != rdx) {
165 __ movptr(rdx, obj.base());
166 }
167 } else {
168 __ lea(rdx, obj);
169 }
170
171 Register rtmp = LP64_ONLY(r8) NOT_LP64(rsi);
172 Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
173
174 NOT_LP64(__ get_thread(rcx));
175 NOT_LP64(__ save_bcp());
176
177 __ g1_write_barrier_pre(rdx /* obj */,
178 rbx /* pre_val */,
179 rthread /* thread */,
180 rtmp /* tmp */,
181 val != noreg /* tosca_live */,
182 false /* expand_call */);
183 if (val == noreg) {
184 __ store_heap_oop_null(Address(rdx, 0));
185 } else {
186 // G1 barrier needs uncompressed oop for region cross check.
187 Register new_val = val;
188 if (UseCompressedOops) {
189 new_val = rbx;
190 __ movptr(new_val, val);
191 }
192 __ store_heap_oop(Address(rdx, 0), val);
193 __ g1_write_barrier_post(rdx /* store_adr */,
194 new_val /* new_val */,
195 rthread /* thread */,
196 rtmp /* tmp */,
197 rbx /* tmp2 */);
198 }
199 NOT_LP64( __ restore_bcp());
200 }
201 break;
202 #endif // INCLUDE_ALL_GCS
203 case BarrierSet::CardTableForRS:
204 case BarrierSet::CardTableExtension:
205 {
206 if (val == noreg) {
207 __ store_heap_oop_null(obj);
208 } else {
209 __ store_heap_oop(obj, val);
210 // flatten object address if needed
211 if (!precise || (obj.index() == noreg && obj.disp() == 0)) {
667 transition(vtos, dtos);
668 locals_index_wide(rbx);
669 __ load_double(daddress(rbx));
670 }
671
void TemplateTable::wide_aload() {
  // wide aload: push the object reference held in the local variable
  // addressed by a 2-byte (wide) index.  Result lands in rax (atos).
  transition(vtos, atos);
  locals_index_wide(rbx);        // rbx = wide local-variable index from the bytecode stream
  __ movptr(rax, aaddress(rbx)); // load the reference into the tos cache register
}
677
void TemplateTable::index_check(Register array, Register index) {
  // Pop the array reference off the expression stack into 'array', then
  // delegate to the null- and bounds-check that assumes the reference
  // is already in a register.
  __ pop_ptr(array);
  index_check_without_pop(array, index);
}
683
void TemplateTable::index_check_without_pop(Register array, Register index) {
  // Verify 'array' is non-null and 0 <= index < array.length; on failure
  // jump to the shared ArrayIndexOutOfBoundsException entry.
  // destroys rbx
  // check array (null check folded into the length-field access)
  __ null_check(array, arrayOopDesc::length_offset_in_bytes());
  // sign extend index for use by indexed load
  __ movl2ptr(index, index);
  // check index against the array length
  __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
  if (index != rbx) {
    // ??? convention: move aberrant index into rbx for exception message
    assert(rbx != array, "different registers");
    __ movl(rbx, index);
  }
  // unsigned 'aboveEqual' also traps negative indices (they compare high)
  __ jump_cc(Assembler::aboveEqual,
             ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
}
700
701
void TemplateTable::iaload() {
  // iaload: ..., arrayref, index -> ..., value (int element load)
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // scaled access: base + index*4 + int-array header offset
  __ movl(rax, Address(rdx, rax,
                       Address::times_4,
                       arrayOopDesc::base_offset_in_bytes(T_INT)));
}
711
void TemplateTable::laload() {
  // laload: ..., arrayref, index -> ..., value (long element load)
  transition(itos, ltos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  NOT_LP64(__ mov(rbx, rax));   // 32-bit: keep index in rbx since rax/rdx hold the long
  // rbx: index
  __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
  // 32-bit only: load the high word of the long into rdx
  NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
}
722
723
724
void TemplateTable::faload() {
  // faload: ..., arrayref, index -> ..., value (float element load)
  transition(itos, ftos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // result goes to the float tos location (xmm0 or FPU stack, per load_float)
  __ load_float(Address(rdx, rax,
                        Address::times_4,
                        arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}
734
void TemplateTable::daload() {
  // daload: ..., arrayref, index -> ..., value (double element load)
  transition(itos, dtos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // result goes to the double tos location (xmm0 or FPU stack, per load_double)
  __ load_double(Address(rdx, rax,
                         Address::times_8,
                         arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}
744
void TemplateTable::aaload() {
  // aaload: ..., arrayref, index -> ..., value (object reference load)
  transition(itos, atos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // element scale depends on whether oops are compressed (4 vs pointer size);
  // load_heap_oop also decompresses when UseCompressedOops
  __ load_heap_oop(rax, Address(rdx, rax,
                                UseCompressedOops ? Address::times_4 : Address::times_ptr,
                                arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
}
754
void TemplateTable::baload() {
  // baload: ..., arrayref, index -> ..., value (byte/boolean load, sign-extended)
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
}
762
void TemplateTable::caload() {
  // caload: ..., arrayref, index -> ..., value (char load, zero-extended)
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
770
771 // iload followed by caload frequent pair
void TemplateTable::fast_icaload() {
  // Fused template for the frequent iload-followed-by-caload bytecode pair:
  // loads the index straight from a local instead of going through tos.
  transition(vtos, itos);
  // load index out of locals
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));

  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // char load, zero-extended (same as caload)
  __ load_unsigned_short(rax,
                         Address(rdx, rax,
                                 Address::times_2,
                                 arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
786
787
void TemplateTable::saload() {
  // saload: ..., arrayref, index -> ..., value (short load, sign-extended)
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
}
795
void TemplateTable::iload(int n) {
  // iload_<n>: push int from local slot n (compile-time constant index).
  transition(vtos, itos);
  __ movl(rax, iaddress(n));
}
800
void TemplateTable::lload(int n) {
  // lload_<n>: push long from local slots n/n+1.
  transition(vtos, ltos);
  __ movptr(rax, laddress(n));
  // 32-bit only: high word of the long goes to rdx
  NOT_LP64(__ movptr(rdx, haddress(n)));
}
806
void TemplateTable::fload(int n) {
  // fload_<n>: push float from local slot n into the float tos location.
  transition(vtos, ftos);
  __ load_float(faddress(n));
}
811
812 void TemplateTable::dload(int n) {
967 __ movdbl(daddress(rbx), xmm0);
968 #else
969 wide_lstore();
970 #endif
971 }
972
void TemplateTable::wide_astore() {
  // wide astore: pop a reference and store it into the local variable
  // addressed by a 2-byte (wide) index.
  transition(vtos, vtos);
  __ pop_ptr(rax);               // value to store
  locals_index_wide(rbx);        // rbx = wide local-variable index
  __ movptr(aaddress(rbx), rax);
}
979
void TemplateTable::iastore() {
  // iastore: ..., arrayref, index, value -> ... (int element store)
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ movl(Address(rdx, rbx,
                  Address::times_4,
                  arrayOopDesc::base_offset_in_bytes(T_INT)),
          rax);
}
992
void TemplateTable::lastore() {
  // lastore: ..., arrayref, index, value -> ... (long element store)
  transition(ltos, vtos);
  __ pop_i(rbx);
  // rax: low(value)   (whole value on 64-bit)
  // rcx: array
  // rdx: high(value)  (32-bit only)
  index_check(rcx, rbx); // prefer index in rbx
  // rbx: index
  __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
  // 32-bit only: store the high word
  NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
}
1004
1005
void TemplateTable::fastore() {
  // fastore: ..., arrayref, index, value -> ... (float element store)
  transition(ftos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 1 ? xmm0 : ST(0)
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}
1015
void TemplateTable::dastore() {
  // dastore: ..., arrayref, index, value -> ... (double element store)
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 2 ? xmm0 : ST(0)
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}
1025
1026 void TemplateTable::aastore() {
1027 Label is_null, ok_is_subtype, done;
1028 transition(vtos, vtos);
1029 // stack: ..., array, index, value
1030 __ movptr(rax, at_tos()); // value
1031 __ movl(rcx, at_tos_p1()); // index
1032 __ movptr(rdx, at_tos_p2()); // array
1033
1034 Address element_address(rdx, rcx,
1035 UseCompressedOops? Address::times_4 : Address::times_ptr,
1036 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1037
1038 index_check_without_pop(rdx, rcx); // kills rbx
1039 __ testptr(rax, rax);
1040 __ jcc(Assembler::zero, is_null);
1041
1042 // Move subklass into rbx
1043 __ load_klass(rbx, rax);
1044 // Move superklass into rax
1045 __ load_klass(rax, rdx);
1046 __ movptr(rax, Address(rax,
1047 ObjArrayKlass::element_klass_offset()));
1048 // Compress array + index*oopSize + 12 into a single register. Frees rcx.
1049 __ lea(rdx, element_address);
1050
1051 // Generate subtype check. Blows rcx, rdi
1052 // Superklass in rax. Subklass in rbx.
1053 __ gen_subtype_check(rbx, ok_is_subtype);
1054
1055 // Come here on failure
1056 // object is at TOS
1057 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1058
1067
1068 // Have a NULL in rax, rdx=array, ecx=index. Store NULL at ary[idx]
1069 __ bind(is_null);
1070 __ profile_null_seen(rbx);
1071
1072 // Store a NULL
1073 do_oop_store(_masm, element_address, noreg, _bs->kind(), true);
1074
1075 // Pop stack arguments
1076 __ bind(done);
1077 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1078 }
1079
void TemplateTable::bastore() {
  // bastore: ..., arrayref, index, value -> ... (byte/boolean element store)
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ movb(Address(rdx, rbx,
                  Address::times_1,
                  arrayOopDesc::base_offset_in_bytes(T_BYTE)),
          rax);
}
1092
void TemplateTable::castore() {
  // castore: ..., arrayref, index, value -> ... (char element store)
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ movw(Address(rdx, rbx,
                  Address::times_2,
                  arrayOopDesc::base_offset_in_bytes(T_CHAR)),
          rax);
}
1105
1106
void TemplateTable::sastore() {
  // short and char elements have the same size and store sequence,
  // so sastore reuses the castore template.
  castore();
}
1110
void TemplateTable::istore(int n) {
  // istore_<n>: store the int tos value into local slot n.
  transition(itos, vtos);
  __ movl(iaddress(n), rax);
}
1115
1116 void TemplateTable::lstore(int n) {
1117 transition(ltos, vtos);
1118 __ movptr(laddress(n), rax);
1119 NOT_LP64(__ movptr(haddress(n), rdx));
2294 __ bind(not_taken);
2295 __ profile_not_taken_branch(rax);
2296 }
2297
void TemplateTable::if_nullcmp(Condition cc) {
  // ifnull / ifnonnull: branch when the reference on tos compares to null
  // according to 'cc'.
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ testptr(rax, rax);          // sets flags for the null comparison
  __ jcc(j_not(cc), not_taken);  // fall through to the taken path
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}
2308
void TemplateTable::if_acmp(Condition cc) {
  // if_acmpeq / if_acmpne: branch on reference identity comparison of the
  // two top stack values.
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_ptr(rdx);               // second operand; first is in rax (tos)
  __ cmpptr(rdx, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}
2320
void TemplateTable::ret() {
  // ret: return from a jsr subroutine — reload bcp from the return bci
  // stored in the local variable named by the bytecode operand.
  transition(vtos, vtos);
  locals_index(rbx);
  LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
  NOT_LP64(__ movptr(rbx, iaddress(rbx)));
  __ profile_ret(rbx, rcx);
  __ get_method(rax);
  __ movptr(rbcp, Address(rax, Method::const_offset()));
  // rbcp = ConstMethod + codes_offset + bci
  __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
                       ConstMethod::codes_offset()));
  __ dispatch_next(vtos);
}
2333
2734 __ pop_ptr(r);
2735 __ null_check(r); // for field access must check obj.
2736 __ verify_oop(r);
2737 }
2738
2739 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2740 transition(vtos, vtos);
2741
2742 const Register cache = rcx;
2743 const Register index = rdx;
2744 const Register obj = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2745 const Register off = rbx;
2746 const Register flags = rax;
2747 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2748
2749 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2750 jvmti_post_field_access(cache, index, is_static, false);
2751 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2752
2753 if (!is_static) pop_and_check_object(obj);
2754
2755 const Address field(obj, off, Address::times_1, 0*wordSize);
2756 NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
2757
2758 Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2759
2760 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2761 // Make sure we don't need to mask edx after the above shift
2762 assert(btos == 0, "change code, btos != 0");
2763
2764 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2765
2766 __ jcc(Assembler::notZero, notByte);
2767 // btos
2768 __ load_signed_byte(rax, field);
2769 __ push(btos);
2770 // Rewrite bytecode to be faster
2771 if (!is_static && rc == may_rewrite) {
2772 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2773 }
3003 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3004 __ andl(rdx, 0x1);
3005
3006 // field addresses
3007 const Address field(obj, off, Address::times_1, 0*wordSize);
3008 NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3009
3010 Label notByte, notInt, notShort, notChar,
3011 notLong, notFloat, notObj, notDouble;
3012
3013 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3014
3015 assert(btos == 0, "change code, btos != 0");
3016 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3017 __ jcc(Assembler::notZero, notByte);
3018
3019 // btos
3020 {
3021 __ pop(btos);
3022 if (!is_static) pop_and_check_object(obj);
3023 __ movb(field, rax);
3024 if (!is_static && rc == may_rewrite) {
3025 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3026 }
3027 __ jmp(Done);
3028 }
3029
3030 __ bind(notByte);
3031 __ cmpl(flags, atos);
3032 __ jcc(Assembler::notEqual, notObj);
3033
3034 // atos
3035 {
3036 __ pop(atos);
3037 if (!is_static) pop_and_check_object(obj);
3038 // Store into the field
3039 do_oop_store(_masm, field, rax, _bs->kind(), false);
3040 if (!is_static && rc == may_rewrite) {
3041 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3042 }
3043 __ jmp(Done);
3044 }
3045
3046 __ bind(notObj);
3047 __ cmpl(flags, itos);
3048 __ jcc(Assembler::notEqual, notInt);
3049
3050 // itos
3051 {
3052 __ pop(itos);
3053 if (!is_static) pop_and_check_object(obj);
3054 __ movl(field, rax);
3055 if (!is_static && rc == may_rewrite) {
3056 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3057 }
3058 __ jmp(Done);
3059 }
3060
3061 __ bind(notInt);
3062 __ cmpl(flags, ctos);
3063 __ jcc(Assembler::notEqual, notChar);
3064
3065 // ctos
3066 {
3067 __ pop(ctos);
3068 if (!is_static) pop_and_check_object(obj);
3069 __ movw(field, rax);
3070 if (!is_static && rc == may_rewrite) {
3071 patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3072 }
3073 __ jmp(Done);
3074 }
3075
3076 __ bind(notChar);
3077 __ cmpl(flags, stos);
3078 __ jcc(Assembler::notEqual, notShort);
3079
3080 // stos
3081 {
3082 __ pop(stos);
3083 if (!is_static) pop_and_check_object(obj);
3084 __ movw(field, rax);
3085 if (!is_static && rc == may_rewrite) {
3086 patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3087 }
3088 __ jmp(Done);
3089 }
3090
3091 __ bind(notShort);
3092 __ cmpl(flags, ltos);
3093 __ jcc(Assembler::notEqual, notLong);
3094
3095 // ltos
3096 #ifdef _LP64
3097 {
3098 __ pop(ltos);
3099 if (!is_static) pop_and_check_object(obj);
3100 __ movq(field, rax);
3101 if (!is_static && rc == may_rewrite) {
3102 patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3103 }
3104 __ jmp(Done);
3105 }
3106 #else
3107 {
3108 Label notVolatileLong;
3109 __ testl(rdx, rdx);
3110 __ jcc(Assembler::zero, notVolatileLong);
3111
3112 __ pop(ltos); // overwrites rdx, do this after testing volatile.
3113 if (!is_static) pop_and_check_object(obj);
3114
3115 // Replace with real volatile test
3116 __ push(rdx);
3117 __ push(rax); // Must update atomically with FIST
3118 __ fild_d(Address(rsp,0)); // So load into FPU register
3119 __ fistp_d(field); // and put into memory atomically
3126
3127 __ bind(notVolatileLong);
3128
3129 __ pop(ltos); // overwrites rdx
3130 if (!is_static) pop_and_check_object(obj);
3131 __ movptr(hi, rdx);
3132 __ movptr(field, rax);
3133 // Don't rewrite to _fast_lputfield for potential volatile case.
3134 __ jmp(notVolatile);
3135 }
3136 #endif // _LP64
3137
3138 __ bind(notLong);
3139 __ cmpl(flags, ftos);
3140 __ jcc(Assembler::notEqual, notFloat);
3141
3142 // ftos
3143 {
3144 __ pop(ftos);
3145 if (!is_static) pop_and_check_object(obj);
3146 __ store_float(field);
3147 if (!is_static && rc == may_rewrite) {
3148 patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3149 }
3150 __ jmp(Done);
3151 }
3152
3153 __ bind(notFloat);
3154 #ifdef ASSERT
3155 __ cmpl(flags, dtos);
3156 __ jcc(Assembler::notEqual, notDouble);
3157 #endif
3158
3159 // dtos
3160 {
3161 __ pop(dtos);
3162 if (!is_static) pop_and_check_object(obj);
3163 __ store_double(field);
3164 if (!is_static && rc == may_rewrite) {
3165 patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3166 }
3167 }
3168
3169 #ifdef ASSERT
3170 __ jmp(Done);
3171
3172 __ bind(notDouble);
3173 __ stop("Bad state");
3174 #endif
3175
3176 __ bind(Done);
3177
3178 // Check for volatile store
3179 __ testl(rdx, rdx);
3180 __ jcc(Assembler::zero, notVolatile);
3181 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3182 Assembler::StoreStore));
3262
3263 // test for volatile with rdx but rdx is tos register for lputfield.
3264 __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3265 in_bytes(base +
3266 ConstantPoolCacheEntry::flags_offset())));
3267
3268 // replace index with field offset from cache entry
3269 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3270 in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3271
3272 // [jk] not needed currently
3273 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3274 // Assembler::StoreStore));
3275
3276 Label notVolatile;
3277 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3278 __ andl(rdx, 0x1);
3279
3280 // Get object from stack
3281 pop_and_check_object(rcx);
3282
3283 // field address
3284 const Address field(rcx, rbx, Address::times_1);
3285
3286 // access field
3287 switch (bytecode()) {
3288 case Bytecodes::_fast_aputfield:
3289 do_oop_store(_masm, field, rax, _bs->kind(), false);
3290 break;
3291 case Bytecodes::_fast_lputfield:
3292 #ifdef _LP64
3293 __ movq(field, rax);
3294 #else
3295 __ stop("should not be rewritten");
3296 #endif
3297 break;
3298 case Bytecodes::_fast_iputfield:
3299 __ movl(field, rax);
3300 break;
3301 case Bytecodes::_fast_bputfield:
3350 }
3351
3352 // access constant pool cache
3353 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3354 // replace index with field offset from cache entry
3355 // [jk] not needed currently
3356 // if (os::is_MP()) {
3357 // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3358 // in_bytes(ConstantPoolCache::base_offset() +
3359 // ConstantPoolCacheEntry::flags_offset())));
3360 // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3361 // __ andl(rdx, 0x1);
3362 // }
3363 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3364 in_bytes(ConstantPoolCache::base_offset() +
3365 ConstantPoolCacheEntry::f2_offset())));
3366
3367 // rax: object
3368 __ verify_oop(rax);
3369 __ null_check(rax);
3370 Address field(rax, rbx, Address::times_1);
3371
3372 // access field
3373 switch (bytecode()) {
3374 case Bytecodes::_fast_agetfield:
3375 __ load_heap_oop(rax, field);
3376 __ verify_oop(rax);
3377 break;
3378 case Bytecodes::_fast_lgetfield:
3379 #ifdef _LP64
3380 __ movq(rax, field);
3381 #else
3382 __ stop("should not be rewritten");
3383 #endif
3384 break;
3385 case Bytecodes::_fast_igetfield:
3386 __ movl(rax, field);
3387 break;
3388 case Bytecodes::_fast_bgetfield:
3389 __ movsbl(rax, field);
3411 // __ membar(Assembler::LoadLoad);
3412 // __ bind(notVolatile);
3413 //};
3414 }
3415
3416 void TemplateTable::fast_xaccess(TosState state) {
3417 transition(vtos, state);
3418
3419 // get receiver
3420 __ movptr(rax, aaddress(0));
3421 // access constant pool cache
3422 __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3423 __ movptr(rbx,
3424 Address(rcx, rdx, Address::times_ptr,
3425 in_bytes(ConstantPoolCache::base_offset() +
3426 ConstantPoolCacheEntry::f2_offset())));
3427 // make sure exception is reported in correct bcp range (getfield is
3428 // next instruction)
3429 __ increment(rbcp);
3430 __ null_check(rax);
3431 const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3432 switch (state) {
3433 case itos:
3434 __ movl(rax, field);
3435 break;
3436 case atos:
3437 __ load_heap_oop(rax, field);
3438 __ verify_oop(rax);
3439 break;
3440 case ftos:
3441 __ load_float(field);
3442 break;
3443 default:
3444 ShouldNotReachHere();
3445 }
3446
3447 // [jk] not needed currently
3448 // if (os::is_MP()) {
3449 // Label notVolatile;
3450 // __ movl(rdx, Address(rcx, rdx, Address::times_8,
3813 __ jcc(Assembler::notZero, slow_case);
3814
3815 //
3816 // Allocate the instance
3817 // 1) Try to allocate in the TLAB
3818 // 2) if fail and the object is large allocate in the shared Eden
3819 // 3) if the above fails (or is not applicable), go to a slow case
3820 // (creates a new TLAB, etc.)
3821
3822 const bool allow_shared_alloc =
3823 Universe::heap()->supports_inline_contig_alloc();
3824
3825 const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
3826 #ifndef _LP64
3827 if (UseTLAB || allow_shared_alloc) {
3828 __ get_thread(thread);
3829 }
3830 #endif // _LP64
3831
3832 if (UseTLAB) {
3833 __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
3834 __ lea(rbx, Address(rax, rdx, Address::times_1));
3835 __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
3836 __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3837 __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3838 if (ZeroTLAB) {
3839 // the fields have been already cleared
3840 __ jmp(initialize_header);
3841 } else {
3842 // initialize both the header and fields
3843 __ jmp(initialize_object);
3844 }
3845 }
3846
3847 // Allocation in the shared Eden, if allowed.
3848 //
3849 // rdx: instance size in bytes
3850 if (allow_shared_alloc) {
3851 __ bind(allocate_shared);
3852
3853 ExternalAddress heap_top((address)Universe::heap()->top_addr());
3854 ExternalAddress heap_end((address)Universe::heap()->end_addr());
3855
3856 Label retry;
3857 __ bind(retry);
3958 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
3959 __ load_unsigned_byte(rarg1, at_bcp(1));
3960 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3961 rarg1, rax);
3962 }
3963
void TemplateTable::anewarray() {
  // anewarray: allocate an object array; element class is named by a 2-byte
  // constant-pool index, length is the int on tos.  Done entirely in the VM.
  transition(itos, atos);

  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx); // constant pool
  Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx); // cp index

  __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
  __ get_constant_pool(rarg1);
  // rax (tos) carries the array length as the third argument
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          rarg1, rarg2, rax);
}
3975
void TemplateTable::arraylength() {
  // arraylength: replace the array reference on tos with its length.
  transition(atos, itos);
  // null check is folded into the length-field access
  __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
  __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
}
3981
3982 void TemplateTable::checkcast() {
3983 transition(atos, atos);
3984 Label done, is_null, ok_is_subtype, quicked, resolved;
3985 __ testptr(rax, rax); // object is in rax
3986 __ jcc(Assembler::zero, is_null);
3987
3988 // Get cpool & tags index
3989 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3990 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3991 // See if bytecode has already been quicked
3992 __ cmpb(Address(rdx, rbx,
3993 Address::times_1,
3994 Array<u1>::base_offset_in_bytes()),
3995 JVM_CONSTANT_Class);
3996 __ jcc(Assembler::equal, quicked);
3997 __ push(atos); // save receiver for result, and for GC
4058 Array<u1>::base_offset_in_bytes()),
4059 JVM_CONSTANT_Class);
4060 __ jcc(Assembler::equal, quicked);
4061
4062 __ push(atos); // save receiver for result, and for GC
4063 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4064 // vm_result_2 has metadata result
4065
4066 #ifndef _LP64
4067 // borrow rdi from locals
4068 __ get_thread(rdi);
4069 __ get_vm_result_2(rax, rdi);
4070 __ restore_locals();
4071 #else
4072 __ get_vm_result_2(rax, r15_thread);
4073 #endif
4074
4075 __ pop_ptr(rdx); // restore receiver
4076 __ verify_oop(rdx);
4077 __ load_klass(rdx, rdx);
4078 __ jmpb(resolved);
4079
4080 // Get superklass in rax and subklass in rdx
4081 __ bind(quicked);
4082 __ load_klass(rdx, rax);
4083 __ movptr(rax, Address(rcx, rbx,
4084 Address::times_ptr, sizeof(ConstantPool)));
4085
4086 __ bind(resolved);
4087
4088 // Generate subtype check. Blows rcx, rdi
4089 // Superklass in rax. Subklass in rdx.
4090 __ gen_subtype_check(rdx, ok_is_subtype);
4091
4092 // Come here on failure
4093 __ xorl(rax, rax);
4094 __ jmpb(done);
4095 // Come here on success
4096 __ bind(ok_is_subtype);
4097 __ movl(rax, 1);
4098
4154 // Note: monitorenter & exit are symmetric routines; which is reflected
4155 // in the assembly code structure as well
4156 //
4157 // Stack layout:
4158 //
4159 // [expressions ] <--- rsp = expression stack top
4160 // ..
4161 // [expressions ]
4162 // [monitor entry] <--- monitor block top = expression stack bot
4163 // ..
4164 // [monitor entry]
4165 // [frame data ] <--- monitor block bot
4166 // ...
4167 // [saved rbp ] <--- rbp
4168 void TemplateTable::monitorenter() {
4169 transition(atos, vtos);
4170
4171 // check for NULL object
4172 __ null_check(rax);
4173
4174 const Address monitor_block_top(
4175 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4176 const Address monitor_block_bot(
4177 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4178 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4179
4180 Label allocated;
4181
4182 Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4183 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4184 Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4185
4186 // initialize entry pointer
4187 __ xorl(rmon, rmon); // points to free slot or NULL
4188
4189 // find a free slot in the monitor block (result in rmon)
4190 {
4191 Label entry, loop, exit;
4192 __ movptr(rtop, monitor_block_top); // points to current entry,
4193 // starting with top-most entry
4194 __ lea(rbot, monitor_block_bot); // points to word before bottom
4195 // of monitor block
4196 __ jmpb(entry);
4197
4198 __ bind(loop);
4199 // check if current entry is used
4200 __ cmpptr(Address(rtop, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
4201 // if not used then remember entry in rmon
4202 __ cmovptr(Assembler::equal, rmon, rtop); // cmov => cmovptr
4203 // check if current entry is for same object
4204 __ cmpptr(rax, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
4205 // if same object then stop searching
4206 __ jccb(Assembler::equal, exit);
4207 // otherwise advance to next entry
4208 __ addptr(rtop, entry_size);
4209 __ bind(entry);
4210 // check if bottom reached
4211 __ cmpptr(rtop, rbot);
4212 // if not at bottom then check this entry
4213 __ jcc(Assembler::notEqual, loop);
4214 __ bind(exit);
4215 }
4216
4217 __ testptr(rmon, rmon); // check if a slot has been found
4218 __ jcc(Assembler::notZero, allocated); // if found, continue with that one
4219
4220 // allocate one if there's no free slot
4221 {
4222 Label entry, loop;
4223 // 1. compute new pointers // rsp: old expression stack top
4224 __ movptr(rmon, monitor_block_bot); // rmon: old expression stack bottom
4251
4252 // store object
4253 __ movptr(Address(rmon, BasicObjectLock::obj_offset_in_bytes()), rax);
4254 __ lock_object(rmon);
4255
4256 // check to make sure this monitor doesn't cause stack overflow after locking
4257 __ save_bcp(); // in case of exception
4258 __ generate_stack_overflow_check(0);
4259
4260 // The bcp has already been incremented. Just need to dispatch to
4261 // next instruction.
4262 __ dispatch_next(vtos);
4263 }
4264
4265 void TemplateTable::monitorexit() {
4266 transition(atos, vtos);
4267
4268 // check for NULL object
4269 __ null_check(rax);
4270
4271 const Address monitor_block_top(
4272 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4273 const Address monitor_block_bot(
4274 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4275 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4276
4277 Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4278 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4279
4280 Label found;
4281
4282 // find matching slot
4283 {
4284 Label entry, loop;
4285 __ movptr(rtop, monitor_block_top); // points to current entry,
4286 // starting with top-most entry
4287 __ lea(rbot, monitor_block_bot); // points to word before bottom
4288 // of monitor block
4289 __ jmpb(entry);
4290
4291 __ bind(loop);
4292 // check if current entry is for same object
4293 __ cmpptr(rax, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
4294 // if same object then stop searching
4295 __ jcc(Assembler::equal, found);
4296 // otherwise advance to next entry
4297 __ addptr(rtop, entry_size);
4298 __ bind(entry);
4299 // check if bottom reached
4300 __ cmpptr(rtop, rbot);
4301 // if not at bottom then check this entry
4302 __ jcc(Assembler::notEqual, loop);
4303 }
4304
4305 // error handling. Unlocking was not block-structured
4306 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4307 InterpreterRuntime::throw_illegal_monitor_state_exception));
4308 __ should_not_reach_here();
4309
4310 // call run-time routine
4311 __ bind(found);
4312 __ push_ptr(rax); // make sure object is on stack (contract with oopMaps)
4313 __ unlock_object(rtop);
4321 ExternalAddress wtable((address)Interpreter::_wentry_point);
4322 __ jump(ArrayAddress(wtable, Address(noreg, rbx, Address::times_ptr)));
4323 // Note: the rbcp increment step is part of the individual wide bytecode implementations
4324 }
4325
4326 // Multi arrays
void TemplateTable::multianewarray() {
  // multianewarray: allocate a multi-dimensional array.  Dimension counts
  // sit on the expression stack; the VM call does the real work, then the
  // counts are popped here.
  transition(vtos, atos);

  Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rax);
  __ load_unsigned_byte(rax, at_bcp(3)); // get number of dimensions
  // last dim is on top of stack; we want address of first one:
  // first_addr = last_addr + (ndims - 1) * stackElementSize - 1*wordsize
  // the latter wordSize to point to the beginning of the array.
  __ lea(rarg, Address(rsp, rax, Interpreter::stackElementScale(), -wordSize));
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), rarg);
  // reload ndims (rax was clobbered by the VM call result)
  __ load_unsigned_byte(rbx, at_bcp(3));
  __ lea(rsp, Address(rsp, rbx, Interpreter::stackElementScale())); // get rid of counts
}
4340 #endif /* !CC_INTERP */
4341
|
140 ShouldNotReachHere();
141 return Assembler::zero;
142 }
143
144
145
// Miscellaneous helper routines
147 // Store an oop (or NULL) at the address described by obj.
148 // If val == noreg this means store a NULL
149
150
151 static void do_oop_store(InterpreterMacroAssembler* _masm,
152 Address obj,
153 Register val,
154 BarrierSet::Name barrier,
155 bool precise) {
156 assert(val == noreg || val == rax, "parameter is just for looks");
157 switch (barrier) {
158 #if INCLUDE_ALL_GCS
159 case BarrierSet::G1SATBCTLogging:
160 case BarrierSet::ShenandoahBarrierSet:
161 {
162 // flatten object address if needed
163 // We do it regardless of precise because we need the registers
164 if (obj.index() == noreg && obj.disp() == 0) {
165 if (obj.base() != rdx) {
166 __ movptr(rdx, obj.base());
167 }
168 } else {
169 __ lea(rdx, obj);
170 }
171
172 Register rtmp = LP64_ONLY(r8) NOT_LP64(rsi);
173 Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
174
175 NOT_LP64(__ get_thread(rcx));
176 NOT_LP64(__ save_bcp());
177
178 __ g1_write_barrier_pre(rdx /* obj */,
179 rbx /* pre_val */,
180 rthread /* thread */,
181 rtmp /* tmp */,
182 val != noreg /* tosca_live */,
183 false /* expand_call */);
184 if (val == noreg) {
185 __ store_heap_oop_null(Address(rdx, 0));
186 } else {
187 // G1 barrier needs uncompressed oop for region cross check.
188 Register new_val = val;
189 if (UseCompressedOops) {
190 new_val = rbx;
191 __ movptr(new_val, val);
192 }
193 // For Shenandoah, make sure we only store refs into to-space.
194 oopDesc::bs()->interpreter_read_barrier(_masm, val);
195
196 __ store_heap_oop(Address(rdx, 0), val);
197 __ g1_write_barrier_post(rdx /* store_adr */,
198 new_val /* new_val */,
199 rthread /* thread */,
200 rtmp /* tmp */,
201 rbx /* tmp2 */);
202 }
203 NOT_LP64( __ restore_bcp());
204 }
205 break;
206 #endif // INCLUDE_ALL_GCS
207 case BarrierSet::CardTableForRS:
208 case BarrierSet::CardTableExtension:
209 {
210 if (val == noreg) {
211 __ store_heap_oop_null(obj);
212 } else {
213 __ store_heap_oop(obj, val);
214 // flatten object address if needed
215 if (!precise || (obj.index() == noreg && obj.disp() == 0)) {
671 transition(vtos, dtos);
672 locals_index_wide(rbx);
673 __ load_double(daddress(rbx));
674 }
675
// Emit 'wide aload': load an object reference from the local variable whose
// 2-byte index follows the wide prefix; result in rax (atos).
void TemplateTable::wide_aload() {
  transition(vtos, atos);
  locals_index_wide(rbx);          // rbx = 16-bit local index from the bytecode stream
  __ movptr(rax, aaddress(rbx));
}
681
// Pop the array reference off the expression stack into 'array', then do the
// null/range check against 'index' (see index_check_without_pop).
void TemplateTable::index_check(Register array, Register index) {
  // Pop ptr into array
  __ pop_ptr(array);
  index_check_without_pop(array, index);
}
687
// Null-check 'array' and range-check 'index' against its length, jumping to
// the shared ArrayIndexOutOfBoundsException entry on failure.
// Kills rbx (used to pass the bad index to the exception handler).
void TemplateTable::index_check_without_pop(Register array, Register index) {
  // destroys rbx
  // check array

  // Shenandoah debug mode: resolve the array through a read barrier so the
  // length is always read from the to-space copy.
  if (ShenandoahVerifyReadsToFromSpace) {
    oopDesc::bs()->interpreter_read_barrier(_masm, array);
  }

  // Implicit null check folded into the access at the length-field offset.
  __ null_check(array, arrayOopDesc::length_offset_in_bytes());
  // sign extend index for use by indexed load
  __ movl2ptr(index, index);
  // check index
  __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
  if (index != rbx) {
    // ??? convention: move aberrant index into rbx for exception message
    assert(rbx != array, "different registers");
    __ movl(rbx, index);
  }
  // 'aboveEqual' is an unsigned condition, so a negative index also takes
  // the out-of-bounds path.
  __ jump_cc(Assembler::aboveEqual,
             ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
}
709
710
// Emit 'iaload': load an int from array rdx at index rax; result in rax.
void TemplateTable::iaload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop (known non-null after index_check)
  // before reading the element.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ movl(rax, Address(rdx, rax,
                       Address::times_4,
                       arrayOopDesc::base_offset_in_bytes(T_INT)));
}
721
// Emit 'laload': load a long from array rdx at index rax.
// Result in rax (LP64) or rax/rdx low/high pair (32-bit).
void TemplateTable::laload() {
  transition(itos, ltos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // 32-bit: move the index out of rax, which will receive the low word.
  NOT_LP64(__ mov(rbx, rax));
  // rbx,: index
  // Shenandoah: resolve the array oop before the element reads.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
  NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
}
733
734
735
// Emit 'faload': load a float from array rdx at index rax onto the FP
// top-of-stack (xmm0 or FPU stack, depending on UseSSE).
void TemplateTable::faload() {
  transition(itos, ftos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_float(Address(rdx, rax,
                        Address::times_4,
                        arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}
746
// Emit 'daload': load a double from array rdx at index rax onto the FP
// top-of-stack.
void TemplateTable::daload() {
  transition(itos, dtos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_double(Address(rdx, rax,
                         Address::times_8,
                         arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}
757
// Emit 'aaload': load an object reference from array rdx at index rax;
// result (decoded if compressed oops) in rax.
void TemplateTable::aaload() {
  transition(itos, atos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  // Element scale is 4 with compressed oops (narrow oop), pointer size otherwise.
  __ load_heap_oop(rax, Address(rdx, rax,
                                UseCompressedOops ? Address::times_4 : Address::times_ptr,
                                arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
}
768
// Emit 'baload': load a byte (sign-extended to int) from array rdx at
// index rax; result in rax.
void TemplateTable::baload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
}
777
// Emit 'caload': load a char (zero-extended to int) from array rdx at
// index rax; result in rax.
void TemplateTable::caload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
786
787 // iload followed by caload frequent pair
// iload followed by caload frequent pair
// Fused template: load the int local named by the bytecode's index byte,
// then use it as a char-array index; result in rax.
void TemplateTable::fast_icaload() {
  transition(vtos, itos);
  // load index out of locals
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));

  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_unsigned_short(rax,
                         Address(rdx, rax,
                                 Address::times_2,
                                 arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
803
804
// Emit 'saload': load a short (sign-extended to int) from array rdx at
// index rax; result in rax.
void TemplateTable::saload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  // Shenandoah: resolve the array oop before the element read.
  oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
}
813
// Emit 'iload_<n>': load the int local at fixed slot n into rax.
void TemplateTable::iload(int n) {
  transition(vtos, itos);
  __ movl(rax, iaddress(n));
}
818
// Emit 'lload_<n>': load the long local at fixed slot n into rax
// (plus rdx for the high word on 32-bit).
void TemplateTable::lload(int n) {
  transition(vtos, ltos);
  __ movptr(rax, laddress(n));
  NOT_LP64(__ movptr(rdx, haddress(n)));
}
824
// Emit 'fload_<n>': load the float local at fixed slot n onto the FP
// top-of-stack.
void TemplateTable::fload(int n) {
  transition(vtos, ftos);
  __ load_float(faddress(n));
}
829
830 void TemplateTable::dload(int n) {
985 __ movdbl(daddress(rbx), xmm0);
986 #else
987 wide_lstore();
988 #endif
989 }
990
// Emit 'wide astore': pop a reference/return-address from the stack and
// store it into the local named by the 2-byte wide index.
void TemplateTable::wide_astore() {
  transition(vtos, vtos);
  __ pop_ptr(rax);
  locals_index_wide(rbx);          // rbx = 16-bit local index
  __ movptr(aaddress(rbx), rax);
}
997
// Emit 'iastore': store int rax into array rdx at popped index rbx.
void TemplateTable::iastore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Shenandoah: make sure we write into the to-space copy of the array.
  oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ movl(Address(rdx, rbx,
                  Address::times_4,
                  arrayOopDesc::base_offset_in_bytes(T_INT)),
          rax);
}
1011
// Emit 'lastore': store long rax (rax/rdx pair on 32-bit) into array rcx
// at popped index rbx.
void TemplateTable::lastore() {
  transition(ltos, vtos);
  __ pop_i(rbx);
  // rax,: low(value)
  // rcx: array
  // rdx: high(value)
  index_check(rcx, rbx); // prefer index in rbx,
  // rbx,: index
  // Shenandoah: make sure we write into the to-space copy of the array.
  oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
  __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
  NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
}
1024
1025
// Emit 'fastore': store the float on the FP top-of-stack into array rdx
// at popped index rbx.
void TemplateTable::fastore() {
  transition(ftos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 1 ? xmm0 : ST(0)
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Shenandoah: make sure we write into the to-space copy of the array.
  oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}
1036
// Emit 'dastore': store the double on the FP top-of-stack into array rdx
// at popped index rbx.
void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 2 ? xmm0 : ST(0)
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Shenandoah: make sure we write into the to-space copy of the array.
  oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}
1047
1048 void TemplateTable::aastore() {
1049 Label is_null, ok_is_subtype, done;
1050 transition(vtos, vtos);
1051 // stack: ..., array, index, value
1052 __ movptr(rax, at_tos()); // value
1053 __ movl(rcx, at_tos_p1()); // index
1054 __ movptr(rdx, at_tos_p2()); // array
1055
1056 Address element_address(rdx, rcx,
1057 UseCompressedOops? Address::times_4 : Address::times_ptr,
1058 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1059
1060 index_check_without_pop(rdx, rcx); // kills rbx
1061 oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
1062 __ testptr(rax, rax);
1063 __ jcc(Assembler::zero, is_null);
1064
1065 // Move subklass into rbx
1066 __ load_klass(rbx, rax);
1067 // Move superklass into rax
1068 __ load_klass(rax, rdx);
1069 __ movptr(rax, Address(rax,
1070 ObjArrayKlass::element_klass_offset()));
1071 // Compress array + index*oopSize + 12 into a single register. Frees rcx.
1072 __ lea(rdx, element_address);
1073
1074 // Generate subtype check. Blows rcx, rdi
1075 // Superklass in rax. Subklass in rbx.
1076 __ gen_subtype_check(rbx, ok_is_subtype);
1077
1078 // Come here on failure
1079 // object is at TOS
1080 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1081
1090
1091 // Have a NULL in rax, rdx=array, ecx=index. Store NULL at ary[idx]
1092 __ bind(is_null);
1093 __ profile_null_seen(rbx);
1094
1095 // Store a NULL
1096 do_oop_store(_masm, element_address, noreg, _bs->kind(), true);
1097
1098 // Pop stack arguments
1099 __ bind(done);
1100 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1101 }
1102
// Emit 'bastore': store the low byte of rax into array rdx at popped
// index rbx.
void TemplateTable::bastore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Shenandoah: make sure we write into the to-space copy of the array.
  oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ movb(Address(rdx, rbx,
                  Address::times_1,
                  arrayOopDesc::base_offset_in_bytes(T_BYTE)),
          rax);
}
1116
// Emit 'castore': store the low 16 bits of rax into array rdx at popped
// index rbx.  Also used for 'sastore' (same element size, see sastore()).
void TemplateTable::castore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Shenandoah: make sure we write into the to-space copy of the array.
  oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ movw(Address(rdx, rbx,
                  Address::times_2,
                  arrayOopDesc::base_offset_in_bytes(T_CHAR)),
          rax);
}
1130
1131
// Emit 'sastore': identical code to 'castore' since short and char
// elements have the same size and the store truncates either way.
void TemplateTable::sastore() {
  castore();
}
1135
// Emit 'istore_<n>': store int rax into the local at fixed slot n.
void TemplateTable::istore(int n) {
  transition(itos, vtos);
  __ movl(iaddress(n), rax);
}
1140
1141 void TemplateTable::lstore(int n) {
1142 transition(ltos, vtos);
1143 __ movptr(laddress(n), rax);
1144 NOT_LP64(__ movptr(haddress(n), rdx));
2319 __ bind(not_taken);
2320 __ profile_not_taken_branch(rax);
2321 }
2322
2323 void TemplateTable::if_nullcmp(Condition cc) {
2324 transition(atos, vtos);
2325 // assume branch is more often taken than not (loops use backward branches)
2326 Label not_taken;
2327 __ testptr(rax, rax);
2328 __ jcc(j_not(cc), not_taken);
2329 branch(false, false);
2330 __ bind(not_taken);
2331 __ profile_not_taken_branch(rax);
2332 }
2333
// Emit 'if_acmpeq'/'if_acmpne': compare the reference in rax against the
// popped reference in rdx and branch according to cc.
void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_ptr(rdx);
  if (UseShenandoahGC) {
    // For Shenandoah, if the objects are not equal, we try again after
    // resolving both objects through a read barrier, to make sure we're
    // not comparing from-space and to-space copies of the same object.
    Label eq;
    __ cmpptr(rdx, rax);
    __ jcc(Assembler::equal, eq);   // already equal: skip the barriers
    oopDesc::bs()->interpreter_read_barrier(_masm, rax);
    oopDesc::bs()->interpreter_read_barrier(_masm, rdx);
    __ bind(eq);
  }
  // Definitive comparison (on resolved oops under Shenandoah).
  __ cmpptr(rdx, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}
2356
// Emit 'ret': return from a jsr subroutine.  Reads the return bci out of
// the local named by the index byte, then recomputes rbcp from the method's
// ConstMethod and dispatches to the next bytecode there.
void TemplateTable::ret() {
  transition(vtos, vtos);
  locals_index(rbx);
  LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
  NOT_LP64(__ movptr(rbx, iaddress(rbx)));
  __ profile_ret(rbx, rcx);
  // rbcp = ConstMethod::codes() + bci
  __ get_method(rax);
  __ movptr(rbcp, Address(rax, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
                       ConstMethod::codes_offset()));
  __ dispatch_next(vtos);
}
2369
2770 __ pop_ptr(r);
2771 __ null_check(r); // for field access must check obj.
2772 __ verify_oop(r);
2773 }
2774
2775 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2776 transition(vtos, vtos);
2777
2778 const Register cache = rcx;
2779 const Register index = rdx;
2780 const Register obj = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2781 const Register off = rbx;
2782 const Register flags = rax;
2783 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2784
2785 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2786 jvmti_post_field_access(cache, index, is_static, false);
2787 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2788
2789 if (!is_static) pop_and_check_object(obj);
2790 oopDesc::bs()->interpreter_read_barrier_not_null(_masm, obj);
2791
2792 const Address field(obj, off, Address::times_1, 0*wordSize);
2793 NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
2794
2795 Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2796
2797 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2798 // Make sure we don't need to mask edx after the above shift
2799 assert(btos == 0, "change code, btos != 0");
2800
2801 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2802
2803 __ jcc(Assembler::notZero, notByte);
2804 // btos
2805 __ load_signed_byte(rax, field);
2806 __ push(btos);
2807 // Rewrite bytecode to be faster
2808 if (!is_static && rc == may_rewrite) {
2809 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2810 }
3040 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3041 __ andl(rdx, 0x1);
3042
3043 // field addresses
3044 const Address field(obj, off, Address::times_1, 0*wordSize);
3045 NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3046
3047 Label notByte, notInt, notShort, notChar,
3048 notLong, notFloat, notObj, notDouble;
3049
3050 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3051
3052 assert(btos == 0, "change code, btos != 0");
3053 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3054 __ jcc(Assembler::notZero, notByte);
3055
3056 // btos
3057 {
3058 __ pop(btos);
3059 if (!is_static) pop_and_check_object(obj);
3060 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3061 __ movb(field, rax);
3062 if (!is_static && rc == may_rewrite) {
3063 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3064 }
3065 __ jmp(Done);
3066 }
3067
3068 __ bind(notByte);
3069 __ cmpl(flags, atos);
3070 __ jcc(Assembler::notEqual, notObj);
3071
3072 // atos
3073 {
3074 __ pop(atos);
3075 if (!is_static) pop_and_check_object(obj);
3076 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3077 // Store into the field
3078 do_oop_store(_masm, field, rax, _bs->kind(), false);
3079 if (!is_static && rc == may_rewrite) {
3080 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3081 }
3082 __ jmp(Done);
3083 }
3084
3085 __ bind(notObj);
3086 __ cmpl(flags, itos);
3087 __ jcc(Assembler::notEqual, notInt);
3088
3089 // itos
3090 {
3091 __ pop(itos);
3092 if (!is_static) pop_and_check_object(obj);
3093 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3094 __ movl(field, rax);
3095 if (!is_static && rc == may_rewrite) {
3096 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3097 }
3098 __ jmp(Done);
3099 }
3100
3101 __ bind(notInt);
3102 __ cmpl(flags, ctos);
3103 __ jcc(Assembler::notEqual, notChar);
3104
3105 // ctos
3106 {
3107 __ pop(ctos);
3108 if (!is_static) pop_and_check_object(obj);
3109 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3110 __ movw(field, rax);
3111 if (!is_static && rc == may_rewrite) {
3112 patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3113 }
3114 __ jmp(Done);
3115 }
3116
3117 __ bind(notChar);
3118 __ cmpl(flags, stos);
3119 __ jcc(Assembler::notEqual, notShort);
3120
3121 // stos
3122 {
3123 __ pop(stos);
3124 if (!is_static) pop_and_check_object(obj);
3125 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3126 __ movw(field, rax);
3127 if (!is_static && rc == may_rewrite) {
3128 patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3129 }
3130 __ jmp(Done);
3131 }
3132
3133 __ bind(notShort);
3134 __ cmpl(flags, ltos);
3135 __ jcc(Assembler::notEqual, notLong);
3136
3137 // ltos
3138 #ifdef _LP64
3139 {
3140 __ pop(ltos);
3141 if (!is_static) pop_and_check_object(obj);
3142 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3143 __ movq(field, rax);
3144 if (!is_static && rc == may_rewrite) {
3145 patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3146 }
3147 __ jmp(Done);
3148 }
3149 #else
3150 {
3151 Label notVolatileLong;
3152 __ testl(rdx, rdx);
3153 __ jcc(Assembler::zero, notVolatileLong);
3154
3155 __ pop(ltos); // overwrites rdx, do this after testing volatile.
3156 if (!is_static) pop_and_check_object(obj);
3157
3158 // Replace with real volatile test
3159 __ push(rdx);
3160 __ push(rax); // Must update atomically with FIST
3161 __ fild_d(Address(rsp,0)); // So load into FPU register
3162 __ fistp_d(field); // and put into memory atomically
3169
3170 __ bind(notVolatileLong);
3171
3172 __ pop(ltos); // overwrites rdx
3173 if (!is_static) pop_and_check_object(obj);
3174 __ movptr(hi, rdx);
3175 __ movptr(field, rax);
3176 // Don't rewrite to _fast_lputfield for potential volatile case.
3177 __ jmp(notVolatile);
3178 }
3179 #endif // _LP64
3180
3181 __ bind(notLong);
3182 __ cmpl(flags, ftos);
3183 __ jcc(Assembler::notEqual, notFloat);
3184
3185 // ftos
3186 {
3187 __ pop(ftos);
3188 if (!is_static) pop_and_check_object(obj);
3189 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3190 __ store_float(field);
3191 if (!is_static && rc == may_rewrite) {
3192 patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3193 }
3194 __ jmp(Done);
3195 }
3196
3197 __ bind(notFloat);
3198 #ifdef ASSERT
3199 __ cmpl(flags, dtos);
3200 __ jcc(Assembler::notEqual, notDouble);
3201 #endif
3202
3203 // dtos
3204 {
3205 __ pop(dtos);
3206 if (!is_static) pop_and_check_object(obj);
3207 oopDesc::bs()->interpreter_write_barrier(_masm, obj);
3208 __ store_double(field);
3209 if (!is_static && rc == may_rewrite) {
3210 patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3211 }
3212 }
3213
3214 #ifdef ASSERT
3215 __ jmp(Done);
3216
3217 __ bind(notDouble);
3218 __ stop("Bad state");
3219 #endif
3220
3221 __ bind(Done);
3222
3223 // Check for volatile store
3224 __ testl(rdx, rdx);
3225 __ jcc(Assembler::zero, notVolatile);
3226 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3227 Assembler::StoreStore));
3307
3308 // test for volatile with rdx but rdx is tos register for lputfield.
3309 __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3310 in_bytes(base +
3311 ConstantPoolCacheEntry::flags_offset())));
3312
3313 // replace index with field offset from cache entry
3314 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3315 in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3316
3317 // [jk] not needed currently
3318 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3319 // Assembler::StoreStore));
3320
3321 Label notVolatile;
3322 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3323 __ andl(rdx, 0x1);
3324
3325 // Get object from stack
3326 pop_and_check_object(rcx);
3327 oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
3328
3329 // field address
3330 const Address field(rcx, rbx, Address::times_1);
3331
3332 // access field
3333 switch (bytecode()) {
3334 case Bytecodes::_fast_aputfield:
3335 do_oop_store(_masm, field, rax, _bs->kind(), false);
3336 break;
3337 case Bytecodes::_fast_lputfield:
3338 #ifdef _LP64
3339 __ movq(field, rax);
3340 #else
3341 __ stop("should not be rewritten");
3342 #endif
3343 break;
3344 case Bytecodes::_fast_iputfield:
3345 __ movl(field, rax);
3346 break;
3347 case Bytecodes::_fast_bputfield:
3396 }
3397
3398 // access constant pool cache
3399 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3400 // replace index with field offset from cache entry
3401 // [jk] not needed currently
3402 // if (os::is_MP()) {
3403 // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3404 // in_bytes(ConstantPoolCache::base_offset() +
3405 // ConstantPoolCacheEntry::flags_offset())));
3406 // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3407 // __ andl(rdx, 0x1);
3408 // }
3409 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3410 in_bytes(ConstantPoolCache::base_offset() +
3411 ConstantPoolCacheEntry::f2_offset())));
3412
3413 // rax: object
3414 __ verify_oop(rax);
3415 __ null_check(rax);
3416 oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rax);
3417 Address field(rax, rbx, Address::times_1);
3418
3419 // access field
3420 switch (bytecode()) {
3421 case Bytecodes::_fast_agetfield:
3422 __ load_heap_oop(rax, field);
3423 __ verify_oop(rax);
3424 break;
3425 case Bytecodes::_fast_lgetfield:
3426 #ifdef _LP64
3427 __ movq(rax, field);
3428 #else
3429 __ stop("should not be rewritten");
3430 #endif
3431 break;
3432 case Bytecodes::_fast_igetfield:
3433 __ movl(rax, field);
3434 break;
3435 case Bytecodes::_fast_bgetfield:
3436 __ movsbl(rax, field);
3458 // __ membar(Assembler::LoadLoad);
3459 // __ bind(notVolatile);
3460 //};
3461 }
3462
3463 void TemplateTable::fast_xaccess(TosState state) {
3464 transition(vtos, state);
3465
3466 // get receiver
3467 __ movptr(rax, aaddress(0));
3468 // access constant pool cache
3469 __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3470 __ movptr(rbx,
3471 Address(rcx, rdx, Address::times_ptr,
3472 in_bytes(ConstantPoolCache::base_offset() +
3473 ConstantPoolCacheEntry::f2_offset())));
3474 // make sure exception is reported in correct bcp range (getfield is
3475 // next instruction)
3476 __ increment(rbcp);
3477 __ null_check(rax);
3478 oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rax);
3479 const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3480 switch (state) {
3481 case itos:
3482 __ movl(rax, field);
3483 break;
3484 case atos:
3485 __ load_heap_oop(rax, field);
3486 __ verify_oop(rax);
3487 break;
3488 case ftos:
3489 __ load_float(field);
3490 break;
3491 default:
3492 ShouldNotReachHere();
3493 }
3494
3495 // [jk] not needed currently
3496 // if (os::is_MP()) {
3497 // Label notVolatile;
3498 // __ movl(rdx, Address(rcx, rdx, Address::times_8,
3861 __ jcc(Assembler::notZero, slow_case);
3862
3863 //
3864 // Allocate the instance
3865 // 1) Try to allocate in the TLAB
3866 // 2) if fail and the object is large allocate in the shared Eden
3867 // 3) if the above fails (or is not applicable), go to a slow case
3868 // (creates a new TLAB, etc.)
3869
3870 const bool allow_shared_alloc =
3871 Universe::heap()->supports_inline_contig_alloc();
3872
3873 const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
3874 #ifndef _LP64
3875 if (UseTLAB || allow_shared_alloc) {
3876 __ get_thread(thread);
3877 }
3878 #endif // _LP64
3879
3880 if (UseTLAB) {
3881 uint oop_extra_words = Universe::heap()->oop_extra_words();
3882 if (oop_extra_words > 0) {
3883 __ addq(rdx, oop_extra_words * HeapWordSize);
3884 }
3885
3886 __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
3887 __ lea(rbx, Address(rax, rdx, Address::times_1));
3888 __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
3889 __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3890 __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3891 Universe::heap()->compile_prepare_oop(_masm, rax);
3892 if (ZeroTLAB) {
3893 // the fields have been already cleared
3894 __ jmp(initialize_header);
3895 } else {
3896 // initialize both the header and fields
3897 __ jmp(initialize_object);
3898 }
3899 }
3900
3901 // Allocation in the shared Eden, if allowed.
3902 //
3903 // rdx: instance size in bytes
3904 if (allow_shared_alloc) {
3905 __ bind(allocate_shared);
3906
3907 ExternalAddress heap_top((address)Universe::heap()->top_addr());
3908 ExternalAddress heap_end((address)Universe::heap()->end_addr());
3909
3910 Label retry;
3911 __ bind(retry);
4012 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4013 __ load_unsigned_byte(rarg1, at_bcp(1));
4014 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4015 rarg1, rax);
4016 }
4017
// Emit 'anewarray': allocate an object array via the runtime.  The element
// count comes in rax (itos), the element class is named by a 2-byte constant
// pool index in the bytecode stream; result oop in rax (atos).
void TemplateTable::anewarray() {
  transition(itos, atos);

  // Call arguments: constant pool, cp index (count stays in rax).
  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
  Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);

  __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
  __ get_constant_pool(rarg1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          rarg1, rarg2, rax);
}
4029
// Emit 'arraylength': replace the array reference in rax with its length.
void TemplateTable::arraylength() {
  transition(atos, itos);
  // Shenandoah debug mode: resolve the array through a read barrier so the
  // length is always read from the to-space copy.
  if (ShenandoahVerifyReadsToFromSpace) {
    oopDesc::bs()->interpreter_read_barrier(_masm, rax);
  }
  // Implicit null check folded into the access at the length-field offset.
  __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
  __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
}
4038
4039 void TemplateTable::checkcast() {
4040 transition(atos, atos);
4041 Label done, is_null, ok_is_subtype, quicked, resolved;
4042 __ testptr(rax, rax); // object is in rax
4043 __ jcc(Assembler::zero, is_null);
4044
4045 // Get cpool & tags index
4046 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4047 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4048 // See if bytecode has already been quicked
4049 __ cmpb(Address(rdx, rbx,
4050 Address::times_1,
4051 Array<u1>::base_offset_in_bytes()),
4052 JVM_CONSTANT_Class);
4053 __ jcc(Assembler::equal, quicked);
4054 __ push(atos); // save receiver for result, and for GC
4115 Array<u1>::base_offset_in_bytes()),
4116 JVM_CONSTANT_Class);
4117 __ jcc(Assembler::equal, quicked);
4118
4119 __ push(atos); // save receiver for result, and for GC
4120 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4121 // vm_result_2 has metadata result
4122
4123 #ifndef _LP64
4124 // borrow rdi from locals
4125 __ get_thread(rdi);
4126 __ get_vm_result_2(rax, rdi);
4127 __ restore_locals();
4128 #else
4129 __ get_vm_result_2(rax, r15_thread);
4130 #endif
4131
4132 __ pop_ptr(rdx); // restore receiver
4133 __ verify_oop(rdx);
4134 __ load_klass(rdx, rdx);
4135 if (ShenandoahVerifyReadsToFromSpace) {
4136 __ jmp(resolved);
4137 } else {
4138 __ jmpb(resolved);
4139 }
4140
4141 // Get superklass in rax and subklass in rdx
4142 __ bind(quicked);
4143 __ load_klass(rdx, rax);
4144 __ movptr(rax, Address(rcx, rbx,
4145 Address::times_ptr, sizeof(ConstantPool)));
4146
4147 __ bind(resolved);
4148
4149 // Generate subtype check. Blows rcx, rdi
4150 // Superklass in rax. Subklass in rdx.
4151 __ gen_subtype_check(rdx, ok_is_subtype);
4152
4153 // Come here on failure
4154 __ xorl(rax, rax);
4155 __ jmpb(done);
4156 // Come here on success
4157 __ bind(ok_is_subtype);
4158 __ movl(rax, 1);
4159
4215 // Note: monitorenter & exit are symmetric routines; which is reflected
4216 // in the assembly code structure as well
4217 //
4218 // Stack layout:
4219 //
4220 // [expressions ] <--- rsp = expression stack top
4221 // ..
4222 // [expressions ]
4223 // [monitor entry] <--- monitor block top = expression stack bot
4224 // ..
4225 // [monitor entry]
4226 // [frame data ] <--- monitor block bot
4227 // ...
4228 // [saved rbp ] <--- rbp
4229 void TemplateTable::monitorenter() {
4230 transition(atos, vtos);
4231
4232 // check for NULL object
4233 __ null_check(rax);
4234
4235 // We need to preemptively evacuate the object, because we later compare
4236 // it to objects in the BasicObjectLock list, and we might get false negatives
4237 // if another thread evacuates the object in the meantime. See acmp.
4238 oopDesc::bs()->interpreter_write_barrier(_masm, rax);
4239
4240 const Address monitor_block_top(
4241 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4242 const Address monitor_block_bot(
4243 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4244 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4245
4246 Label allocated;
4247
4248 Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4249 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4250 Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4251
4252 // initialize entry pointer
4253 __ xorl(rmon, rmon); // points to free slot or NULL
4254
4255 // find a free slot in the monitor block (result in rmon)
4256 {
4257 Label entry, loop, exit;
4258 __ movptr(rtop, monitor_block_top); // points to current entry,
4259 // starting with top-most entry
4260 __ lea(rbot, monitor_block_bot); // points to word before bottom
4261 // of monitor block
4262 if (UseShenandoahGC && ShenandoahVerifyReadsToFromSpace) {
4263 __ jmp(entry);
4264 } else {
4265 __ jmpb(entry);
4266 }
4267
4268 __ bind(loop);
4269 // check if current entry is used
4270 __ cmpptr(Address(rtop, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
4271 // if not used then remember entry in rmon
4272 __ cmovptr(Assembler::equal, rmon, rtop); // cmov => cmovptr
4273 // check if current entry is for same object
4274 __ movptr(rscratch1, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
4275 oopDesc::bs()->interpreter_read_barrier(_masm, rscratch1);
4276 __ cmpptr(rax, rscratch1);
4277 // if same object then stop searching
4278 __ jccb(Assembler::equal, exit);
4279 // otherwise advance to next entry
4280 __ addptr(rtop, entry_size);
4281 __ bind(entry);
4282 // check if bottom reached
4283 __ cmpptr(rtop, rbot);
4284 // if not at bottom then check this entry
4285 __ jcc(Assembler::notEqual, loop);
4286 __ bind(exit);
4287 }
4288
4289 __ testptr(rmon, rmon); // check if a slot has been found
4290 __ jcc(Assembler::notZero, allocated); // if found, continue with that one
4291
4292 // allocate one if there's no free slot
4293 {
4294 Label entry, loop;
4295 // 1. compute new pointers // rsp: old expression stack top
4296 __ movptr(rmon, monitor_block_bot); // rmon: old expression stack bottom
4323
4324 // store object
4325 __ movptr(Address(rmon, BasicObjectLock::obj_offset_in_bytes()), rax);
4326 __ lock_object(rmon);
4327
4328 // check to make sure this monitor doesn't cause stack overflow after locking
4329 __ save_bcp(); // in case of exception
4330 __ generate_stack_overflow_check(0);
4331
4332 // The bcp has already been incremented. Just need to dispatch to
4333 // next instruction.
4334 __ dispatch_next(vtos);
4335 }
4336
// monitorexit bytecode: pop the lock for the object in rax.
// Scans the interpreter frame's BasicObjectLock area for the entry whose
// obj field matches rax; if found, unlocks it, otherwise throws
// IllegalMonitorStateException (unlocking was not block-structured).
void TemplateTable::monitorexit() {
  transition(atos, vtos);

  // check for NULL object
  __ null_check(rax);

  // We need to preemptively evacuate the object, because we later compare
  // it to objects in the BasicObjectLock list, and we might get false negatives
  // if another thread evacuates the object in the meantime. See acmp.
  oopDesc::bs()->interpreter_write_barrier(_masm, rax);

  // Bounds of the monitor block inside the current interpreter frame:
  // entries live between monitor_block_top and the frame's initial SP.
  const Address monitor_block_top(
        rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;

  // Scratch registers for the scan; on 64-bit use C argument registers
  // (caller-saved, safe here), on 32-bit use rdx/rbx.
  Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
  Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);

  Label found;

  // find matching slot
  {
    Label entry, loop;
    __ movptr(rtop, monitor_block_top); // points to current entry,
                                        // starting with top-most entry
    __ lea(rbot, monitor_block_bot);    // points to word before bottom
                                        // of monitor block
    // NOTE(review): with from-space read verification the barrier expands the
    // loop body — presumably past jmpb's 8-bit displacement range, hence the
    // long-form jmp in that configuration. TODO confirm against barrier size.
    if (UseShenandoahGC && ShenandoahVerifyReadsToFromSpace) {
      __ jmp(entry);
    } else {
      __ jmpb(entry);
    }

    __ bind(loop);
    // check if current entry is for same object
    __ movptr(rscratch1, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
    // Resolve the stored oop through the read barrier before comparing,
    // so a forwarded object still matches the (evacuated) rax.
    oopDesc::bs()->interpreter_read_barrier(_masm, rscratch1);
    __ cmpptr(rax, rscratch1);
    // if same object then stop searching
    __ jcc(Assembler::equal, found);
    // otherwise advance to next entry
    __ addptr(rtop, entry_size);
    __ bind(entry);
    // check if bottom reached
    __ cmpptr(rtop, rbot);
    // if not at bottom then check this entry
    __ jcc(Assembler::notEqual, loop);
  }

  // error handling. Unlocking was not block-structured
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                                     InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();

  // call run-time routine
  __ bind(found);
  __ push_ptr(rax); // make sure object is on stack (contract with oopMaps)
  __ unlock_object(rtop);
4404 ExternalAddress wtable((address)Interpreter::_wentry_point);
4405 __ jump(ArrayAddress(wtable, Address(noreg, rbx, Address::times_ptr)));
4406 // Note: the rbcp increment step is part of the individual wide bytecode implementations
4407 }
4408
4409 // Multi arrays
4410 void TemplateTable::multianewarray() {
4411 transition(vtos, atos);
4412
4413 Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4414 __ load_unsigned_byte(rax, at_bcp(3)); // get number of dimensions
4415 // last dim is on top of stack; we want address of first one:
4416 // first_addr = last_addr + (ndims - 1) * stackElementSize - 1*wordsize
4417 // the latter wordSize to point to the beginning of the array.
4418 __ lea(rarg, Address(rsp, rax, Interpreter::stackElementScale(), -wordSize));
4419 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), rarg);
4420 __ load_unsigned_byte(rbx, at_bcp(3));
4421 __ lea(rsp, Address(rsp, rbx, Interpreter::stackElementScale())); // get rid of counts
4422 }
4423 #endif /* !CC_INTERP */
|